Skip to content

Commit 41fd11a

Browse files
committed
Reset release notes and migration script
Signed-off-by: Leandro Lucarella <luca-frequenz@llucax.com>
1 parent 6e45a2f commit 41fd11a

2 files changed

Lines changed: 21 additions & 315 deletions

File tree

RELEASE_NOTES.md

Lines changed: 19 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,23 +2,36 @@
22

33
## Summary
44

5-
This is a maintenance, template-only, bugfix release.
5+
<!-- Here goes a general summary of what this release is about -->
66

77
## Upgrading
88

9+
<!-- Here goes notes on how to upgrade from previous versions, including deprecations and what they should be replaced with -->
10+
911
### Cookiecutter template
1012

1113
All upgrading should be done via the migration script or regenerating the templates.
1214

1315
```bash
14-
curl -sSL https://raw.githubusercontent.com/frequenz-floss/frequenz-repo-config-python/v0.16.0/cookiecutter/migrate.py | python3
16+
curl -sSL https://raw.githubusercontent.com/frequenz-floss/frequenz-repo-config-python/<tag>/cookiecutter/migrate.py | python3
1517
```
1618

19+
But you might still need to adapt your code:
20+
21+
<!-- Here upgrade steps for cookiecutter specifically -->
22+
23+
## New Features
24+
25+
<!-- Here goes the main new features and examples or instructions on how to use them -->
26+
27+
### Cookiecutter template
28+
29+
<!-- Here new features for cookiecutter specifically -->
30+
1731
## Bug Fixes
1832

33+
<!-- Here goes notable bug fixes that are worth a special mention or explanation -->
34+
1935
### Cookiecutter template
2036

21-
- Added a migration step for api repositories to fix `mkdocs.yml` when the previous `mkdocstrings-python` v2 migration moved only `paths: ["src"]` under `handlers.python.options` but not `paths: ["py"]`.
22-
- Fixed runners for jobs that require Docker and were wrongly converted to `ubuntu-slim` in v0.15.0, changing them back to `ubuntu-24.04` to avoid Docker-related failures. The template and the migration script were both updated to reflect this change.
23-
- Updated the repo-config migration workflow template and migration script so existing repositories also add the `merge_group` trigger and skip the job unless the event is `pull_request_target`, allowing the workflow to be used as a required merge-queue check.
24-
- Added a migration step to remove the copilot review request from the Protect version branch protection rules. This was also done by v0.15.0 in theory, but the migration step was wrong and didn't update it properly.
37+
<!-- Here bug fixes for cookiecutter specifically -->

cookiecutter/migrate.py

Lines changed: 2 additions & 309 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,8 @@
2020
And remember to follow any manual instructions for each run.
2121
""" # noqa: E501
2222

23-
# pylint: disable=too-many-lines, too-many-locals, too-many-branches
23+
# R0801 is similarity detection, as the template is always similar to the current script
24+
# pylint: disable=too-many-lines, too-many-locals, too-many-branches, R0801
2425

2526
import hashlib
2627
import json
@@ -38,18 +39,6 @@ def main() -> None:
3839
"""Run the migration steps."""
3940
# Add a separation line like this one after each migration step.
4041
print("=" * 72)
41-
print("Fixing repo-config migration merge queue trigger...")
42-
migrate_repo_config_migration_merge_group_trigger()
43-
print("=" * 72)
44-
print("Fixing mkdocstrings-python v2 paths for api repos...")
45-
migrate_api_mkdocs_mkdocstrings_paths()
46-
print("=" * 72)
47-
print("Migrating protolint and publish-to-pypi runners to ubuntu-24.04...")
48-
migrate_docker_based_runners()
49-
print("=" * 72)
50-
print("Updating 'Protect version branches' GitHub ruleset...")
51-
migrate_protect_version_branches_ruleset()
52-
print("=" * 72)
5342
print()
5443

5544
if _manual_steps:
@@ -72,302 +61,6 @@ def main() -> None:
7261
print()
7362

7463

75-
def migrate_api_mkdocs_mkdocstrings_paths() -> None:
76-
"""Fix the mkdocstrings paths migration for api repositories."""
77-
project_type = read_cookiecutter_str_var("type")
78-
if project_type is None:
79-
manual_step(
80-
"Unable to detect the cookiecutter project type from "
81-
".cookiecutter-replay.json; if this is an api project and "
82-
'`mkdocs.yml` still has `paths: ["py"]` nested under '
83-
"`handlers.python.options`, move it out of `options`."
84-
)
85-
return
86-
87-
if project_type != "api":
88-
print(" Skipping mkdocs.yml (not an api project)")
89-
return
90-
91-
filepath = Path("mkdocs.yml")
92-
if not filepath.exists():
93-
manual_step(
94-
"Unable to find mkdocs.yml; if this project uses mkdocs, "
95-
'make sure the `paths: ["py"]` config is under '
96-
"`handlers.python`, not `handlers.python.options`."
97-
)
98-
return
99-
100-
old = ' options:\n paths: ["py"]'
101-
new = ' paths: ["py"]\n options:'
102-
current_template = (
103-
' handlers:\n paths: ["py"]\n python:\n options:'
104-
)
105-
content = filepath.read_text(encoding="utf-8")
106-
107-
if old in content:
108-
replace_file_contents_atomically(filepath, old, new, count=1)
109-
print(f" Updated {filepath}: moved mkdocstrings api paths out of options")
110-
return
111-
112-
if new in content or current_template in content:
113-
print(f" Skipped {filepath}: mkdocstrings api paths already updated")
114-
return
115-
116-
manual_step(
117-
f"Could not find the api mkdocstrings path pattern in {filepath}. "
118-
'If `paths: ["py"]` is still nested under `handlers.python.options`, '
119-
"move it out of `options` according to the latest template."
120-
)
121-
122-
123-
def migrate_docker_based_runners() -> None:
124-
"""Migrate Docker-based jobs to use ubuntu-24.04 runners.
125-
126-
The ``protolint`` and ``publish-to-pypi`` jobs need Docker, which is not
127-
available on ``ubuntu-slim``. They should therefore run on
128-
``ubuntu-24.04`` instead.
129-
"""
130-
workflows_dir = Path(".github") / "workflows"
131-
protolint_new = (
132-
" protolint:\n"
133-
" name: Check proto files with protolint\n"
134-
" runs-on: ubuntu-24.04"
135-
)
136-
publish_to_pypi_new = (
137-
' needs: ["create-github-release"]\n runs-on: ubuntu-24.04'
138-
)
139-
migrations: dict[str, list[dict[str, Any]]] = {}
140-
141-
protolint_rule = {
142-
"job": "protolint",
143-
"required_for": "api repos",
144-
"job_marker": " protolint:\n",
145-
"old": [
146-
(
147-
" protolint:\n"
148-
" name: Check proto files with protolint\n"
149-
" runs-on: ubuntu-slim"
150-
),
151-
(
152-
" protolint:\n"
153-
" name: Check proto files with protolint\n"
154-
" runs-on: ubuntu-latest"
155-
),
156-
],
157-
"new": protolint_new,
158-
}
159-
project_type = read_cookiecutter_str_var("type")
160-
if project_type is None:
161-
manual_step(
162-
"Unable to detect the cookiecutter project type from "
163-
".cookiecutter-replay.json; cannot determine whether the protolint "
164-
"runner migration applies."
165-
)
166-
elif project_type == "api":
167-
migrations.setdefault("ci-pr.yaml", []).append(protolint_rule)
168-
migrations.setdefault("ci.yaml", []).append(protolint_rule)
169-
else:
170-
print(" Skipping protolint runner migration (not an api project)")
171-
172-
github_org = read_cookiecutter_str_var("github_org")
173-
if github_org is None:
174-
manual_step(
175-
"Unable to detect the cookiecutter GitHub organization from "
176-
".cookiecutter-replay.json; cannot determine whether the "
177-
"publish-to-pypi runner migration applies."
178-
)
179-
elif github_org == "frequenz-floss":
180-
migrations.setdefault("ci.yaml", []).append(
181-
{
182-
"job": "publish-to-pypi",
183-
"required_for": "frequenz-floss repos",
184-
"job_marker": " publish-to-pypi:\n",
185-
"old": [
186-
(' needs: ["create-github-release"]\n runs-on: ubuntu-slim'),
187-
(
188-
' needs: ["create-github-release"]\n'
189-
" runs-on: ubuntu-latest"
190-
),
191-
],
192-
"new": publish_to_pypi_new,
193-
}
194-
)
195-
else:
196-
print(" Skipping publish-to-pypi runner migration (not a frequenz-floss repo)")
197-
198-
for filename, rules in migrations.items():
199-
filepath = workflows_dir / filename
200-
if not filepath.exists():
201-
for rule in rules:
202-
manual_step(
203-
f" Expected to find {filepath} for job {rule['job']} in "
204-
f"{rule['required_for']}. Please add or update that job to use "
205-
"`runs-on: ubuntu-24.04`."
206-
)
207-
continue
208-
209-
for rule in rules:
210-
job = rule["job"]
211-
required_for = rule["required_for"]
212-
job_marker = rule["job_marker"]
213-
new = rule["new"]
214-
content = filepath.read_text(encoding="utf-8")
215-
216-
if job_marker not in content:
217-
manual_step(
218-
f" Expected to find job {job} in {filepath} for "
219-
f"{required_for}. Please update it to use "
220-
"`runs-on: ubuntu-24.04`."
221-
)
222-
continue
223-
224-
if new in content:
225-
print(f" Skipped {filepath}: runner already up to date for job {job}")
226-
continue
227-
228-
for old in rule["old"]:
229-
if old in content:
230-
replace_file_contents_atomically(
231-
filepath, old, new, content=content
232-
)
233-
print(f" Updated {filepath}: migrated runner for job {job}")
234-
break
235-
else:
236-
manual_step(
237-
f" Pattern not found in {filepath}: please switch the runner "
238-
f"for job {job} to `runs-on: ubuntu-24.04`."
239-
)
240-
241-
242-
def migrate_repo_config_migration_merge_group_trigger() -> None:
243-
"""Trigger repo-config migration in the merge queue."""
244-
filepath = Path(".github/workflows/repo-config-migration.yaml")
245-
if not filepath.exists():
246-
manual_step(
247-
"Unable to find .github/workflows/repo-config-migration.yaml; if this "
248-
"project uses the repo-config migration workflow, update it to trigger "
249-
"on `merge_group` and skip the job unless the event is "
250-
"`pull_request_target`."
251-
)
252-
return
253-
254-
content = filepath.read_text(encoding="utf-8")
255-
old_on = (
256-
"on:\n"
257-
" pull_request_target:\n"
258-
" types: [opened, synchronize, reopened, labeled, unlabeled]\n"
259-
)
260-
new_on = (
261-
"on:\n"
262-
" merge_group: # To allow using this as a required check for merging\n"
263-
" pull_request_target:\n"
264-
" types: [opened, synchronize, reopened, labeled, unlabeled]\n"
265-
)
266-
old_if = (
267-
" if: contains(github.event.pull_request.title, 'the repo-config group')"
268-
)
269-
new_if = (
270-
" # Skip if it was triggered by the merge queue. We only need the workflow to\n"
271-
' # be executed to meet the "Required check" condition for merging, but we\n'
272-
" # don't need to actually run the job, having the job present as Skipped is\n"
273-
" # enough.\n"
274-
" if: |\n"
275-
" github.event_name == 'pull_request_target' &&\n"
276-
" contains(github.event.pull_request.title, 'the repo-config group')"
277-
)
278-
279-
updated = content
280-
if old_on in updated:
281-
updated = updated.replace(old_on, new_on, 1)
282-
283-
if old_if in updated:
284-
updated = updated.replace(old_if, new_if, 1)
285-
286-
if updated != content:
287-
replace_file_atomically(filepath, updated)
288-
print(
289-
" Updated .github/workflows/repo-config-migration.yaml: added "
290-
"merge_group trigger"
291-
)
292-
return
293-
294-
if new_on in content and new_if in content:
295-
print(
296-
" Skipped .github/workflows/repo-config-migration.yaml: merge queue "
297-
"trigger already configured"
298-
)
299-
return
300-
301-
manual_step(
302-
"Could not find the expected repo-config migration workflow pattern in "
303-
".github/workflows/repo-config-migration.yaml. If this repository uses "
304-
"that workflow, add the `merge_group` trigger and make the job run only "
305-
"for `pull_request_target` events according to the latest template."
306-
)
307-
308-
309-
def migrate_protect_version_branches_ruleset() -> None:
310-
"""Update the 'Protect version branches' GitHub ruleset.
311-
312-
Uses the GitHub API (via ``gh`` CLI) to check whether the
313-
'Protect version branches' ruleset on the current repository is aligned
314-
with the current template. Recent template changes include:
315-
316-
* Removing the ``copilot_code_review`` rule.
317-
318-
If the ruleset is already aligned, prints an informational message.
319-
If it needs updating, applies the changes via the API without removing
320-
any existing required status checks.
321-
If the ruleset is not found at all, issues a manual-step message that
322-
points the user to the docs.
323-
"""
324-
rule_name = "Protect version branches"
325-
docs_url = (
326-
"https://frequenz-floss.github.io/frequenz-repo-config-python/"
327-
"user-guide/start-a-new-project/configure-github/#rulesets"
328-
)
329-
330-
# Build a link to the repo's ruleset settings for manual-step messages.
331-
ruleset_url = get_ruleset_settings_url() or docs_url
332-
333-
# ── Fetch ruleset details ────────────────────────────────────────
334-
ruleset = get_ruleset(rule_name)
335-
if ruleset is None:
336-
manual_step(
337-
f"The '{rule_name}' GitHub ruleset was not found (or the gh CLI "
338-
"is not available / the API call failed). "
339-
"Please check whether it should exist for this repository. "
340-
f"If it should, import it following the instructions at: {docs_url}"
341-
)
342-
return
343-
344-
# ── Detect and apply changes in-memory ───────────────────────────────
345-
changes: list[str] = []
346-
updated_rules = []
347-
348-
for rule in ruleset.get("rules", []):
349-
if rule.get("type") == "copilot_code_review":
350-
changes.append("remove copilot_code_review")
351-
continue
352-
updated_rules.append(rule)
353-
354-
if not changes:
355-
print(f" Ruleset '{rule_name}' is already up to date")
356-
return
357-
358-
# ── Push the update ───────────────────────────────────────────────────
359-
ruleset["rules"] = updated_rules
360-
if not update_ruleset(ruleset["id"], ruleset):
361-
manual_step(
362-
f"Failed to update the '{rule_name}' ruleset via the GitHub API. "
363-
f"Please apply the following changes manually at {ruleset_url}: "
364-
+ "; ".join(changes)
365-
)
366-
return
367-
368-
print(f" Updated ruleset '{rule_name}': " + ", ".join(changes))
369-
370-
37164
def apply_patch(patch_content: str) -> None:
37265
"""Apply a patch using the patch utility."""
37366
subprocess.run(["patch", "-p1"], input=patch_content.encode(), check=True)

0 commit comments

Comments
 (0)