Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 12 additions & 9 deletions RELEASE_NOTES.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,33 +2,36 @@

## Summary

This release adds a new workflow for Dependabot auto-merge and updates mkdocstrings to v2.
<!-- Here goes a general summary of what this release is about -->

## Upgrading

<!-- Here goes notes on how to upgrade from previous versions, including deprecations and what they should be replaced with -->

### Cookiecutter template

All upgrading should be done via the migration script or regenerating the templates.

```bash
curl -sSL https://raw.githubusercontent.com/frequenz-floss/frequenz-repo-config-python/v0.14/cookiecutter/migrate.py | python3
curl -sSL https://raw.githubusercontent.com/frequenz-floss/frequenz-repo-config-python/v0.12/cookiecutter/migrate.py | python3
```

But you might still need to adapt your code, just have a look at the script output for further instructions.
But you might still need to adapt your code:

<!-- Here upgrade steps for cookiecutter specifically -->

## New Features

* `mkdocstrings-python` v2 is now supported.
<!-- Here goes the main new features and examples or instructions on how to use them -->

### Cookiecutter template

- Dependencies have been updated.
- New warning ignores for protobuf gencode versions in pytest.
- Added Dependabot auto-merge workflow using `frequenz-floss/dependabot-auto-approve` action.
<!-- Here new features for cookiecutter specifically -->

## Bug Fixes

<!-- Here goes notable bug fixes that are worth a special mention or explanation -->

### Cookiecutter template

- mkdocstrings: Move `paths` key to the right section in `mkdocs.yml`.
- Fix invalid YAML syntax in Dependabot workflow template.
<!-- Here bug fixes for cookiecutter specifically -->
232 changes: 1 addition & 231 deletions cookiecutter/migrate.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,251 +21,21 @@
""" # noqa: E501

import hashlib
import json
import os
import subprocess
import tempfile
from pathlib import Path
from typing import Any, SupportsIndex
from typing import SupportsIndex


def main() -> None:
    """Run the migration steps."""
    separator = "=" * 72

    # Print a separation line like this one after each migration step.
    print(separator)
    print("Creating Dependabot auto-merge workflow...")
    create_dependabot_auto_merge_workflow()

    print(separator)
    print("Disabling CODEOWNERS review requirement in GitHub ruleset...")
    disable_codeowners_review_requirement()

    print(separator)
    print("Updating the mkdocs.yml for mkdocstrings-python v2 compatibility...")
    update_mkdocs_yml_mkdocstrings_python_v2()

    print(separator)
    print("Migration script finished. Remember to follow any manual instructions.")
    print(separator)


def update_mkdocs_yml_mkdocstrings_python_v2() -> None:
    """Update ``mkdocs.yml`` for mkdocstrings-python v2 compatibility.

    Renames the mkdocstrings ``import:`` key to ``inventories:`` and moves
    the ``paths`` key out of the ``options:`` section, as required by the
    v2 configuration layout.
    """
    # v2 renamed the objects-inventory key from 'import' to 'inventories'.
    replace_file_contents_atomically(
        filepath=Path("mkdocs.yml"),
        old=" import:",
        new=" inventories:",
    )
    # v2 expects 'paths' at the handler level rather than under 'options',
    # so swap the two lines around.
    # NOTE(review): the leading whitespace inside these literals must match
    # the target mkdocs.yml exactly for the replacement to fire — confirm
    # against the template's actual indentation.
    replace_file_contents_atomically(
        filepath=Path("mkdocs.yml"),
        old="""\
options:
paths: ["src"]""",
        new="""\
paths: ["src"]
options:""",
    )


def create_dependabot_auto_merge_workflow() -> None:
    """Create the Dependabot auto-merge workflow file.

    Writes ``.github/workflows/auto-dependabot.yaml`` (creating the directory
    if needed), overwriting any existing file. The workflow runs only for PRs
    opened by ``dependabot[bot]`` and delegates approval/merging to the
    pinned ``frequenz-floss/dependabot-auto-approve`` action.
    """
    workflow_dir = Path(".github") / "workflows"
    workflow_dir.mkdir(parents=True, exist_ok=True)

    # Fix: the template previously emitted flat (un-indented) YAML, which is
    # not a valid GitHub Actions workflow — keys must be nested under
    # on/permissions/jobs/steps/with.
    workflow_content = """\
name: Auto-merge Dependabot PR

on:
  pull_request:

permissions:
  contents: write
  pull-requests: write

jobs:
  auto-merge:
    if: github.actor == 'dependabot[bot]'
    runs-on: ubuntu-latest
    steps:
      - name: Auto-merge Dependabot PR
        uses: frequenz-floss/dependabot-auto-approve@3cad5f42e79296505473325ac6636be897c8b8a1 # v1.3.2
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          dependency-type: 'all'
          auto-merge: 'true'
          merge-method: 'merge'
          add-label: 'tool:auto-merged'
"""  # noqa: E501

    workflow_file = workflow_dir / "auto-dependabot.yaml"
    workflow_file.write_text(workflow_content, encoding="utf-8")
    print(f"Created/Updated Dependabot auto-merge workflow at {workflow_file}")


def get_default_branch() -> str | None:
    """Get the default branch name from GitHub.

    Returns:
        The default branch name, or None if it cannot be determined.
    """
    command = ["gh", "api", "repos/:owner/:repo", "--jq", ".default_branch"]
    try:
        completed = subprocess.run(
            command,
            capture_output=True,
            text=True,
            check=True,
        )
    except subprocess.CalledProcessError as e:
        print(f"Failed to get default branch: {e}")
        return None

    default_branch = completed.stdout.strip()
    print(f"Default branch: {default_branch}")
    return default_branch


def find_version_branch_ruleset() -> dict[str, Any] | None:
    """Find the 'Protect version branches' ruleset.

    Returns:
        The ruleset configuration, or None if not found.
    """
    try:
        response = subprocess.run(
            ["gh", "api", "repos/:owner/:repo/rulesets"],
            capture_output=True,
            text=True,
            check=True,
        )
    except subprocess.CalledProcessError as e:
        print(f"Failed to fetch rulesets: {e}")
        return None

    # Return the first ruleset matching the expected name, if any.
    matching = (
        entry
        for entry in json.loads(response.stdout)
        if entry.get("name") == "Protect version branches"
    )
    return next(matching, None)  # type: ignore[no-any-return]


def update_ruleset(ruleset_id: int, ruleset_config: dict[str, Any]) -> bool:
    """Update a GitHub ruleset configuration.

    Args:
        ruleset_id: The ID of the ruleset to update.
        ruleset_config: The updated ruleset configuration.

    Returns:
        True if the update was successful, False otherwise.
    """
    # Only the fields accepted by the PUT endpoint go into the payload.
    required_keys = ("name", "target", "enforcement", "conditions", "rules")
    update_payload = {key: ruleset_config[key] for key in required_keys}
    if "bypass_actors" in ruleset_config:
        update_payload["bypass_actors"] = ruleset_config["bypass_actors"]

    # `gh api --input` reads the request body from a file, so dump the
    # payload to a temporary JSON file first.
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
        json.dump(update_payload, f, indent=2)
        temp_file = f.name

    try:
        subprocess.run(
            [
                "gh",
                "api",
                "-X",
                "PUT",
                f"repos/:owner/:repo/rulesets/{ruleset_id}",
                "--input",
                temp_file,
            ],
            capture_output=True,
            check=True,
        )
    except subprocess.CalledProcessError as e:
        print(f"Error updating ruleset: {e}")
        return False
    else:
        return True
    finally:
        os.unlink(temp_file)


def disable_codeowners_review_requirement() -> None:
    """Disable CODEOWNERS review requirement in GitHub repository ruleset.

    Locates the 'Protect version branches' ruleset via the ``gh`` CLI and
    turns off its ``require_code_owner_review`` pull-request parameter.
    Every failure path degrades to printing manual instructions via
    ``manual_step`` instead of raising, since ruleset updates require admin
    permissions that the user running the migration may lack.
    """
    # Get repository info — only used to build a direct settings URL for the
    # manual-step messages; a failure here is non-fatal.
    try:
        result = subprocess.run(
            ["gh", "repo", "view", "--json", "owner,name"],
            capture_output=True,
            text=True,
            check=True,
        )
        repo_info = json.loads(result.stdout)
        org = repo_info["owner"]["login"]
        repo = repo_info["name"]
        ruleset_url = f"https://github.com/{org}/{repo}/settings/rules"
    except subprocess.CalledProcessError:
        # Fall back to a generic description of where the setting lives.
        ruleset_url = "GitHub repository settings > Rules"

    # Probe API access; the branch name itself is not used further —
    # presumably this is a connectivity/permissions check. TODO confirm.
    if get_default_branch() is None:
        manual_step(
            "Failed to get default branch. "
            "Please manually disable the CODEOWNERS review requirement in the "
            f"'Protect version branches' ruleset at: {ruleset_url}"
        )
        return

    version_branch_ruleset = find_version_branch_ruleset()
    if not version_branch_ruleset:
        manual_step(
            "'Protect version branches' ruleset not found. "
            "Please manually disable the CODEOWNERS review requirement at: "
            f"{ruleset_url}"
        )
        return

    ruleset_id = version_branch_ruleset["id"]
    print(f"Found ruleset ID: {ruleset_id}")

    # Fetch the full ruleset configuration (the list endpoint used above
    # does not include the rules), so we can PUT back a modified copy.
    try:
        result = subprocess.run(
            ["gh", "api", f"repos/:owner/:repo/rulesets/{ruleset_id}"],
            capture_output=True,
            text=True,
            check=True,
        )
        ruleset_config = json.loads(result.stdout)
    except subprocess.CalledProcessError as e:
        manual_step(
            f"Failed to fetch ruleset configuration: {e}. "
            "This action requires admin permissions. "
            f"Please manually disable the CODEOWNERS review requirement at: {ruleset_url}"
        )
        return

    # Flip require_code_owner_review off in place; only the first matching
    # pull_request rule is modified.
    updated = False
    for rule in ruleset_config.get("rules", []):
        if rule.get("type") == "pull_request":
            if rule.get("parameters", {}).get("require_code_owner_review"):
                rule["parameters"]["require_code_owner_review"] = False
                updated = True
            break

    if not updated:
        print("CODEOWNERS review requirement already disabled.")
        return

    if update_ruleset(ruleset_id, ruleset_config):
        print("Successfully disabled CODEOWNERS review requirement in GitHub ruleset.")
    else:
        manual_step(
            "Failed to update GitHub ruleset. This action requires admin permissions. "
            "Please manually disable the CODEOWNERS review requirement in the "
            f"'Protect version branches' ruleset at: {ruleset_url}"
        )


def apply_patch(patch_content: str) -> None:
    """Apply a patch using the patch utility."""
    # -p1 matches git-style diff paths (strips the leading a/ and b/).
    encoded_patch = patch_content.encode()
    subprocess.run(["patch", "-p1"], input=encoded_patch, check=True)
Expand Down