diff --git a/.github/workflows/agentic_commands.yml b/.github/workflows/agentic_commands.yml
new file mode 100644
index 0000000000..c2183bdcf6
--- /dev/null
+++ b/.github/workflows/agentic_commands.yml
@@ -0,0 +1,107 @@
+# ___ _ _
+# / _ \ | | (_)
+# | |_| | __ _ ___ _ __ | |_ _ ___
+# | _ |/ _` |/ _ \ '_ \| __| |/ __|
+# | | | | (_| | __/ | | | |_| | (__
+# \_| |_/\__, |\___|_| |_|\__|_|\___|
+# __/ |
+# _ _ |___/
+# | | | | / _| |
+# | | | | ___ _ __ _ __| |_| | _____ ____
+# | |/\| |/ _ \ '__| |/ /| _| |/ _ \ \ /\ / / ___|
+# \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \
+# \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/
+#
+# This file was automatically generated by pkg/workflow/central_slash_command_workflow.go. DO NOT EDIT.
+#
+# To regenerate this workflow, run:
+# gh aw compile
+# Not all edits will cause changes to this file.
+#
+# For more information: https://github.github.com/gh-aw/introduction/overview/
+#
+name: "Agentic Slash Command Trigger"
+
+on:
+ issues:
+ types: [edited, opened, reopened]
+ issue_comment:
+ types: [created, edited]
+ pull_request:
+ types: [edited, opened, reopened]
+ pull_request_review_comment:
+ types: [created, edited]
+ discussion:
+ types: [created, edited]
+ discussion_comment:
+ types: [created, edited]
+
+permissions: {}
+
+jobs:
+ route:
+ runs-on: ubuntu-slim
+ permissions:
+ actions: write
+ contents: read
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+
+ - name: Route slash command
+ uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
+ env:
+ GH_AW_SLASH_ROUTING: '{"archie":[{"workflow":"archie","events":["issue_comment","issues","pull_request","pull_request_comment"]}],"cloclo":[{"workflow":"cloclo","events":["discussion","discussion_comment","issue_comment","issues","pull_request","pull_request_comment","pull_request_review_comment"]}]}'
+ with:
+ script: |
+            // Map of command name -> [{workflow, events}] routes, injected by the generator.
+            const routeMap = JSON.parse(process.env.GH_AW_SLASH_ROUTING || "{}");
+            const bodyByEvent = {
+              issues: context.payload?.issue?.body ?? "",
+              pull_request: context.payload?.pull_request?.body ?? "",
+              issue_comment: context.payload?.comment?.body ?? "",
+              pull_request_review_comment: context.payload?.comment?.body ?? "",
+              discussion: context.payload?.discussion?.body ?? "",
+              discussion_comment: context.payload?.comment?.body ?? "",
+            };
+
+            function eventIdentifier() {
+              if (context.eventName !== "issue_comment") {
+                return context.eventName;
+              }
+              return context.payload?.issue?.pull_request ? "pull_request_comment" : "issue_comment";
+            }
+
+            const text = bodyByEvent[context.eventName] ?? "";
+            const firstWord = String(text).trim().split(/\s+/)[0] ?? "";
+            if (!firstWord.startsWith("/")) {
+              core.info("No slash command found at start of payload text; skipping dispatch.");
+              return;
+            }
+
+            const commandName = firstWord.slice(1);
+            const identifier = eventIdentifier();
+            const routes = (Object.hasOwn(routeMap, commandName) ? routeMap[commandName] : []).filter(route => Array.isArray(route.events) && route.events.includes(identifier)); // hasOwn: user-controlled names like "/constructor" must not hit Object.prototype
+            if (routes.length === 0) {
+              core.info("No centralized routes matched command '/" + commandName + "' for event '" + identifier + "'.");
+              return;
+            }
+
+            const { setupGlobals } = require(process.env.GITHUB_WORKSPACE + "/actions/setup/js/setup_globals.cjs");
+            setupGlobals(core, github, context, exec, io, getOctokit);
+            const { buildAwContext } = require(process.env.GITHUB_WORKSPACE + "/actions/setup/js/aw_context.cjs");
+
+            const ref = process.env.GITHUB_HEAD_REF ? "refs/heads/" + process.env.GITHUB_HEAD_REF : (process.env.GITHUB_REF || context.ref || "refs/heads/" + (context.payload?.repository?.default_branch || "main"));
+            for (const route of routes) {
+              const awContext = buildAwContext();
+              awContext.command_name = commandName;
+              await github.rest.actions.createWorkflowDispatch({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                workflow_id: route.workflow + ".lock.yml",
+                ref,
+                inputs: {
+                  aw_context: JSON.stringify(awContext),
+                },
+              });
+              core.info("Dispatched '" + route.workflow + "' for '/" + commandName + "'");
+            }
diff --git a/.github/workflows/archie.lock.yml b/.github/workflows/archie.lock.yml
index 99837d07ee..f5e440d1d0 100644
--- a/.github/workflows/archie.lock.yml
+++ b/.github/workflows/archie.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"25e043a10b12ae51af58407610eb74c0d4d1505510e0ae0ee2a45da4550964b7","strict":true,"agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"e427885be33ad6455b1ebed0e9281d293ee3a8f441a448a2d6c355823d8c6ec8","strict":true,"agent_id":"copilot"}
# gh-aw-manifest: {"version":1,"secrets":["GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GH_AW_OTEL_ENDPOINT","GH_AW_OTEL_HEADERS","GITHUB_TOKEN"],"actions":[{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"3a2844b7e9c422d3c10d287c895573f7108da1b3","version":"v9.0.0"},{"repo":"actions/setup-node","sha":"48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e","version":"v6.4.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.43"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.43"},{"image":"ghcr.io/github/gh-aw-firewall/cli-proxy:0.25.43"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.43"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.3.6","digest":"sha256:2bb8eef86006a4c5963c55616a9c51c32f27bfdecb023b8aa6f91f6718d9171c","pinned_image":"ghcr.io/github/gh-aw-mcpg:v0.3.6@sha256:2bb8eef86006a4c5963c55616a9c51c32f27bfdecb023b8aa6f91f6718d9171c"},{"image":"ghcr.io/github/github-mcp-server:v1.0.3","digest":"sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959","pinned_image":"ghcr.io/github/github-mcp-server:v1.0.3@sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959"},{"image":"ghcr.io/github/serena-mcp-server:latest","digest":"sha256:bf343399e3725c45528f531a230f3a04521d4cdef29f9a5af6282ff0d3c393c5","pinned_image":"ghcr.io/github/serena-mcp-server:latest@sha256:bf343399e3725c45528f531a230f3a04521d4cdef29f9a5af6282ff0d3c393c5"},{"image":"node:lts-alpine","digest":"sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f","pinned_image":"node:lts-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f"}]}
# ___ _ _
# / _ \ | | (_)
@@ -57,20 +57,13 @@
name: "Archie"
"on":
- issue_comment:
- types:
- - created
- - edited
- issues:
- types:
- - opened
- - edited
- - reopened
- pull_request:
- types:
- - opened
- - edited
- - reopened
+ workflow_dispatch:
+ inputs:
+ aw_context:
+ default: ""
+ description: Agent caller context (used internally by Agentic Workflows).
+ required: false
+ type: string
permissions: {}
@@ -89,13 +82,11 @@ env:
jobs:
activation:
needs: pre_activation
- if: "needs.pre_activation.outputs.activated == 'true' && (github.event_name == 'issues' && (startsWith(github.event.issue.body, '/archie ') || startsWith(github.event.issue.body, '/archie\n') || github.event.issue.body == '/archie') || github.event_name == 'issue_comment' && (startsWith(github.event.comment.body, '/archie ') || startsWith(github.event.comment.body, '/archie\n') || github.event.comment.body == '/archie') && github.event.issue.pull_request == null || github.event_name == 'issue_comment' && (startsWith(github.event.comment.body, '/archie ') || startsWith(github.event.comment.body, '/archie\n') || github.event.comment.body == '/archie') && github.event.issue.pull_request != null || github.event_name == 'pull_request' && (startsWith(github.event.pull_request.body, '/archie ') || startsWith(github.event.pull_request.body, '/archie\n') || github.event.pull_request.body == '/archie'))"
+ if: needs.pre_activation.outputs.activated == 'true'
runs-on: ubuntu-slim
permissions:
actions: read
contents: read
- issues: write
- pull-requests: write
outputs:
body: ${{ steps.sanitized.outputs.body }}
comment_id: ${{ steps.add-comment.outputs.comment-id }}
@@ -248,20 +239,20 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_2e7019d9465d12fb_EOF'
+ cat << 'GH_AW_PROMPT_dcd5b34c2b20e0e6_EOF'
- GH_AW_PROMPT_2e7019d9465d12fb_EOF
+ GH_AW_PROMPT_dcd5b34c2b20e0e6_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_2e7019d9465d12fb_EOF'
+ cat << 'GH_AW_PROMPT_dcd5b34c2b20e0e6_EOF'
Tools: add_comment, missing_tool, missing_data, noop
- GH_AW_PROMPT_2e7019d9465d12fb_EOF
+ GH_AW_PROMPT_dcd5b34c2b20e0e6_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/mcp_cli_tools_prompt.md"
- cat << 'GH_AW_PROMPT_2e7019d9465d12fb_EOF'
+ cat << 'GH_AW_PROMPT_dcd5b34c2b20e0e6_EOF'
The following GitHub context information is available for this workflow:
{{#if __GH_AW_GITHUB_ACTOR__ }}
@@ -290,12 +281,12 @@ jobs:
{{/if}}
- GH_AW_PROMPT_2e7019d9465d12fb_EOF
+ GH_AW_PROMPT_dcd5b34c2b20e0e6_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/cli_proxy_with_safeoutputs_prompt.md"
if [ "$GITHUB_EVENT_NAME" = "issue_comment" ] && [ -n "$GH_AW_IS_PR_COMMENT" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review_comment" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review" ]; then
cat "${RUNNER_TEMP}/gh-aw/prompts/pr_context_prompt.md"
fi
- cat << 'GH_AW_PROMPT_2e7019d9465d12fb_EOF'
+ cat << 'GH_AW_PROMPT_dcd5b34c2b20e0e6_EOF'
## Serena Code Analysis
@@ -332,7 +323,7 @@ jobs:
{{#runtime-import .github/workflows/shared/observability-otlp.md}}
{{#runtime-import .github/workflows/shared/noop-reminder.md}}
{{#runtime-import .github/workflows/archie.md}}
- GH_AW_PROMPT_2e7019d9465d12fb_EOF
+ GH_AW_PROMPT_dcd5b34c2b20e0e6_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
@@ -559,9 +550,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_756f1bb9f50d6955_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_ca474fdaea1f47af_EOF'
{"add_comment":{"max":1},"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"report_incomplete":{}}
- GH_AW_SAFE_OUTPUTS_CONFIG_756f1bb9f50d6955_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_ca474fdaea1f47af_EOF
- name: Generate Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -749,7 +740,7 @@ jobs:
mkdir -p /home/runner/.copilot
GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
- cat << GH_AW_MCP_CONFIG_0dc2d1bd00cd3635_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
+ cat << GH_AW_MCP_CONFIG_d281e39c2bef3170_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
{
"mcpServers": {
"safeoutputs": {
@@ -809,7 +800,7 @@ jobs:
}
}
}
- GH_AW_MCP_CONFIG_0dc2d1bd00cd3635_EOF
+ GH_AW_MCP_CONFIG_d281e39c2bef3170_EOF
- name: Mount MCP servers as CLIs
id: mount-mcp-clis
continue-on-error: true
@@ -1441,7 +1432,6 @@ jobs:
}
pre_activation:
- if: "(github.event_name != 'issue_comment' && github.event_name != 'pull_request_review_comment' || contains(fromJSON('[\"OWNER\",\"MEMBER\",\"COLLABORATOR\"]'), github.event.comment.author_association)) && (github.event_name == 'issues' && (startsWith(github.event.issue.body, '/archie ') || startsWith(github.event.issue.body, '/archie\n') || github.event.issue.body == '/archie') || github.event_name == 'issue_comment' && (startsWith(github.event.comment.body, '/archie ') || startsWith(github.event.comment.body, '/archie\n') || github.event.comment.body == '/archie') && github.event.issue.pull_request == null || github.event_name == 'issue_comment' && (startsWith(github.event.comment.body, '/archie ') || startsWith(github.event.comment.body, '/archie\n') || github.event.comment.body == '/archie') && github.event.issue.pull_request != null || github.event_name == 'pull_request' && (startsWith(github.event.pull_request.body, '/archie ') || startsWith(github.event.pull_request.body, '/archie\n') || github.event.pull_request.body == '/archie'))"
runs-on: ubuntu-slim
permissions:
contents: read
diff --git a/.github/workflows/archie.md b/.github/workflows/archie.md
index 6c3484d3de..cbdb5c90b0 100644
--- a/.github/workflows/archie.md
+++ b/.github/workflows/archie.md
@@ -4,6 +4,7 @@ description: Generates Mermaid diagrams to visualize issue and pull request rela
on:
slash_command:
name: archie
+ strategy: centralized
events: [issues, issue_comment, pull_request, pull_request_comment]
reaction: eyes
status-comment: true
diff --git a/.github/workflows/cloclo.lock.yml b/.github/workflows/cloclo.lock.yml
index dce0792f2f..8fbf91cad1 100644
--- a/.github/workflows/cloclo.lock.yml
+++ b/.github/workflows/cloclo.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"7580a647bab899ad36421143e1965286e78b15bbb5431b173f3c6afa33b428c2","strict":true,"agent_id":"claude"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"d6432dc724de4e1204e3c0a45bbdd44e700c72795e63ab0638639ebab2b22cec","strict":true,"agent_id":"claude"}
# gh-aw-manifest: {"version":1,"secrets":["ANTHROPIC_API_KEY","GH_AW_CI_TRIGGER_TOKEN","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GH_AW_OTEL_ENDPOINT","GH_AW_OTEL_HEADERS","GITHUB_TOKEN"],"actions":[{"repo":"actions/cache/restore","sha":"27d5ce7f107fe9357f9df03efb73ab90386fccae","version":"v5.0.5"},{"repo":"actions/cache/save","sha":"27d5ce7f107fe9357f9df03efb73ab90386fccae","version":"v5.0.5"},{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"3a2844b7e9c422d3c10d287c895573f7108da1b3","version":"v9.0.0"},{"repo":"actions/setup-go","sha":"4a3601121dd01d1626a1e23e37211e3254c1c06c","version":"v6.4.0"},{"repo":"actions/setup-node","sha":"48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e","version":"v6.4.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"},{"repo":"docker/build-push-action","sha":"bcafcacb16a39f128d818304e6c9c0c18556b85f","version":"v7.1.0"},{"repo":"docker/setup-buildx-action","sha":"4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd","version":"v4.0.0"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.43"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.43"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.43"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.3.6","digest":"sha256:2bb8eef86006a4c5963c55616a9c51c32f27bfdecb023b8aa6f91f6718d9171c","pinned_image":"ghcr.io/github/gh-aw-mcpg:v0.3.6@sha256:2bb8eef86006a4c5963c55616a9c51c32f27bfdecb023b8aa6f91f6718d9171c"},{"image":"ghcr.io/github/github-mcp-server:v1.0.3","digest":"sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959","pinned_image":"ghcr.io/github/github-mcp-server:v1.0.3@sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959"},{"image":"ghcr.io/github/serena-mcp-server:latest","digest":"sha256:bf343399e3725
c45528f531a230f3a04521d4cdef29f9a5af6282ff0d3c393c5","pinned_image":"ghcr.io/github/serena-mcp-server:latest@sha256:bf343399e3725c45528f531a230f3a04521d4cdef29f9a5af6282ff0d3c393c5"},{"image":"node:lts-alpine","digest":"sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f","pinned_image":"node:lts-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f"}]}
# ___ _ _
# / _ \ | | (_)
@@ -66,33 +66,20 @@ name: "/cloclo"
"on":
discussion:
types:
- - created
- - edited
- labeled
- discussion_comment:
- types:
- - created
- - edited
- issue_comment:
- types:
- - created
- - edited
issues:
types:
- - opened
- - edited
- - reopened
- labeled
pull_request:
types:
- - opened
- - edited
- - reopened
- labeled
- pull_request_review_comment:
- types:
- - created
- - edited
+ workflow_dispatch:
+ inputs:
+ aw_context:
+ default: ""
+ description: Agent caller context (used internally by Agentic Workflows).
+ required: false
+ type: string
permissions: {}
@@ -112,7 +99,10 @@ env:
jobs:
activation:
needs: pre_activation
- if: "needs.pre_activation.outputs.activated == 'true' && (github.event_name == 'issues' && (startsWith(github.event.issue.body, '/cloclo ') || startsWith(github.event.issue.body, '/cloclo\n') || github.event.issue.body == '/cloclo') || github.event_name == 'issue_comment' && (startsWith(github.event.comment.body, '/cloclo ') || startsWith(github.event.comment.body, '/cloclo\n') || github.event.comment.body == '/cloclo') && github.event.issue.pull_request == null || github.event_name == 'issue_comment' && (startsWith(github.event.comment.body, '/cloclo ') || startsWith(github.event.comment.body, '/cloclo\n') || github.event.comment.body == '/cloclo') && github.event.issue.pull_request != null || github.event_name == 'pull_request_review_comment' && (startsWith(github.event.comment.body, '/cloclo ') || startsWith(github.event.comment.body, '/cloclo\n') || github.event.comment.body == '/cloclo') || github.event_name == 'pull_request' && (startsWith(github.event.pull_request.body, '/cloclo ') || startsWith(github.event.pull_request.body, '/cloclo\n') || github.event.pull_request.body == '/cloclo') || github.event_name == 'discussion' && (startsWith(github.event.discussion.body, '/cloclo ') || startsWith(github.event.discussion.body, '/cloclo\n') || github.event.discussion.body == '/cloclo') || github.event_name == 'discussion_comment' && (startsWith(github.event.comment.body, '/cloclo ') || startsWith(github.event.comment.body, '/cloclo\n') || github.event.comment.body == '/cloclo') || github.event_name == 'issues' && github.event.label.name == 'cloclo' || github.event_name == 'pull_request' && github.event.label.name == 'cloclo' || github.event_name == 'discussion' && github.event.label.name == 'cloclo')"
+ if: >
+ needs.pre_activation.outputs.activated == 'true' && ((github.event_name == 'issues' || github.event_name == 'pull_request' ||
+ github.event_name == 'discussion') && github.event.label.name == 'cloclo' || (!(github.event_name == 'issues')) &&
+ (!(github.event_name == 'pull_request')) && (!(github.event_name == 'discussion')))
runs-on: ubuntu-slim
permissions:
actions: read
@@ -293,9 +283,9 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_ecf4dc9a517c46f0_EOF'
+ cat << 'GH_AW_PROMPT_a2de0c26aa2bcf08_EOF'
- GH_AW_PROMPT_ecf4dc9a517c46f0_EOF
+ GH_AW_PROMPT_a2de0c26aa2bcf08_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
@@ -303,16 +293,16 @@ jobs:
cat "${RUNNER_TEMP}/gh-aw/prompts/agentic_workflows_guide.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_ecf4dc9a517c46f0_EOF'
+ cat << 'GH_AW_PROMPT_a2de0c26aa2bcf08_EOF'
Tools: add_comment, create_pull_request, missing_tool, missing_data, noop
- GH_AW_PROMPT_ecf4dc9a517c46f0_EOF
+ GH_AW_PROMPT_a2de0c26aa2bcf08_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_create_pull_request.md"
- cat << 'GH_AW_PROMPT_ecf4dc9a517c46f0_EOF'
+ cat << 'GH_AW_PROMPT_a2de0c26aa2bcf08_EOF'
- GH_AW_PROMPT_ecf4dc9a517c46f0_EOF
+ GH_AW_PROMPT_a2de0c26aa2bcf08_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/mcp_cli_tools_prompt.md"
- cat << 'GH_AW_PROMPT_ecf4dc9a517c46f0_EOF'
+ cat << 'GH_AW_PROMPT_a2de0c26aa2bcf08_EOF'
The following GitHub context information is available for this workflow:
{{#if __GH_AW_GITHUB_ACTOR__ }}
@@ -341,12 +331,12 @@ jobs:
{{/if}}
- GH_AW_PROMPT_ecf4dc9a517c46f0_EOF
+ GH_AW_PROMPT_a2de0c26aa2bcf08_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
if [ "$GITHUB_EVENT_NAME" = "issue_comment" ] && [ -n "$GH_AW_IS_PR_COMMENT" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review_comment" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review" ]; then
cat "${RUNNER_TEMP}/gh-aw/prompts/pr_context_prompt.md"
fi
- cat << 'GH_AW_PROMPT_ecf4dc9a517c46f0_EOF'
+ cat << 'GH_AW_PROMPT_a2de0c26aa2bcf08_EOF'
## Serena Code Analysis
@@ -385,7 +375,7 @@ jobs:
{{#runtime-import .github/workflows/shared/observability-otlp.md}}
{{#runtime-import .github/workflows/shared/noop-reminder.md}}
{{#runtime-import .github/workflows/cloclo.md}}
- GH_AW_PROMPT_ecf4dc9a517c46f0_EOF
+ GH_AW_PROMPT_a2de0c26aa2bcf08_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
@@ -712,9 +702,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_576530abb8320c80_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_5b26a7d42b804af3_EOF'
{"add_comment":{"max":1},"create_pull_request":{"excluded_files":[".github/workflows/*.lock.yml"],"expires":48,"labels":["automation","cloclo"],"max":1,"max_patch_files":100,"max_patch_size":1024,"protect_top_level_dot_folders":true,"protected_files":["package.json","bun.lockb","bunfig.toml","deno.json","deno.jsonc","deno.lock","global.json","NuGet.Config","Directory.Packages.props","mix.exs","mix.lock","go.mod","go.sum","stack.yaml","stack.yaml.lock","pom.xml","build.gradle","build.gradle.kts","settings.gradle","settings.gradle.kts","gradle.properties","package-lock.json","yarn.lock","pnpm-lock.yaml","npm-shrinkwrap.json","requirements.txt","Pipfile","Pipfile.lock","pyproject.toml","setup.py","setup.cfg","Gemfile","Gemfile.lock","uv.lock","CODEOWNERS","DESIGN.md","README.md","CONTRIBUTING.md","CHANGELOG.md","SECURITY.md","CODE_OF_CONDUCT.md","CLAUDE.md","AGENTS.md"],"protected_files_policy":"fallback-to-issue","title_prefix":"[cloclo] "},"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"report_incomplete":{}}
- GH_AW_SAFE_OUTPUTS_CONFIG_576530abb8320c80_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_5b26a7d42b804af3_EOF
- name: Generate Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -947,7 +937,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GITHUB_AW_OTEL_TRACE_ID -e GITHUB_AW_OTEL_PARENT_SPAN_ID -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.6'
GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
- cat << GH_AW_MCP_CONFIG_6cb18be545797135_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
+ cat << GH_AW_MCP_CONFIG_e54dacca88c9d411_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
{
"mcpServers": {
"agenticworkflows": {
@@ -1037,7 +1027,7 @@ jobs:
}
}
}
- GH_AW_MCP_CONFIG_6cb18be545797135_EOF
+ GH_AW_MCP_CONFIG_e54dacca88c9d411_EOF
- name: Mount MCP servers as CLIs
id: mount-mcp-clis
continue-on-error: true
@@ -1761,7 +1751,10 @@ jobs:
}
pre_activation:
- if: "(github.event_name != 'issue_comment' && github.event_name != 'pull_request_review_comment' || contains(fromJSON('[\"OWNER\",\"MEMBER\",\"COLLABORATOR\"]'), github.event.comment.author_association)) && (github.event_name == 'issues' && (startsWith(github.event.issue.body, '/cloclo ') || startsWith(github.event.issue.body, '/cloclo\n') || github.event.issue.body == '/cloclo') || github.event_name == 'issue_comment' && (startsWith(github.event.comment.body, '/cloclo ') || startsWith(github.event.comment.body, '/cloclo\n') || github.event.comment.body == '/cloclo') && github.event.issue.pull_request == null || github.event_name == 'issue_comment' && (startsWith(github.event.comment.body, '/cloclo ') || startsWith(github.event.comment.body, '/cloclo\n') || github.event.comment.body == '/cloclo') && github.event.issue.pull_request != null || github.event_name == 'pull_request_review_comment' && (startsWith(github.event.comment.body, '/cloclo ') || startsWith(github.event.comment.body, '/cloclo\n') || github.event.comment.body == '/cloclo') || github.event_name == 'pull_request' && (startsWith(github.event.pull_request.body, '/cloclo ') || startsWith(github.event.pull_request.body, '/cloclo\n') || github.event.pull_request.body == '/cloclo') || github.event_name == 'discussion' && (startsWith(github.event.discussion.body, '/cloclo ') || startsWith(github.event.discussion.body, '/cloclo\n') || github.event.discussion.body == '/cloclo') || github.event_name == 'discussion_comment' && (startsWith(github.event.comment.body, '/cloclo ') || startsWith(github.event.comment.body, '/cloclo\n') || github.event.comment.body == '/cloclo') || github.event_name == 'issues' && github.event.label.name == 'cloclo' || github.event_name == 'pull_request' && github.event.label.name == 'cloclo' || github.event_name == 'discussion' && github.event.label.name == 'cloclo')"
+ if: >
+ (github.event_name == 'issues' || github.event_name == 'pull_request' || github.event_name == 'discussion') &&
+ github.event.label.name == 'cloclo' || (!(github.event_name == 'issues')) && (!(github.event_name == 'pull_request')) &&
+ (!(github.event_name == 'discussion'))
runs-on: ubuntu-slim
permissions:
contents: read
diff --git a/.github/workflows/cloclo.md b/.github/workflows/cloclo.md
index 75fe83b78b..57607e0ca1 100644
--- a/.github/workflows/cloclo.md
+++ b/.github/workflows/cloclo.md
@@ -2,6 +2,7 @@
on:
slash_command:
name: cloclo
+ strategy: centralized
label_command: cloclo
status-comment: true
permissions:
diff --git a/.github/workflows/dependabot-campaign.lock.yml b/.github/workflows/dependabot-campaign.lock.yml
index 2aba5e6065..1bbd297c73 100644
--- a/.github/workflows/dependabot-campaign.lock.yml
+++ b/.github/workflows/dependabot-campaign.lock.yml
@@ -747,9 +747,13 @@ jobs:
GH_AW_NODE_BIN=$(command -v node 2>/dev/null || true)
export GH_AW_NODE_BIN
(umask 177 && touch /tmp/gh-aw/agent-stdio.log)
- printf '%s\n' '{"$schema":"https://github.com/github/gh-aw-firewall/releases/download/v0.25.43/awf-config.schema.json","network":{"allowDomains":["*.pythonhosted.org","anaconda.org","api.business.githubcopilot.com","api.enterprise.githubcopilot.com","api.github.com","api.githubcopilot.com","api.individual.githubcopilot.com","api.npms.io","api.snapcraft.io","archive.ubuntu.com","azure.archive.ubuntu.com","binstar.org","bootstrap.pypa.io","bun.sh","cdn.jsdelivr.net","conda.anaconda.org","conda.binstar.org","crl.geotrust.com","crl.globalsign.com","crl.identrust.com","crl.sectigo.com","crl.thawte.com","crl.usertrust.com","crl.verisign.com","crl3.digicert.com","crl4.digicert.com","crls.ssl.com","deb.nodesource.com","deno.land","esm.sh","files.pythonhosted.org","get.pnpm.io","github.com","go.dev","golang.org","googleapis.deno.dev","googlechromelabs.github.io","goproxy.io","host.docker.internal","json-schema.org","json.schemastore.org","jsr.io","keyserver.ubuntu.com","nodejs.org","npm.pkg.github.com","npmjs.com","npmjs.org","ocsp.digicert.com","ocsp.geotrust.com","ocsp.globalsign.com","ocsp.identrust.com","ocsp.sectigo.com","ocsp.ssl.com","ocsp.thawte.com","ocsp.usertrust.com","ocsp.verisign.com","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","pip.pypa.io","pkg.go.dev","ppa.launchpad.net","proxy.golang.org","pypi.org","pypi.python.org","raw.githubusercontent.com","registry.bower.io","registry.npmjs.com","registry.npmjs.org","registry.yarnpkg.com","repo.anaconda.com","repo.continuum.io","repo.yarnpkg.com","s.symcb.com","s.symcd.com","security.ubuntu.com","skimdb.npmjs.com","storage.googleapis.com","sum.golang.org","telemetry.enterprise.githubcopilot.com","telemetry.vercel.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","www.googleapis.com","www.npmjs.com","www.npmjs.org","yarnpkg.com"]},"apiProxy":{"enabled":true,"maxEffectiveTokens":10000000,"models":{"auto":["large"],"deep-research":["copilot/deep-research*","copilot/o3-deep-research*","c
opilot/o4-mini-deep-research*","google/deep-research*","gemini/deep-research*","openai/o3-deep-research*","openai/o4-mini-deep-research*"],"gemini-flash":["copilot/gemini-*flash*","google/gemini-*flash*","gemini/gemini-*flash*"],"gemini-flash-lite":["copilot/gemini-*flash*lite*","google/gemini-*flash*lite*","gemini/gemini-*flash*lite*"],"gemini-pro":["copilot/gemini-*pro*","google/gemini-*pro*","gemini/gemini-*pro*"],"gemma":["copilot/gemma*","google/gemma*","gemini/gemma*"],"gpt-4.1":["copilot/gpt-4.1*","openai/gpt-4.1*"],"gpt-5":["copilot/gpt-5*","openai/gpt-5*"],"gpt-5-codex":["copilot/gpt-5*codex*","openai/gpt-5*codex*"],"gpt-5-mini":["copilot/gpt-5*mini*","openai/gpt-5*mini*"],"gpt-5-nano":["copilot/gpt-5*nano*","openai/gpt-5*nano*"],"gpt-5-pro":["copilot/gpt-5*pro*","openai/gpt-5*pro*"],"haiku":["copilot/*haiku*","anthropic/*haiku*"],"large":["sonnet","gpt-5-pro","gpt-5","gemini-pro"],"mini":["haiku","gpt-5-mini","gpt-5-nano","gemini-flash-lite"],"opus":["copilot/*opus*","anthropic/*opus*"],"reasoning":["copilot/o1*","copilot/o3*","copilot/o4*","openai/o1*","openai/o3*","openai/o4*"],"small":["mini"],"sonnet":["copilot/*sonnet*","anthropic/*sonnet*"]}},"container":{"imageTag":"0.25.43"}}' > "${RUNNER_TEMP}/gh-aw/awf-config.json" && cp "${RUNNER_TEMP}/gh-aw/awf-config.json" /tmp/gh-aw/awf-config.json
+ printf '%s\n' '{"$schema":"https://github.com/github/gh-aw-firewall/releases/download/v0.25.43/awf-config.schema.json","network":{"allowDomains":["*.pythonhosted.org","anaconda.org","api.business.githubcopilot.com","api.enterprise.githubcopilot.com","api.github.com","api.githubcopilot.com","api.individual.githubcopilot.com","api.npms.io","api.snapcraft.io","archive.ubuntu.com","azure.archive.ubuntu.com","binstar.org","bootstrap.pypa.io","bun.sh","cdn.jsdelivr.net","conda.anaconda.org","conda.binstar.org","crl.geotrust.com","crl.globalsign.com","crl.identrust.com","crl.sectigo.com","crl.thawte.com","crl.usertrust.com","crl.verisign.com","crl3.digicert.com","crl4.digicert.com","crls.ssl.com","deb.nodesource.com","deno.land","esm.sh","files.pythonhosted.org","get.pnpm.io","github.com","go.dev","golang.org","googleapis.deno.dev","googlechromelabs.github.io","goproxy.io","host.docker.internal","json-schema.org","json.schemastore.org","jsr.io","keyserver.ubuntu.com","nodejs.org","npm.pkg.github.com","npmjs.com","npmjs.org","ocsp.digicert.com","ocsp.geotrust.com","ocsp.globalsign.com","ocsp.identrust.com","ocsp.sectigo.com","ocsp.ssl.com","ocsp.thawte.com","ocsp.usertrust.com","ocsp.verisign.com","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","pip.pypa.io","pkg.go.dev","ppa.launchpad.net","proxy.golang.org","pypi.org","pypi.python.org","raw.githubusercontent.com","registry.bower.io","registry.npmjs.com","registry.npmjs.org","registry.yarnpkg.com","repo.anaconda.com","repo.continuum.io","repo.yarnpkg.com","s.symcb.com","s.symcd.com","security.ubuntu.com","skimdb.npmjs.com","storage.googleapis.com","sum.golang.org","telemetry.enterprise.githubcopilot.com","telemetry.vercel.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","www.googleapis.com","www.npmjs.com","www.npmjs.org","yarnpkg.com"]},"apiProxy":{"enabled":true,"maxRuns":100,"maxEffectiveTokens":25000000,"models":{"auto":["large"],"deep-research":["copilot/deep-research*","copilot/o3-deep
-research*","copilot/o4-mini-deep-research*","google/deep-research*","gemini/deep-research*","openai/o3-deep-research*","openai/o4-mini-deep-research*"],"gemini-flash":["copilot/gemini-*flash*","google/gemini-*flash*","gemini/gemini-*flash*"],"gemini-flash-lite":["copilot/gemini-*flash*lite*","google/gemini-*flash*lite*","gemini/gemini-*flash*lite*"],"gemini-pro":["copilot/gemini-*pro*","google/gemini-*pro*","gemini/gemini-*pro*"],"gemma":["copilot/gemma*","google/gemma*","gemini/gemma*"],"gpt-4.1":["copilot/gpt-4.1*","openai/gpt-4.1*"],"gpt-5":["copilot/gpt-5*","openai/gpt-5*"],"gpt-5-codex":["copilot/gpt-5*codex*","openai/gpt-5*codex*"],"gpt-5-mini":["copilot/gpt-5*mini*","openai/gpt-5*mini*"],"gpt-5-nano":["copilot/gpt-5*nano*","openai/gpt-5*nano*"],"gpt-5-pro":["copilot/gpt-5*pro*","openai/gpt-5*pro*"],"haiku":["copilot/*haiku*","anthropic/*haiku*"],"large":["sonnet","gpt-5-pro","gpt-5","gemini-pro"],"mini":["haiku","gpt-5-mini","gpt-5-nano","gemini-flash-lite"],"opus":["copilot/*opus*","anthropic/*opus*"],"reasoning":["copilot/o1*","copilot/o3*","copilot/o4*","openai/o1*","openai/o3*","openai/o4*"],"small":["mini"],"sonnet":["copilot/*sonnet*","anthropic/*sonnet*"]}},"container":{"imageTag":"0.25.43"}}' > "${RUNNER_TEMP}/gh-aw/awf-config.json" && cp "${RUNNER_TEMP}/gh-aw/awf-config.json" /tmp/gh-aw/awf-config.json
+ GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS=""
+ if [[ "${DOCKER_HOST:-}" =~ ^tcp://(localhost|127\.0\.0\.1)(:[0-9]+)?$ ]]; then
+ GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS="--docker-host-path-prefix /tmp/gh-aw"
+ fi
# shellcheck disable=SC1003
- sudo -E awf --config "${RUNNER_TEMP}/gh-aw/awf-config.json" --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --env-all --exclude-env COPILOT_GITHUB_TOKEN --exclude-env GH_TOKEN --exclude-env GITHUB_MCP_SERVER_TOKEN --exclude-env MCP_GATEWAY_API_KEY --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --skip-pull --difc-proxy-host host.docker.internal:18443 --difc-proxy-ca-cert /tmp/gh-aw/difc-proxy-tls/ca.crt \
+ sudo -E awf --config "${RUNNER_TEMP}/gh-aw/awf-config.json" --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" ${GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS} --env-all --exclude-env COPILOT_GITHUB_TOKEN --exclude-env GH_TOKEN --exclude-env GITHUB_MCP_SERVER_TOKEN --exclude-env MCP_GATEWAY_API_KEY --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --skip-pull --difc-proxy-host host.docker.internal:18443 --difc-proxy-ca-cert /tmp/gh-aw/difc-proxy-tls/ca.crt \
-- /bin/bash -c 'export PATH="${RUNNER_TEMP}/gh-aw/mcp-cli/bin:$PATH" && export PATH="$(find /opt/hostedtoolcache /home/runner/work/_tool -maxdepth 5 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && GH_AW_NODE_EXEC="${GH_AW_NODE_BIN:-}"; if [ -z "$GH_AW_NODE_EXEC" ] || [ ! -x "$GH_AW_NODE_EXEC" ]; then GH_AW_NODE_EXEC="$(command -v node 2>/dev/null || true)"; fi; if [ -z "$GH_AW_NODE_EXEC" ]; then echo "node runtime missing on this runner — check runtimes.node in workflow YAML" >&2; exit 127; fi; "$GH_AW_NODE_EXEC" ${RUNNER_TEMP}/gh-aw/actions/copilot_harness.cjs /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --no-ask-user --allow-all-tools --allow-all-paths --add-dir "${GITHUB_WORKSPACE}" --prompt-file /tmp/gh-aw/aw-prompts/prompt.txt' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
env:
AWF_REFLECT_ENABLED: 1
@@ -1122,7 +1126,7 @@ jobs:
GH_AW_MISSING_TOOL_REPORT_AS_FAILURE: "true"
GH_AW_MISSING_DATA_REPORT_AS_FAILURE: "true"
GH_AW_TIMEOUT_MINUTES: "15"
- GH_AW_MAX_EFFECTIVE_TOKENS: "10000000"
+ GH_AW_MAX_EFFECTIVE_TOKENS: "25000000"
with:
github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
@@ -1265,9 +1269,13 @@ jobs:
GH_AW_NODE_BIN=$(command -v node 2>/dev/null || true)
export GH_AW_NODE_BIN
(umask 177 && touch /tmp/gh-aw/threat-detection/detection.log)
- printf '%s\n' '{"$schema":"https://github.com/github/gh-aw-firewall/releases/download/v0.25.43/awf-config.schema.json","network":{"allowDomains":["api.business.githubcopilot.com","api.enterprise.githubcopilot.com","api.github.com","api.githubcopilot.com","api.individual.githubcopilot.com","github.com","host.docker.internal","telemetry.enterprise.githubcopilot.com"]},"apiProxy":{"enabled":true,"maxEffectiveTokens":10000000},"container":{"imageTag":"0.25.43"}}' > "${RUNNER_TEMP}/gh-aw/awf-config.json" && cp "${RUNNER_TEMP}/gh-aw/awf-config.json" /tmp/gh-aw/awf-config.json
+ printf '%s\n' '{"$schema":"https://github.com/github/gh-aw-firewall/releases/download/v0.25.43/awf-config.schema.json","network":{"allowDomains":["api.business.githubcopilot.com","api.enterprise.githubcopilot.com","api.github.com","api.githubcopilot.com","api.individual.githubcopilot.com","github.com","host.docker.internal","telemetry.enterprise.githubcopilot.com"]},"apiProxy":{"enabled":true,"maxRuns":100,"maxEffectiveTokens":25000000},"container":{"imageTag":"0.25.43"}}' > "${RUNNER_TEMP}/gh-aw/awf-config.json" && cp "${RUNNER_TEMP}/gh-aw/awf-config.json" /tmp/gh-aw/awf-config.json
+ GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS=""
+ if [[ "${DOCKER_HOST:-}" =~ ^tcp://(localhost|127\.0\.0\.1)(:[0-9]+)?$ ]]; then
+ GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS="--docker-host-path-prefix /tmp/gh-aw"
+ fi
# shellcheck disable=SC1003
- sudo -E awf --config "${RUNNER_TEMP}/gh-aw/awf-config.json" --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --env-all --exclude-env COPILOT_GITHUB_TOKEN --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --skip-pull \
+ sudo -E awf --config "${RUNNER_TEMP}/gh-aw/awf-config.json" --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" ${GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS} --env-all --exclude-env COPILOT_GITHUB_TOKEN --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --skip-pull \
-- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache /home/runner/work/_tool -maxdepth 5 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && GH_AW_NODE_EXEC="${GH_AW_NODE_BIN:-}"; if [ -z "$GH_AW_NODE_EXEC" ] || [ ! -x "$GH_AW_NODE_EXEC" ]; then GH_AW_NODE_EXEC="$(command -v node 2>/dev/null || true)"; fi; if [ -z "$GH_AW_NODE_EXEC" ]; then echo "node runtime missing on this runner — check runtimes.node in workflow YAML" >&2; exit 127; fi; "$GH_AW_NODE_EXEC" ${RUNNER_TEMP}/gh-aw/actions/copilot_harness.cjs /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --no-ask-user --allow-all-tools --add-dir "${GITHUB_WORKSPACE}" --prompt-file /tmp/gh-aw/aw-prompts/prompt.txt' 2>&1 | tee -a /tmp/gh-aw/threat-detection/detection.log
env:
AWF_REFLECT_ENABLED: 1
diff --git a/.github/workflows/dependabot-worker.lock.yml b/.github/workflows/dependabot-worker.lock.yml
index 4842fe6ad3..7ff762bb9b 100644
--- a/.github/workflows/dependabot-worker.lock.yml
+++ b/.github/workflows/dependabot-worker.lock.yml
@@ -852,7 +852,6 @@ jobs:
# --allow-tool github
# --allow-tool safeoutputs
# --allow-tool shell(./gh-aw compile --dependabot)
- # --allow-tool shell(cat *)
# --allow-tool shell(cat)
# --allow-tool shell(cd .github/workflows && npm install --package-lock-only)
# --allow-tool shell(date)
@@ -862,7 +861,7 @@ jobs:
# --allow-tool shell(git branch:*)
# --allow-tool shell(git checkout:*)
# --allow-tool shell(git commit:*)
- # --allow-tool shell(git diff *)
+ # --allow-tool shell(git diff)
# --allow-tool shell(git merge:*)
# --allow-tool shell(git rm:*)
# --allow-tool shell(git status)
@@ -875,7 +874,7 @@ jobs:
# --allow-tool shell(make dependabot)
# --allow-tool shell(printf)
# --allow-tool shell(pwd)
- # --allow-tool shell(rg *)
+ # --allow-tool shell(rg)
# --allow-tool shell(safeoutputs:*)
# --allow-tool shell(sort)
# --allow-tool shell(tail)
@@ -890,10 +889,14 @@ jobs:
GH_AW_NODE_BIN=$(command -v node 2>/dev/null || true)
export GH_AW_NODE_BIN
(umask 177 && touch /tmp/gh-aw/agent-stdio.log)
- printf '%s\n' '{"$schema":"https://github.com/github/gh-aw-firewall/releases/download/v0.25.43/awf-config.schema.json","network":{"allowDomains":["*.pythonhosted.org","anaconda.org","api.business.githubcopilot.com","api.enterprise.githubcopilot.com","api.github.com","api.githubcopilot.com","api.individual.githubcopilot.com","api.npms.io","api.snapcraft.io","archive.ubuntu.com","azure.archive.ubuntu.com","binstar.org","bootstrap.pypa.io","bun.sh","cdn.jsdelivr.net","conda.anaconda.org","conda.binstar.org","crl.geotrust.com","crl.globalsign.com","crl.identrust.com","crl.sectigo.com","crl.thawte.com","crl.usertrust.com","crl.verisign.com","crl3.digicert.com","crl4.digicert.com","crls.ssl.com","deb.nodesource.com","deno.land","esm.sh","files.pythonhosted.org","get.pnpm.io","github.com","go.dev","golang.org","googleapis.deno.dev","googlechromelabs.github.io","goproxy.io","host.docker.internal","json-schema.org","json.schemastore.org","jsr.io","keyserver.ubuntu.com","nodejs.org","npm.pkg.github.com","npmjs.com","npmjs.org","ocsp.digicert.com","ocsp.geotrust.com","ocsp.globalsign.com","ocsp.identrust.com","ocsp.sectigo.com","ocsp.ssl.com","ocsp.thawte.com","ocsp.usertrust.com","ocsp.verisign.com","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","pip.pypa.io","pkg.go.dev","ppa.launchpad.net","proxy.golang.org","pypi.org","pypi.python.org","raw.githubusercontent.com","registry.bower.io","registry.npmjs.com","registry.npmjs.org","registry.yarnpkg.com","repo.anaconda.com","repo.continuum.io","repo.yarnpkg.com","s.symcb.com","s.symcd.com","security.ubuntu.com","skimdb.npmjs.com","storage.googleapis.com","sum.golang.org","telemetry.enterprise.githubcopilot.com","telemetry.vercel.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","www.googleapis.com","www.npmjs.com","www.npmjs.org","yarnpkg.com"]},"apiProxy":{"enabled":true,"maxEffectiveTokens":10000000,"models":{"auto":["large"],"deep-research":["copilot/deep-research*","copilot/o3-deep-research*","c
opilot/o4-mini-deep-research*","google/deep-research*","gemini/deep-research*","openai/o3-deep-research*","openai/o4-mini-deep-research*"],"gemini-flash":["copilot/gemini-*flash*","google/gemini-*flash*","gemini/gemini-*flash*"],"gemini-flash-lite":["copilot/gemini-*flash*lite*","google/gemini-*flash*lite*","gemini/gemini-*flash*lite*"],"gemini-pro":["copilot/gemini-*pro*","google/gemini-*pro*","gemini/gemini-*pro*"],"gemma":["copilot/gemma*","google/gemma*","gemini/gemma*"],"gpt-4.1":["copilot/gpt-4.1*","openai/gpt-4.1*"],"gpt-5":["copilot/gpt-5*","openai/gpt-5*"],"gpt-5-codex":["copilot/gpt-5*codex*","openai/gpt-5*codex*"],"gpt-5-mini":["copilot/gpt-5*mini*","openai/gpt-5*mini*"],"gpt-5-nano":["copilot/gpt-5*nano*","openai/gpt-5*nano*"],"gpt-5-pro":["copilot/gpt-5*pro*","openai/gpt-5*pro*"],"haiku":["copilot/*haiku*","anthropic/*haiku*"],"large":["sonnet","gpt-5-pro","gpt-5","gemini-pro"],"mini":["haiku","gpt-5-mini","gpt-5-nano","gemini-flash-lite"],"opus":["copilot/*opus*","anthropic/*opus*"],"reasoning":["copilot/o1*","copilot/o3*","copilot/o4*","openai/o1*","openai/o3*","openai/o4*"],"small":["mini"],"sonnet":["copilot/*sonnet*","anthropic/*sonnet*"]}},"container":{"imageTag":"0.25.43"}}' > "${RUNNER_TEMP}/gh-aw/awf-config.json" && cp "${RUNNER_TEMP}/gh-aw/awf-config.json" /tmp/gh-aw/awf-config.json
+ printf '%s\n' '{"$schema":"https://github.com/github/gh-aw-firewall/releases/download/v0.25.43/awf-config.schema.json","network":{"allowDomains":["*.pythonhosted.org","anaconda.org","api.business.githubcopilot.com","api.enterprise.githubcopilot.com","api.github.com","api.githubcopilot.com","api.individual.githubcopilot.com","api.npms.io","api.snapcraft.io","archive.ubuntu.com","azure.archive.ubuntu.com","binstar.org","bootstrap.pypa.io","bun.sh","cdn.jsdelivr.net","conda.anaconda.org","conda.binstar.org","crl.geotrust.com","crl.globalsign.com","crl.identrust.com","crl.sectigo.com","crl.thawte.com","crl.usertrust.com","crl.verisign.com","crl3.digicert.com","crl4.digicert.com","crls.ssl.com","deb.nodesource.com","deno.land","esm.sh","files.pythonhosted.org","get.pnpm.io","github.com","go.dev","golang.org","googleapis.deno.dev","googlechromelabs.github.io","goproxy.io","host.docker.internal","json-schema.org","json.schemastore.org","jsr.io","keyserver.ubuntu.com","nodejs.org","npm.pkg.github.com","npmjs.com","npmjs.org","ocsp.digicert.com","ocsp.geotrust.com","ocsp.globalsign.com","ocsp.identrust.com","ocsp.sectigo.com","ocsp.ssl.com","ocsp.thawte.com","ocsp.usertrust.com","ocsp.verisign.com","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","pip.pypa.io","pkg.go.dev","ppa.launchpad.net","proxy.golang.org","pypi.org","pypi.python.org","raw.githubusercontent.com","registry.bower.io","registry.npmjs.com","registry.npmjs.org","registry.yarnpkg.com","repo.anaconda.com","repo.continuum.io","repo.yarnpkg.com","s.symcb.com","s.symcd.com","security.ubuntu.com","skimdb.npmjs.com","storage.googleapis.com","sum.golang.org","telemetry.enterprise.githubcopilot.com","telemetry.vercel.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","www.googleapis.com","www.npmjs.com","www.npmjs.org","yarnpkg.com"]},"apiProxy":{"enabled":true,"maxRuns":100,"maxEffectiveTokens":25000000,"models":{"auto":["large"],"deep-research":["copilot/deep-research*","copilot/o3-deep
-research*","copilot/o4-mini-deep-research*","google/deep-research*","gemini/deep-research*","openai/o3-deep-research*","openai/o4-mini-deep-research*"],"gemini-flash":["copilot/gemini-*flash*","google/gemini-*flash*","gemini/gemini-*flash*"],"gemini-flash-lite":["copilot/gemini-*flash*lite*","google/gemini-*flash*lite*","gemini/gemini-*flash*lite*"],"gemini-pro":["copilot/gemini-*pro*","google/gemini-*pro*","gemini/gemini-*pro*"],"gemma":["copilot/gemma*","google/gemma*","gemini/gemma*"],"gpt-4.1":["copilot/gpt-4.1*","openai/gpt-4.1*"],"gpt-5":["copilot/gpt-5*","openai/gpt-5*"],"gpt-5-codex":["copilot/gpt-5*codex*","openai/gpt-5*codex*"],"gpt-5-mini":["copilot/gpt-5*mini*","openai/gpt-5*mini*"],"gpt-5-nano":["copilot/gpt-5*nano*","openai/gpt-5*nano*"],"gpt-5-pro":["copilot/gpt-5*pro*","openai/gpt-5*pro*"],"haiku":["copilot/*haiku*","anthropic/*haiku*"],"large":["sonnet","gpt-5-pro","gpt-5","gemini-pro"],"mini":["haiku","gpt-5-mini","gpt-5-nano","gemini-flash-lite"],"opus":["copilot/*opus*","anthropic/*opus*"],"reasoning":["copilot/o1*","copilot/o3*","copilot/o4*","openai/o1*","openai/o3*","openai/o4*"],"small":["mini"],"sonnet":["copilot/*sonnet*","anthropic/*sonnet*"]}},"container":{"imageTag":"0.25.43"}}' > "${RUNNER_TEMP}/gh-aw/awf-config.json" && cp "${RUNNER_TEMP}/gh-aw/awf-config.json" /tmp/gh-aw/awf-config.json
+ GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS=""
+ if [[ "${DOCKER_HOST:-}" =~ ^tcp://(localhost|127\.0\.0\.1)(:[0-9]+)?$ ]]; then
+ GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS="--docker-host-path-prefix /tmp/gh-aw"
+ fi
# shellcheck disable=SC1003
- sudo -E awf --config "${RUNNER_TEMP}/gh-aw/awf-config.json" --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --env-all --exclude-env COPILOT_GITHUB_TOKEN --exclude-env GH_TOKEN --exclude-env GITHUB_MCP_SERVER_TOKEN --exclude-env MCP_GATEWAY_API_KEY --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --skip-pull --difc-proxy-host host.docker.internal:18443 --difc-proxy-ca-cert /tmp/gh-aw/difc-proxy-tls/ca.crt \
- -- /bin/bash -c 'export PATH="${RUNNER_TEMP}/gh-aw/mcp-cli/bin:$PATH" && export PATH="$(find /opt/hostedtoolcache /home/runner/work/_tool -maxdepth 5 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && GH_AW_NODE_EXEC="${GH_AW_NODE_BIN:-}"; if [ -z "$GH_AW_NODE_EXEC" ] || [ ! -x "$GH_AW_NODE_EXEC" ]; then GH_AW_NODE_EXEC="$(command -v node 2>/dev/null || true)"; fi; if [ -z "$GH_AW_NODE_EXEC" ]; then echo "node runtime missing on this runner — check runtimes.node in workflow YAML" >&2; exit 127; fi; "$GH_AW_NODE_EXEC" ${RUNNER_TEMP}/gh-aw/actions/copilot_harness.cjs /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --no-ask-user --allow-tool github --allow-tool safeoutputs --allow-tool '\''shell(./gh-aw compile --dependabot)'\'' --allow-tool '\''shell(cat *)'\'' --allow-tool '\''shell(cat)'\'' --allow-tool '\''shell(cd .github/workflows && npm install --package-lock-only)'\'' --allow-tool '\''shell(date)'\'' --allow-tool '\''shell(echo)'\'' --allow-tool '\''shell(gh:*)'\'' --allow-tool '\''shell(git add:*)'\'' --allow-tool '\''shell(git branch:*)'\'' --allow-tool '\''shell(git checkout:*)'\'' --allow-tool '\''shell(git commit:*)'\'' --allow-tool '\''shell(git diff *)'\'' --allow-tool '\''shell(git merge:*)'\'' --allow-tool '\''shell(git rm:*)'\'' --allow-tool '\''shell(git status)'\'' --allow-tool '\''shell(git switch:*)'\'' --allow-tool '\''shell(grep)'\'' --allow-tool '\''shell(head)'\'' --allow-tool '\''shell(ls)'\'' --allow-tool '\''shell(make build)'\'' --allow-tool '\''shell(make dependabot && make build)'\'' --allow-tool '\''shell(make dependabot)'\'' --allow-tool '\''shell(printf)'\'' --allow-tool '\''shell(pwd)'\'' --allow-tool '\''shell(rg *)'\'' --allow-tool '\''shell(safeoutputs:*)'\'' --allow-tool '\''shell(sort)'\'' --allow-tool '\''shell(tail)'\'' --allow-tool '\''shell(uniq)'\'' --allow-tool 
'\''shell(wc)'\'' --allow-tool '\''shell(yq)'\'' --allow-tool write --allow-all-paths --add-dir "${GITHUB_WORKSPACE}" --prompt-file /tmp/gh-aw/aw-prompts/prompt.txt' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
+ sudo -E awf --config "${RUNNER_TEMP}/gh-aw/awf-config.json" --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" ${GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS} --env-all --exclude-env COPILOT_GITHUB_TOKEN --exclude-env GH_TOKEN --exclude-env GITHUB_MCP_SERVER_TOKEN --exclude-env MCP_GATEWAY_API_KEY --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --skip-pull --difc-proxy-host host.docker.internal:18443 --difc-proxy-ca-cert /tmp/gh-aw/difc-proxy-tls/ca.crt \
+ -- /bin/bash -c 'export PATH="${RUNNER_TEMP}/gh-aw/mcp-cli/bin:$PATH" && export PATH="$(find /opt/hostedtoolcache /home/runner/work/_tool -maxdepth 5 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && GH_AW_NODE_EXEC="${GH_AW_NODE_BIN:-}"; if [ -z "$GH_AW_NODE_EXEC" ] || [ ! -x "$GH_AW_NODE_EXEC" ]; then GH_AW_NODE_EXEC="$(command -v node 2>/dev/null || true)"; fi; if [ -z "$GH_AW_NODE_EXEC" ]; then echo "node runtime missing on this runner — check runtimes.node in workflow YAML" >&2; exit 127; fi; "$GH_AW_NODE_EXEC" ${RUNNER_TEMP}/gh-aw/actions/copilot_harness.cjs /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --no-ask-user --allow-tool github --allow-tool safeoutputs --allow-tool '\''shell(./gh-aw compile --dependabot)'\'' --allow-tool '\''shell(cat)'\'' --allow-tool '\''shell(cd .github/workflows && npm install --package-lock-only)'\'' --allow-tool '\''shell(date)'\'' --allow-tool '\''shell(echo)'\'' --allow-tool '\''shell(gh:*)'\'' --allow-tool '\''shell(git add:*)'\'' --allow-tool '\''shell(git branch:*)'\'' --allow-tool '\''shell(git checkout:*)'\'' --allow-tool '\''shell(git commit:*)'\'' --allow-tool '\''shell(git diff)'\'' --allow-tool '\''shell(git merge:*)'\'' --allow-tool '\''shell(git rm:*)'\'' --allow-tool '\''shell(git status)'\'' --allow-tool '\''shell(git switch:*)'\'' --allow-tool '\''shell(grep)'\'' --allow-tool '\''shell(head)'\'' --allow-tool '\''shell(ls)'\'' --allow-tool '\''shell(make build)'\'' --allow-tool '\''shell(make dependabot && make build)'\'' --allow-tool '\''shell(make dependabot)'\'' --allow-tool '\''shell(printf)'\'' --allow-tool '\''shell(pwd)'\'' --allow-tool '\''shell(rg)'\'' --allow-tool '\''shell(safeoutputs:*)'\'' --allow-tool '\''shell(sort)'\'' --allow-tool '\''shell(tail)'\'' --allow-tool '\''shell(uniq)'\'' --allow-tool '\''shell(wc)'\'' --allow-tool 
'\''shell(yq)'\'' --allow-tool write --allow-all-paths --add-dir "${GITHUB_WORKSPACE}" --prompt-file /tmp/gh-aw/aw-prompts/prompt.txt' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
env:
AWF_REFLECT_ENABLED: 1
COPILOT_AGENT_RUNNER_TYPE: STANDALONE
@@ -1242,7 +1245,7 @@ jobs:
GH_AW_MISSING_TOOL_REPORT_AS_FAILURE: "true"
GH_AW_MISSING_DATA_REPORT_AS_FAILURE: "true"
GH_AW_TIMEOUT_MINUTES: "30"
- GH_AW_MAX_EFFECTIVE_TOKENS: "10000000"
+ GH_AW_MAX_EFFECTIVE_TOKENS: "25000000"
with:
github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
@@ -1386,9 +1389,13 @@ jobs:
GH_AW_NODE_BIN=$(command -v node 2>/dev/null || true)
export GH_AW_NODE_BIN
(umask 177 && touch /tmp/gh-aw/threat-detection/detection.log)
- printf '%s\n' '{"$schema":"https://github.com/github/gh-aw-firewall/releases/download/v0.25.43/awf-config.schema.json","network":{"allowDomains":["api.business.githubcopilot.com","api.enterprise.githubcopilot.com","api.github.com","api.githubcopilot.com","api.individual.githubcopilot.com","github.com","host.docker.internal","telemetry.enterprise.githubcopilot.com"]},"apiProxy":{"enabled":true,"maxEffectiveTokens":10000000},"container":{"imageTag":"0.25.43"}}' > "${RUNNER_TEMP}/gh-aw/awf-config.json" && cp "${RUNNER_TEMP}/gh-aw/awf-config.json" /tmp/gh-aw/awf-config.json
+ printf '%s\n' '{"$schema":"https://github.com/github/gh-aw-firewall/releases/download/v0.25.43/awf-config.schema.json","network":{"allowDomains":["api.business.githubcopilot.com","api.enterprise.githubcopilot.com","api.github.com","api.githubcopilot.com","api.individual.githubcopilot.com","github.com","host.docker.internal","telemetry.enterprise.githubcopilot.com"]},"apiProxy":{"enabled":true,"maxRuns":100,"maxEffectiveTokens":25000000},"container":{"imageTag":"0.25.43"}}' > "${RUNNER_TEMP}/gh-aw/awf-config.json" && cp "${RUNNER_TEMP}/gh-aw/awf-config.json" /tmp/gh-aw/awf-config.json
+ GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS=""
+ if [[ "${DOCKER_HOST:-}" =~ ^tcp://(localhost|127\.0\.0\.1)(:[0-9]+)?$ ]]; then
+ GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS="--docker-host-path-prefix /tmp/gh-aw"
+ fi
# shellcheck disable=SC1003
- sudo -E awf --config "${RUNNER_TEMP}/gh-aw/awf-config.json" --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --env-all --exclude-env COPILOT_GITHUB_TOKEN --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --skip-pull \
+ sudo -E awf --config "${RUNNER_TEMP}/gh-aw/awf-config.json" --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" ${GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS} --env-all --exclude-env COPILOT_GITHUB_TOKEN --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --skip-pull \
-- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache /home/runner/work/_tool -maxdepth 5 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && GH_AW_NODE_EXEC="${GH_AW_NODE_BIN:-}"; if [ -z "$GH_AW_NODE_EXEC" ] || [ ! -x "$GH_AW_NODE_EXEC" ]; then GH_AW_NODE_EXEC="$(command -v node 2>/dev/null || true)"; fi; if [ -z "$GH_AW_NODE_EXEC" ]; then echo "node runtime missing on this runner — check runtimes.node in workflow YAML" >&2; exit 127; fi; "$GH_AW_NODE_EXEC" ${RUNNER_TEMP}/gh-aw/actions/copilot_harness.cjs /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --no-ask-user --allow-all-tools --add-dir "${GITHUB_WORKSPACE}" --prompt-file /tmp/gh-aw/aw-prompts/prompt.txt' 2>&1 | tee -a /tmp/gh-aw/threat-detection/detection.log
env:
AWF_REFLECT_ENABLED: 1
diff --git a/actions/setup/js/check_command_position.cjs b/actions/setup/js/check_command_position.cjs
index 349ccf1f11..e6f9402e39 100644
--- a/actions/setup/js/check_command_position.cjs
+++ b/actions/setup/js/check_command_position.cjs
@@ -66,6 +66,42 @@ async function main() {
text = context.payload.discussion?.body || "";
} else if (eventName === "discussion_comment") {
text = context.payload.comment?.body || "";
+ } else if (eventName === "workflow_dispatch") {
+ const rawAwContext = context.payload?.inputs?.aw_context ?? "";
+ let inboundCommandName = "";
+ if (typeof rawAwContext === "string" && rawAwContext.trim() !== "") {
+ try {
+ const parsed = JSON.parse(rawAwContext);
+ if (parsed && typeof parsed === "object" && typeof parsed.command_name === "string") {
+ inboundCommandName = parsed.command_name.trim();
+ }
+ } catch {
+ // ignore malformed aw_context and fall back to manual workflow_dispatch behavior
+ }
+ }
+
+ if (inboundCommandName) {
+ if (commands.includes(inboundCommandName)) {
+ core.info(`✓ command_name '${inboundCommandName}' resolved from workflow_dispatch aw_context`);
+ core.setOutput("command_position_ok", "true");
+ core.setOutput("matched_command", inboundCommandName);
+ } else {
+ core.warning(`⚠️ command_name '${inboundCommandName}' from aw_context is not in allowed commands list.`);
+ core.setOutput("command_position_ok", "false");
+ core.setOutput("matched_command", "");
+ await writeDenialSummary(
+ `Workflow dispatch aw_context.command_name '${inboundCommandName}' is not one of the configured commands.`,
+ "Ensure the centralized slash-command trigger dispatches only configured commands."
+ );
+ }
+ return;
+ }
+
+ // Manual workflow_dispatch without aw_context.command_name is still allowed.
+ core.info("workflow_dispatch without aw_context.command_name; skipping command position check");
+ core.setOutput("command_position_ok", "true");
+ core.setOutput("matched_command", "");
+ return;
} else {
// For non-comment events, pass the check
core.info(`Event ${eventName} does not require command position check`);
diff --git a/actions/setup/js/check_command_position.test.cjs b/actions/setup/js/check_command_position.test.cjs
index e71b2a5e16..3c6f02d163 100644
--- a/actions/setup/js/check_command_position.test.cjs
+++ b/actions/setup/js/check_command_position.test.cjs
@@ -76,7 +76,19 @@ const mockCore = {
(mockContext.payload = {}),
await eval(`(async () => { ${checkCommandPositionScript}; await main(); })()`),
expect(mockCore.setOutput).toHaveBeenCalledWith("command_position_ok", "true"),
- expect(mockCore.info).toHaveBeenCalledWith(expect.stringContaining("does not require command position check")));
+ expect(mockCore.info).toHaveBeenCalledWith(expect.stringContaining("without aw_context.command_name")));
+ }),
+ it("should resolve command from workflow_dispatch aw_context", async () => {
+ process.env.GH_AW_COMMANDS = JSON.stringify(["test-bot"]);
+ mockContext.eventName = "workflow_dispatch";
+ mockContext.payload = {
+ inputs: {
+ aw_context: JSON.stringify({ command_name: "test-bot" }),
+ },
+ };
+ await eval(`(async () => { ${checkCommandPositionScript}; await main(); })()`);
+ expect(mockCore.setOutput).toHaveBeenCalledWith("command_position_ok", "true");
+ expect(mockCore.setOutput).toHaveBeenCalledWith("matched_command", "test-bot");
}),
it("should handle pull_request event with command at start", async () => {
((process.env.GH_AW_COMMANDS = JSON.stringify(["review-bot"])),
diff --git a/actions/setup/js/check_membership.cjs b/actions/setup/js/check_membership.cjs
index 64b8a31030..1e4ea3bcaa 100644
--- a/actions/setup/js/check_membership.cjs
+++ b/actions/setup/js/check_membership.cjs
@@ -4,25 +4,113 @@
const { parseRequiredPermissions, parseAllowedBots, checkRepositoryPermission, checkBotStatus, isAllowedBot, isConfusedDeputyAttack } = require("./check_permissions_utils.cjs");
const { writeDenialSummary } = require("./pre_activation_summary.cjs");
+function readWorkflowDispatchAwContext(payload) {
+ try {
+ const rawAwContext = payload?.inputs?.aw_context;
+ if (typeof rawAwContext !== "string" || rawAwContext.trim() === "") {
+ return null;
+ }
+ const parsed = JSON.parse(rawAwContext);
+ if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
+ return null;
+ }
+ return parsed;
+ } catch {
+ return null;
+ }
+}
+
async function main() {
const { eventName } = context;
const actor = context.actor;
const { owner, repo } = context.repo;
const requiredPermissions = parseRequiredPermissions();
const allowedBots = parseAllowedBots();
+ let actorToValidate = actor;
- // For workflow_dispatch, only skip check if "write" is in the allowed roles
- // since workflow_dispatch can be triggered by users with write access
+ // workflow_dispatch is never treated as a trusted event.
+ // For centralized slash-command dispatches, validate the original triggering actor.
if (eventName === "workflow_dispatch") {
- const hasWriteRole = requiredPermissions.includes("write");
- if (hasWriteRole) {
- core.info(`✅ Event ${eventName} does not require validation (write role allowed)`);
- core.setOutput("is_team_member", "true");
- core.setOutput("result", "safe_event");
- return;
+ const awContext = readWorkflowDispatchAwContext(context.payload);
+ const commandName = typeof awContext?.command_name === "string" ? awContext.command_name.trim() : "";
+ const propagatedActor = typeof awContext?.actor === "string" ? awContext.actor.trim() : "";
+
+ if (commandName && actor === "github-actions[bot]") {
+ if (!propagatedActor) {
+ const errorMessage = "Access denied: workflow_dispatch aw_context.actor is required for centralized slash-command dispatches.";
+ core.warning(errorMessage);
+ core.setOutput("is_team_member", "false");
+ core.setOutput("result", "config_error");
+ core.setOutput("error_message", errorMessage);
+ await writeDenialSummary(errorMessage, "Ensure centralized slash-command dispatches include aw_context.actor.");
+ return;
+ }
+
+ actorToValidate = propagatedActor;
+ core.info(`Validating centralized workflow_dispatch against originating actor '${actorToValidate}'`);
+
+ const itemType = typeof awContext?.item_type === "string" ? awContext.item_type.trim() : "";
+ const rawItemNumber = typeof awContext?.item_number === "string" ? awContext.item_number.trim() : "";
+ if (itemType === "pull_request") {
+ if (!/^\d+$/.test(rawItemNumber)) {
+ const errorMessage = "Access denied: centralized slash-command dispatch is missing a valid pull request number.";
+ core.warning(errorMessage);
+ core.setOutput("is_team_member", "false");
+ core.setOutput("result", "fork_pull_request");
+ core.setOutput("error_message", errorMessage);
+ await writeDenialSummary(errorMessage, "Dispatch metadata is incomplete. Re-run from the original PR event.");
+ return;
+ }
+ const pullNumber = Number.parseInt(rawItemNumber, 10);
+ if (!Number.isInteger(pullNumber) || pullNumber <= 0) {
+ const errorMessage = "Access denied: centralized slash-command dispatch is missing a valid pull request number.";
+ core.warning(errorMessage);
+ core.setOutput("is_team_member", "false");
+ core.setOutput("result", "fork_pull_request");
+ core.setOutput("error_message", errorMessage);
+ await writeDenialSummary(errorMessage, "Dispatch metadata is incomplete. Re-run from the original PR event.");
+ return;
+ }
+
+ try {
+ const response = await github.rest.pulls.get({
+ owner,
+ repo,
+ pull_number: pullNumber,
+ });
+ const pullRequest = response?.data;
+ const headRepo = pullRequest?.head?.repo?.full_name;
+ const baseRepo = pullRequest?.base?.repo?.full_name;
+ if (!headRepo || !baseRepo) {
+ const errorMessage = "Access denied: centralized slash-command dispatch pull request repository metadata is unavailable.";
+ core.warning(errorMessage);
+ core.setOutput("is_team_member", "false");
+ core.setOutput("result", "fork_pull_request");
+ core.setOutput("error_message", errorMessage);
+ await writeDenialSummary(errorMessage, "Check the pre_activation log and ensure pull request repository metadata is present.");
+ return;
+ }
+ if (headRepo !== baseRepo) {
+ const errorMessage = "Access denied: centralized slash-command dispatch from fork-based pull requests is not allowed.";
+ core.warning(errorMessage);
+ core.setOutput("is_team_member", "false");
+ core.setOutput("result", "fork_pull_request");
+ core.setOutput("error_message", errorMessage);
+ await writeDenialSummary(errorMessage, "Run slash-command workflows from branches in the base repository.");
+ return;
+ }
+ } catch (error) {
+ const errorMessage = `Repository permission check failed: Unable to verify pull request provenance (${error?.message ?? String(error)}).`;
+ core.warning(errorMessage);
+ core.setOutput("is_team_member", "false");
+ core.setOutput("result", "api_error");
+ core.setOutput("error_message", errorMessage);
+ await writeDenialSummary(errorMessage, "Check the pre_activation log and ensure the workflow token can read pull request metadata.");
+ return;
+ }
+ }
}
- // If write is not allowed, continue with permission check
- core.info(`Event ${eventName} requires validation (write role not allowed)`);
+ core.info(`Event ${eventName} requires validation`);
}
// skip check for other safe events
@@ -56,8 +144,8 @@ async function main() {
// @dependabot show (for issue_comment events) to make dependabot appear as the
// actor, bypassing permission checks that rely solely on github.actor.
// Reference: https://labs.boostsecurity.io/articles/weaponizing-dependabot-pwn-request-at-its-finest/
- if (isConfusedDeputyAttack(actor, eventName, context.payload)) {
- const errorMessage = `Access denied: Potential confused deputy attack detected. Actor '${actor}' does not match the event author. The workflow may have been triggered indirectly via a bot command.`;
+ if (isConfusedDeputyAttack(actorToValidate, eventName, context.payload)) {
+ const errorMessage = `Access denied: Potential confused deputy attack detected. Actor '${actorToValidate}' does not match the event author. The workflow may have been triggered indirectly via a bot command.`;
core.warning(errorMessage);
core.setOutput("is_team_member", "false");
core.setOutput("result", "confused_deputy");
@@ -67,7 +155,7 @@ async function main() {
}
// Check if the actor has the required repository permissions
- const result = await checkRepositoryPermission(actor, owner, repo, requiredPermissions);
+ const result = await checkRepositoryPermission(actorToValidate, owner, repo, requiredPermissions);
if (result.authorized) {
core.setOutput("is_team_member", "true");
@@ -78,23 +166,23 @@ async function main() {
// Always attempt the bot allowlist fallback before giving up, so that GitHub Apps whose
// actor is not a recognized GitHub user (e.g. "Copilot") are not silently denied.
if (allowedBots.length > 0) {
- core.info(`Checking if actor '${actor}' is in allowed bots list: ${allowedBots.join(", ")}`);
+ core.info(`Checking if actor '${actorToValidate}' is in allowed bots list: ${allowedBots.join(", ")}`);
- if (isAllowedBot(actor, allowedBots)) {
- core.info(`Actor '${actor}' is in the allowed bots list`);
+ if (isAllowedBot(actorToValidate, allowedBots)) {
+ core.info(`Actor '${actorToValidate}' is in the allowed bots list`);
// Verify the bot is active/installed on the repository
- const botStatus = await checkBotStatus(actor, owner, repo);
+ const botStatus = await checkBotStatus(actorToValidate, owner, repo);
if (botStatus.isBot && botStatus.isActive) {
- core.info(`✅ Bot '${actor}' is active on the repository and authorized`);
+ core.info(`✅ Bot '${actorToValidate}' is active on the repository and authorized`);
core.setOutput("is_team_member", "true");
core.setOutput("result", "authorized_bot");
core.setOutput("user_permission", "bot");
return;
} else if (botStatus.isBot && !botStatus.isActive) {
- const errorMessage = `Access denied: Bot '${actor}' is not active/installed on this repository`;
- core.warning(`Bot '${actor}' is in the allowed list but not active/installed on ${owner}/${repo}`);
+ const errorMessage = `Access denied: Bot '${actorToValidate}' is not active/installed on this repository`;
+ core.warning(`Bot '${actorToValidate}' is in the allowed list but not active/installed on ${owner}/${repo}`);
core.setOutput("is_team_member", "false");
core.setOutput("result", "bot_not_active");
core.setOutput("user_permission", result.permission ?? "bot");
@@ -102,7 +190,7 @@ async function main() {
await writeDenialSummary(errorMessage, "The bot is in the allowed list but is not installed or active on this repository. Install the GitHub App and try again.");
return;
} else {
- core.info(`Actor '${actor}' is in allowed bots list but bot status check failed`);
+ core.info(`Actor '${actorToValidate}' is in allowed bots list but bot status check failed`);
}
}
}
@@ -116,7 +204,7 @@ async function main() {
await writeDenialSummary(errorMessage, "The permission check failed with a GitHub API error. Check the `pre_activation` job log for details.");
} else {
const errorMessage =
- `Access denied: User '${actor}' is not authorized. Required permissions: ${requiredPermissions.join(", ")}. ` +
+ `Access denied: User '${actorToValidate}' is not authorized. Required permissions: ${requiredPermissions.join(", ")}. ` +
`To allow this user to run the workflow, add their role to the frontmatter. Example: roles: [${requiredPermissions.join(", ")}, ${result.permission}]`;
core.setOutput("is_team_member", "false");
core.setOutput("result", "insufficient_permissions");
diff --git a/actions/setup/js/check_membership.test.cjs b/actions/setup/js/check_membership.test.cjs
index 60429abea9..849b2348a6 100644
--- a/actions/setup/js/check_membership.test.cjs
+++ b/actions/setup/js/check_membership.test.cjs
@@ -27,6 +27,9 @@ describe("check_membership.cjs", () => {
repos: {
getCollaboratorPermissionLevel: vi.fn(),
},
+ pulls: {
+ get: vi.fn(),
+ },
},
};
@@ -126,15 +129,17 @@ describe("check_membership.cjs", () => {
expect(mockCore.setOutput).toHaveBeenCalledWith("result", "safe_event");
});
- it("should skip check for workflow_dispatch when write role is allowed", async () => {
+ it("should validate workflow_dispatch when write role is allowed", async () => {
mockContext.eventName = "workflow_dispatch";
process.env.GH_AW_REQUIRED_ROLES = "write,read";
+ mockGithub.rest.repos.getCollaboratorPermissionLevel.mockResolvedValue({
+ data: { permission: "write" },
+ });
await runScript();
- expect(mockCore.info).toHaveBeenCalledWith("✅ Event workflow_dispatch does not require validation (write role allowed)");
- expect(mockCore.setOutput).toHaveBeenCalledWith("is_team_member", "true");
- expect(mockCore.setOutput).toHaveBeenCalledWith("result", "safe_event");
+ expect(mockCore.info).toHaveBeenCalledWith("Event workflow_dispatch requires validation");
+ expect(mockGithub.rest.repos.getCollaboratorPermissionLevel).toHaveBeenCalled();
});
it("should validate workflow_dispatch when write role is not allowed", async () => {
@@ -147,9 +152,68 @@ describe("check_membership.cjs", () => {
await runScript();
- expect(mockCore.info).toHaveBeenCalledWith("Event workflow_dispatch requires validation (write role not allowed)");
+ expect(mockCore.info).toHaveBeenCalledWith("Event workflow_dispatch requires validation");
expect(mockGithub.rest.repos.getCollaboratorPermissionLevel).toHaveBeenCalled();
});
+
+ it("should validate centralized workflow_dispatch using aw_context actor", async () => {
+ mockContext.eventName = "workflow_dispatch";
+ mockContext.actor = "github-actions[bot]";
+ mockContext.payload = {
+ inputs: {
+ aw_context: JSON.stringify({
+ command_name: "triage",
+ actor: "octocat",
+ }),
+ },
+ };
+ process.env.GH_AW_REQUIRED_ROLES = "write";
+ mockGithub.rest.repos.getCollaboratorPermissionLevel.mockResolvedValue({
+ data: { permission: "write" },
+ });
+
+ await runScript();
+
+ expect(mockCore.info).toHaveBeenCalledWith("Validating centralized workflow_dispatch against originating actor 'octocat'");
+ expect(mockGithub.rest.repos.getCollaboratorPermissionLevel).toHaveBeenCalledWith({
+ owner: "testorg",
+ repo: "testrepo",
+ username: "octocat",
+ });
+ expect(mockCore.setOutput).toHaveBeenCalledWith("result", "authorized");
+ });
+
+ it("should deny centralized workflow_dispatch from fork-based pull requests", async () => {
+ mockContext.eventName = "workflow_dispatch";
+ mockContext.actor = "github-actions[bot]";
+ mockContext.payload = {
+ inputs: {
+ aw_context: JSON.stringify({
+ command_name: "triage",
+ actor: "octocat",
+ item_type: "pull_request",
+ item_number: "42",
+ }),
+ },
+ };
+ process.env.GH_AW_REQUIRED_ROLES = "write";
+ mockGithub.rest.pulls.get.mockResolvedValue({
+ data: {
+ head: { repo: { full_name: "someone/fork" } },
+ base: { repo: { full_name: "testorg/testrepo" } },
+ },
+ });
+
+ await runScript();
+
+ expect(mockGithub.rest.pulls.get).toHaveBeenCalledWith({
+ owner: "testorg",
+ repo: "testrepo",
+ pull_number: 42,
+ });
+ expect(mockGithub.rest.repos.getCollaboratorPermissionLevel).not.toHaveBeenCalled();
+ expect(mockCore.setOutput).toHaveBeenCalledWith("result", "fork_pull_request");
+ });
});
describe("configuration validation", () => {
diff --git a/docs/adr/31605-centralized-slash-command-routing.md b/docs/adr/31605-centralized-slash-command-routing.md
new file mode 100644
index 0000000000..b19259435a
--- /dev/null
+++ b/docs/adr/31605-centralized-slash-command-routing.md
@@ -0,0 +1,97 @@
+# ADR-31605: Centralized Slash-Command Routing via Generated Agentic Router Workflow
+
+**Date**: 2026-05-12
+**Status**: Draft
+**Deciders**: Unknown — *[TODO: PR author to confirm]*
+
+---
+
+## Part 1 — Narrative (Human-Friendly)
+
+### Context
+
+Each slash-command workflow in this repository previously registered its own listeners for `issues`, `issue_comment`, `pull_request`, `pull_request_review_comment`, `discussion`, and `discussion_comment`. With many such workflows (e.g. `/archie`, `/cloclo`, and a growing fleet), this caused every comment, issue, or PR event to wake up many compiled lock files, each evaluating a long `if:` expression to decide whether its slash command matched. The duplication inflated GitHub Actions usage, made permissions sprawl across workflows, and produced large compiled `if:` predicates that were hard to read and maintain. The compiler also lacked guidance to nudge authors toward a shared router as the slash-command fleet grew.
+
+### Decision
+
+We will introduce a `centralized` strategy for `on.slash_command` and have the compiler generate a single shared router workflow at `.github/workflows/agentic_commands.yml` that owns the merged set of slash-command events and dispatches matching target workflows via `workflow_dispatch` with an `aw_context` input. Participating workflows (those declaring `strategy: centralized`) compile to `workflow_dispatch`-only triggers, retaining their non-slash events (e.g. label-only triggers) but delegating slash detection to the central router. The compiler additionally emits a warning recommending `strategy: centralized` once three or more slash commands are detected and some remain non-centralized, so the convention scales as the fleet grows.
+
+### Alternatives Considered
+
+#### Alternative 1: Keep per-workflow inline listeners (status quo)
+
+Each slash-command workflow continues to declare its own event listeners and inline `if:` predicate. This is simple and decentralized — each workflow is self-contained — but it scales poorly: every comment/issue/PR event fans out to N workflows, each evaluating its own progressively longer `if:` expression. It was rejected because the cost is visible today (duplicated runs, large generated predicates) and grows linearly with the slash-command fleet.
+
+#### Alternative 2: One static, hand-maintained router workflow
+
+Maintain `agentic_commands.yml` by hand and require workflow authors to register their command in it manually. This avoids compiler complexity but reintroduces a long-running coordination problem: every new slash command requires editing a shared file, and the registry can drift from the per-workflow frontmatter. Rejected because compiler-generation of the router from frontmatter (`strategy: centralized`) preserves a single source of truth and avoids merge conflicts on the shared router.
+
+#### Alternative 3: Use a `repository_dispatch` or external broker
+
+Forward slash events to an external service (or `repository_dispatch`) that then triggers the right workflow. This decouples GitHub event listeners entirely but adds an out-of-repo dependency, new auth surface, and operational risk. Rejected because `workflow_dispatch` + a generated router stays inside GitHub Actions and requires no new infrastructure.
+
+### Consequences
+
+#### Positive
+- One shared workflow (`agentic_commands.yml`) handles slash-event listening for all participating commands, replacing N copies of the same listener set.
+- Generated `if:` predicates on participating workflows shrink from large slash-text expressions to simple `workflow_dispatch`-gated activations, improving readability of lock files.
+- Workflow-level permissions on participating lock files are reduced (e.g. dropping `issues: write` / `pull-requests: write` from activation jobs that no longer need them) because routing logic lives in the central router with its own scoped `actions: write` job permission.
+- A compile-time warning nudges authors toward `strategy: centralized` once three or more slash commands exist, making the convention discoverable without a manual migration push.
+
+#### Negative
+- Adds a new generated file (`.github/workflows/agentic_commands.yml`) whose lifecycle is owned by the compiler — contributors must understand it is regenerated and not edited by hand.
+- Introduces an indirection: a slash command now arrives via `workflow_dispatch` triggered by another workflow, which complicates debugging (two runs to inspect instead of one) and shifts some auth context onto `aw_context`.
+- The router holds `actions: write` permission to dispatch other workflows; a bug in routing logic could dispatch the wrong workflow, so the route map and event-matching filter must remain trustworthy.
+- Legacy trigger-file handling for `agentics-slash-command-trigger.yml` was removed; any external references to that file name become stale.
+
+#### Neutral
+- Two existing workflows (`/archie`, `/cloclo`) were migrated as part of this PR; remaining slash workflows continue to use the default inline strategy until opted in.
+- The router propagates the resolved command name to the dispatched workflow via `aw_context.command_name`, requiring `check_command_position.cjs` to learn a new `workflow_dispatch` code path (added in this PR with tests).
+- Documentation under `docs/src/content/docs/reference/command-triggers.md` was updated to describe both strategies side-by-side.
+
+---
+
+## Part 2 — Normative Specification (RFC 2119)
+
+> The key words **MUST**, **MUST NOT**, **REQUIRED**, **SHALL**, **SHALL NOT**, **SHOULD**, **SHOULD NOT**, **RECOMMENDED**, **MAY**, and **OPTIONAL** in this section are to be interpreted as described in [RFC 2119](https://www.rfc-editor.org/rfc/rfc2119).
+
+### Centralized Strategy Selection
+
+1. A slash-command workflow **MAY** opt into centralized routing by setting `on.slash_command.strategy: centralized` in its frontmatter.
+2. The compiler **MUST** treat `strategy: centralized` as the participation flag — a workflow without that key **MUST NOT** be wired into the central router.
+3. When at least one workflow opts into `strategy: centralized`, the compiler **MUST** generate exactly one router workflow file at `.github/workflows/agentic_commands.yml`.
+4. The generated router file **MUST** be regenerable from frontmatter alone and **MUST NOT** be hand-edited; contributors **SHOULD** treat it as compiler output.
+
+### Router Workflow Structure
+
+1. The generated router **MUST** declare `permissions: {}` at the top level (no workflow-wide permissions).
+2. The router job named `route` **MUST** declare scoped job-level permissions of at minimum `actions: write` and `contents: read`, and **MUST NOT** declare broader permissions than required to dispatch participating workflows.
+3. The router **MUST** listen on the **union** of slash-event types declared by participating workflows (e.g. `issues`, `issue_comment`, `pull_request`, `pull_request_review_comment`, `discussion`, `discussion_comment`) and **MUST NOT** listen on events for which no participating workflow has subscribed.
+4. The router **MUST** dispatch a participating workflow only when both the command name (parsed from the first token of the payload body) and the inbound event identifier match an entry in the generated route map.
+5. The router **MUST** pass an `aw_context` JSON input containing at least `command_name` to the dispatched workflow.
+
+### Participating Workflow Compilation
+
+1. A workflow with `strategy: centralized` **MUST** compile with `workflow_dispatch` as a trigger and **MUST** accept an `aw_context` string input.
+2. A workflow with `strategy: centralized` **MUST NOT** re-declare slash-text matching on `issue_comment`, `pull_request_review_comment`, `discussion`, or `discussion_comment` in its compiled lock file; slash matching is the router's responsibility.
+3. A workflow with `strategy: centralized` **MAY** retain non-slash listeners that do not collide with slash routing (for example, label-only triggers on `issues`, `pull_request`, or `discussion`).
+4. The compiled activation `if:` predicate of a centralized workflow **MUST NOT** include slash-text inspection of payload bodies.
+
+### Inbound Command Resolution
+
+1. Setup logic processing `workflow_dispatch` events **MUST** read `command_name` from the JSON payload of the `aw_context` workflow input (i.e. `context.payload.inputs.aw_context`) when present.
+2. If `aw_context.command_name` is present, the setup logic **MUST** verify it is in the configured commands list and **MUST** fail the command-position check (emit a denial summary) when it is not.
+3. Manual `workflow_dispatch` invocations without `aw_context.command_name` **SHOULD** pass the command-position check to preserve existing manual-run behavior.
+
+### Compiler Guidance
+
+1. The compiler **SHOULD** emit a warning recommending `strategy: centralized` when three or more slash commands are detected in the repository and at least one of them does not declare `strategy: centralized`.
+2. The warning **MUST NOT** block compilation; it is advisory only.
+
+### Conformance
+
+An implementation is considered conformant with this ADR if it satisfies all **MUST** and **MUST NOT** requirements above. Failure to meet any **MUST** or **MUST NOT** requirement constitutes non-conformance.
+
+---
+
+*This is a DRAFT ADR generated by the [Design Decision Gate](https://github.com/github/gh-aw/actions/runs/25712590786) workflow. The PR author must review, complete, and finalize this document before the PR can merge.*
diff --git a/docs/src/content/docs/reference/command-triggers.md b/docs/src/content/docs/reference/command-triggers.md
index dc64089232..7b03f0b579 100644
--- a/docs/src/content/docs/reference/command-triggers.md
+++ b/docs/src/content/docs/reference/command-triggers.md
@@ -68,7 +68,21 @@ on:
schedule: weekly on monday
```
-**Note**: You cannot combine `slash_command` with `issues`, `issue_comment`, or `pull_request` as they would conflict.
+### Centralized trigger strategy
+
+Set `on.slash_command.strategy: centralized` to opt a workflow into centralized slash-command routing.
+When enabled, the workflow compiles as `workflow_dispatch`-centric, and the compiler generates one
+shared `agentic_commands.yml` workflow that listens to merged slash-command events and
+dispatches matching target workflows with `aw_context`.
+
+```yaml wrap
+on:
+ slash_command:
+ name: my-bot
+ strategy: centralized
+```
+
+**Note**: With default inline strategy, you cannot combine `slash_command` with `issues`, `issue_comment`, or `pull_request` as they would conflict. With `strategy: centralized`, non-slash events are preserved because slash matching is handled in the generated central trigger workflow.
**Exception for Label-Only Events**: You CAN combine `slash_command` with `issues` or `pull_request` if those events are configured for label-only triggers (`labeled` or `unlabeled` types only). This allows workflows to respond to slash commands while also reacting to label changes.
diff --git a/docs/src/content/docs/reference/frontmatter-full.md b/docs/src/content/docs/reference/frontmatter-full.md
index 4be4ea8db6..1d1c7da487 100644
--- a/docs/src/content/docs/reference/frontmatter-full.md
+++ b/docs/src/content/docs/reference/frontmatter-full.md
@@ -152,6 +152,14 @@ on:
name: []
# Array items: Command name without leading slash
+ # Trigger compilation strategy for slash commands.
+ # - "inline" (default): compile comment/body listeners directly in this workflow
+ # - "centralized" (experimental): compile this workflow as
+ # workflow_dispatch-centric and route slash command events via the generated
+ # central trigger workflow.
+ # (optional)
+ strategy: "centralized"
+
# Events where the command should be active. Default is all comment-related events
# ('*'). Use GitHub Actions event names.
# (optional)
diff --git a/docs/src/content/docs/reference/triggers.md b/docs/src/content/docs/reference/triggers.md
index fd85b18a9b..acfa535f3b 100644
--- a/docs/src/content/docs/reference/triggers.md
+++ b/docs/src/content/docs/reference/triggers.md
@@ -395,6 +395,7 @@ on:
slash_command:
name: investigate
events: [issues, issue_comment] # Only respond in issue contexts
+ # strategy: centralized # Optional: route via generated central trigger workflow
```
See [Command Triggers](/gh-aw/reference/command-triggers/) for complete documentation including event filtering, context text, reactions, and examples.
diff --git a/pkg/cli/compile_pipeline.go b/pkg/cli/compile_pipeline.go
index 23e5dc72a1..3089398230 100644
--- a/pkg/cli/compile_pipeline.go
+++ b/pkg/cli/compile_pipeline.go
@@ -346,6 +346,9 @@ func compileAllFilesInDirectory(
}
}
+ // Emit recommendation when many slash commands are present without centralized strategy.
+ displayCentralizedSlashCommandRecommendation(compiler, workflowDataList, config.JSONOutput)
+
// Get warning count from compiler
stats.Warnings = compiler.GetWarningCount()
@@ -525,6 +528,11 @@ func runPostProcessingForDirectory(
return err
}
}
+ if err := generateCentralSlashCommandWorkflowWrapper(workflowDataList, absWorkflowDir, config.Strict); err != nil {
+ if config.Strict {
+ return err
+ }
+ }
}
// Prune stale gh-aw-actions entries before saving
diff --git a/pkg/cli/compile_post_processing.go b/pkg/cli/compile_post_processing.go
index 4746c45fd0..e08f6c9bb6 100644
--- a/pkg/cli/compile_post_processing.go
+++ b/pkg/cli/compile_post_processing.go
@@ -100,6 +100,25 @@ func generateMaintenanceWorkflowWrapper(
return nil
}
+// generateCentralSlashCommandWorkflowWrapper generates a single centralized
+// slash-command trigger workflow for all participating workflows.
+func generateCentralSlashCommandWorkflowWrapper(
+ workflowDataList []*workflow.WorkflowData,
+ workflowsDir string,
+ strict bool,
+) error {
+ compilePostProcessingLog.Print("Generating centralized slash-command workflow")
+
+ if err := workflow.GenerateCentralSlashCommandWorkflow(workflowDataList, workflowsDir); err != nil {
+ if strict {
+ return fmt.Errorf("failed to generate centralized slash-command workflow: %w", err)
+ }
+ fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to generate centralized slash-command workflow: %v", err)))
+ }
+
+ return nil
+}
+
// purgeOrphanedLockFiles removes orphaned .lock.yml files
// These are lock files that exist but don't have a corresponding .md file
func purgeOrphanedLockFiles(workflowsDir string, expectedLockFiles []string, verbose bool) error {
@@ -217,6 +236,38 @@ func displaySafeUpdateWarnings(compiler *workflow.Compiler, jsonOutput bool) {
}
}
+// displayCentralizedSlashCommandRecommendation warns when a repository has many
+// slash commands still using non-centralized strategy.
+func displayCentralizedSlashCommandRecommendation(compiler *workflow.Compiler, workflowDataList []*workflow.WorkflowData, jsonOutput bool) {
+ if jsonOutput {
+ return
+ }
+
+ totalSlashCommands := 0
+ nonCentralizedSlashCommands := 0
+ for _, wd := range workflowDataList {
+ if wd == nil || len(wd.Command) == 0 {
+ continue
+ }
+ totalSlashCommands += len(wd.Command)
+ if !wd.CommandCentralized {
+ nonCentralizedSlashCommands += len(wd.Command)
+ }
+ }
+
+ if totalSlashCommands < 3 || nonCentralizedSlashCommands == 0 {
+ return
+ }
+
+ msg := fmt.Sprintf(
+ "Detected %d slash_command entries in this repository; %d are not using centralized routing. Consider setting `on.slash_command.strategy: centralized` to reduce duplicate triggers and route through `agentic_commands.yml`.",
+ totalSlashCommands,
+ nonCentralizedSlashCommands,
+ )
+ fmt.Fprintln(os.Stderr, console.FormatWarningMessage(msg))
+ compiler.IncrementWarningCount()
+}
+
// pruneStaleActionCacheEntries removes stale gh-aw-actions entries from the
// action cache whose version does not match the compiler's current version.
// This prevents actions-lock.json from accumulating entries for old compiler
diff --git a/pkg/cli/compile_post_processing_warning_test.go b/pkg/cli/compile_post_processing_warning_test.go
new file mode 100644
index 0000000000..fa9ac5fd2e
--- /dev/null
+++ b/pkg/cli/compile_post_processing_warning_test.go
@@ -0,0 +1,81 @@
+//go:build !integration
+
+package cli
+
+import (
+ "testing"
+
+ "github.com/github/gh-aw/pkg/testutil"
+ "github.com/github/gh-aw/pkg/workflow"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDisplayCentralizedSlashCommandRecommendation(t *testing.T) {
+ tests := []struct {
+ name string
+ workflows []*workflow.WorkflowData
+ jsonOutput bool
+ expectWarning bool
+ expectedWarnCount int
+ }{
+ {
+ name: "warns when three slash commands include non centralized workflows",
+ workflows: []*workflow.WorkflowData{
+ {Command: []string{"a"}, CommandCentralized: false},
+ {Command: []string{"b"}, CommandCentralized: false},
+ {Command: []string{"c"}, CommandCentralized: true},
+ },
+ expectWarning: true,
+ expectedWarnCount: 1,
+ },
+ {
+ name: "does not warn when fewer than three slash commands exist",
+ workflows: []*workflow.WorkflowData{
+ {Command: []string{"a"}, CommandCentralized: false},
+ {Command: []string{"b"}, CommandCentralized: false},
+ },
+ expectWarning: false,
+ expectedWarnCount: 0,
+ },
+ {
+ name: "does not warn when all slash commands are centralized",
+ workflows: []*workflow.WorkflowData{
+ {Command: []string{"a"}, CommandCentralized: true},
+ {Command: []string{"b"}, CommandCentralized: true},
+ {Command: []string{"c"}, CommandCentralized: true},
+ },
+ expectWarning: false,
+ expectedWarnCount: 0,
+ },
+ {
+ name: "does not warn for json output mode",
+ workflows: []*workflow.WorkflowData{
+ {Command: []string{"a"}, CommandCentralized: false},
+ {Command: []string{"b"}, CommandCentralized: false},
+ {Command: []string{"c"}, CommandCentralized: false},
+ },
+ jsonOutput: true,
+ expectWarning: false,
+ expectedWarnCount: 0,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ compiler := workflow.NewCompiler()
+
+ stderrOutput := testutil.CaptureStderr(t, func() {
+ displayCentralizedSlashCommandRecommendation(compiler, tt.workflows, tt.jsonOutput)
+ })
+
+ if tt.expectWarning {
+ require.Contains(t, stderrOutput, "Consider setting `on.slash_command.strategy: centralized`")
+ require.Contains(t, stderrOutput, "Detected 3 slash_command entries")
+ } else {
+ require.NotContains(t, stderrOutput, "on.slash_command.strategy: centralized")
+ }
+
+ require.Equal(t, tt.expectedWarnCount, compiler.GetWarningCount())
+ })
+ }
+}
diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json
index 9c81d81df2..e25007923a 100644
--- a/pkg/parser/schemas/main_workflow_schema.json
+++ b/pkg/parser/schemas/main_workflow_schema.json
@@ -459,6 +459,11 @@
"maxItems": 25
}
]
+ },
+ "strategy": {
+ "type": "string",
+ "description": "Slash command trigger compilation strategy. 'inline' (default) compiles direct comment listeners in this workflow. 'centralized' compiles this workflow as workflow_dispatch-centric and routes slash events via the generated central trigger workflow.",
+ "enum": ["inline", "centralized"]
}
},
"additionalProperties": false
@@ -9900,9 +9905,24 @@
{
"properties": {
"slash_command": {
- "not": {
- "type": "null"
- }
+ "allOf": [
+ {
+ "not": {
+ "type": "null"
+ }
+ },
+ {
+ "not": {
+ "type": "object",
+ "properties": {
+ "strategy": {
+ "const": "centralized"
+ }
+ },
+ "required": ["strategy"]
+ }
+ }
+ ]
}
},
"required": ["slash_command"]
diff --git a/pkg/workflow/central_slash_command_workflow.go b/pkg/workflow/central_slash_command_workflow.go
new file mode 100644
index 0000000000..d490fd1177
--- /dev/null
+++ b/pkg/workflow/central_slash_command_workflow.go
@@ -0,0 +1,256 @@
+package workflow
+
+import (
+ "encoding/json"
+ "fmt"
+ "os"
+ "path/filepath"
+ "slices"
+ "sort"
+ "strings"
+
+ "github.com/github/gh-aw/pkg/logger"
+)
+
+var centralSlashCommandWorkflowLog = logger.New("workflow:central_slash_command_workflow")
+
+const (
+ centralSlashCommandWorkflowFilename = "agentic_commands.yml"
+ legacyCentralSlashCommandWorkflowFilename = "agentic_slash_commands.yml"
+)
+
+type slashCommandRoute struct {
+ Workflow string `json:"workflow"`
+ Events []string `json:"events"`
+}
+
+// GenerateCentralSlashCommandWorkflow generates a single centralized slash-command trigger
+// workflow for workflows that opt into on.slash_command.strategy: centralized.
+// When no centralized slash-command workflows are found, any existing generated file is deleted.
+func GenerateCentralSlashCommandWorkflow(workflowDataList []*WorkflowData, workflowDir string) error {
+ centralSlashCommandWorkflowLog.Printf("Generating centralized slash-command workflow from %d workflow(s)", len(workflowDataList))
+ routesByCommand, mergedEvents := collectCentralSlashCommandRoutes(workflowDataList)
+
+ triggerFile := filepath.Join(workflowDir, centralSlashCommandWorkflowFilename)
+ legacyTriggerFile := filepath.Join(workflowDir, legacyCentralSlashCommandWorkflowFilename)
+ if len(routesByCommand) == 0 || len(mergedEvents) == 0 {
+ centralSlashCommandWorkflowLog.Print("No centralized slash-command participants found")
+ if err := removeIfExists(triggerFile); err != nil {
+ return fmt.Errorf("failed to delete centralized slash-command workflow: %w", err)
+ }
+ if err := cleanupLegacyCentralSlashCommandWorkflow(legacyTriggerFile); err != nil {
+ return err
+ }
+ return nil
+ }
+
+ content, err := buildCentralSlashCommandWorkflowYAML(routesByCommand, mergedEvents)
+ if err != nil {
+ return err
+ }
+
+ if err := os.WriteFile(triggerFile, []byte(content), 0644); err != nil {
+ return fmt.Errorf("failed to write centralized slash-command workflow: %w", err)
+ }
+ if err := cleanupLegacyCentralSlashCommandWorkflow(legacyTriggerFile); err != nil {
+ return err
+ }
+ centralSlashCommandWorkflowLog.Printf("Wrote centralized slash-command workflow: %s", triggerFile)
+ return nil
+}
+
+func cleanupLegacyCentralSlashCommandWorkflow(path string) error {
+ if err := removeIfExists(path); err != nil {
+ return fmt.Errorf("failed to delete legacy centralized slash-command workflow: %w", err)
+ }
+ return nil
+}
+
+// removeIfExists deletes path, treating a missing file as success.
+func removeIfExists(path string) error {
+	// Remove directly instead of stat-then-remove to avoid a TOCTOU race.
+	if err := os.Remove(path); err != nil && !os.IsNotExist(err) {
+		return err
+	}
+	return nil
+}
+
+func collectCentralSlashCommandRoutes(workflowDataList []*WorkflowData) (map[string][]slashCommandRoute, map[string]map[string]bool) {
+ routesByCommand := make(map[string][]slashCommandRoute)
+ mergedEvents := make(map[string]map[string]bool)
+
+ for _, wd := range workflowDataList {
+ if wd == nil || !wd.CommandCentralized || len(wd.Command) == 0 {
+ continue
+ }
+
+ filteredEvents := FilterCommentEvents(wd.CommandEvents)
+ if len(filteredEvents) == 0 {
+ continue
+ }
+
+ routeEvents := GetCommentEventNames(filteredEvents)
+ routeEvents = uniqueSorted(routeEvents)
+ if len(routeEvents) == 0 {
+ continue
+ }
+
+ // Merge workflow-level subscriptions using YAML-ready GitHub event names.
+ for _, event := range MergeEventsForYAML(filteredEvents) {
+ if mergedEvents[event.EventName] == nil {
+ mergedEvents[event.EventName] = make(map[string]bool)
+ }
+ for _, t := range event.Types {
+ mergedEvents[event.EventName][t] = true
+ }
+ }
+
+ for _, commandName := range wd.Command {
+ route := slashCommandRoute{
+ Workflow: wd.WorkflowID,
+ Events: slices.Clone(routeEvents),
+ }
+ routesByCommand[commandName] = append(routesByCommand[commandName], route)
+ }
+ }
+
+ // Stable ordering for deterministic output.
+ for commandName := range routesByCommand {
+ sort.Slice(routesByCommand[commandName], func(i, j int) bool {
+ return routesByCommand[commandName][i].Workflow < routesByCommand[commandName][j].Workflow
+ })
+ }
+
+ return routesByCommand, mergedEvents
+}
+
+func buildCentralSlashCommandWorkflowYAML(routesByCommand map[string][]slashCommandRoute, mergedEvents map[string]map[string]bool) (string, error) {
+ routesJSON, err := json.Marshal(routesByCommand)
+ if err != nil {
+ return "", fmt.Errorf("failed to marshal centralized slash-command routes: %w", err)
+ }
+
+ header := GenerateWorkflowHeader("", "pkg/workflow/central_slash_command_workflow.go", "")
+
+ var b strings.Builder
+ b.WriteString(header)
+ b.WriteString(`name: "Agentic Slash Command Trigger"
+
+on:
+`)
+ writeCentralSlashEventsYAML(&b, mergedEvents)
+ b.WriteString(`
+permissions: {}
+
+jobs:
+ route:
+ runs-on: ubuntu-slim
+ permissions:
+ actions: write
+ contents: read
+ steps:
+ - name: Checkout repository
+ uses: ` + getActionPin("actions/checkout") + `
+
+ - name: Route slash command
+ uses: ` + getActionPin("actions/github-script") + `
+ env:
+ GH_AW_SLASH_ROUTING: '` + escapeSingleQuotedYAMLString(string(routesJSON)) + `'
+ with:
+ script: |
+ const routeMap = JSON.parse(process.env.GH_AW_SLASH_ROUTING || "{}");
+ const bodyByEvent = {
+ issues: context.payload?.issue?.body ?? "",
+ pull_request: context.payload?.pull_request?.body ?? "",
+ issue_comment: context.payload?.comment?.body ?? "",
+ pull_request_review_comment: context.payload?.comment?.body ?? "",
+ discussion: context.payload?.discussion?.body ?? "",
+ discussion_comment: context.payload?.comment?.body ?? "",
+ };
+
+ function eventIdentifier() {
+ if (context.eventName !== "issue_comment") {
+ return context.eventName;
+ }
+ return context.payload?.issue?.pull_request ? "pull_request_comment" : "issue_comment";
+ }
+
+ const text = bodyByEvent[context.eventName] ?? "";
+ const firstWord = String(text).trim().split(/\s+/)[0] ?? "";
+ if (!firstWord.startsWith("/")) {
+ core.info("No slash command found at start of payload text; skipping dispatch.");
+ return;
+ }
+
+ const commandName = firstWord.slice(1);
+ const identifier = eventIdentifier();
+ const routes = (routeMap[commandName] ?? []).filter(route => Array.isArray(route.events) && route.events.includes(identifier));
+ if (routes.length === 0) {
+ core.info("No centralized routes matched command '/" + commandName + "' for event '" + identifier + "'.");
+ return;
+ }
+
+ const { setupGlobals } = require(process.env.GITHUB_WORKSPACE + "/actions/setup/js/setup_globals.cjs");
+ setupGlobals(core, github, context, exec, io, getOctokit);
+ const { buildAwContext } = require(process.env.GITHUB_WORKSPACE + "/actions/setup/js/aw_context.cjs");
+
+ const ref = process.env.GITHUB_HEAD_REF ? "refs/heads/" + process.env.GITHUB_HEAD_REF : (process.env.GITHUB_REF || context.ref || "refs/heads/" + (context.payload?.repository?.default_branch || "main"));
+ for (const route of routes) {
+ const awContext = buildAwContext();
+ awContext.command_name = commandName;
+ await github.rest.actions.createWorkflowDispatch({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ workflow_id: route.workflow + ".lock.yml",
+ ref,
+ inputs: {
+ aw_context: JSON.stringify(awContext),
+ },
+ });
+ core.info("Dispatched '" + route.workflow + "' for '/" + commandName + "'");
+ }
+`)
+ return b.String(), nil
+}
+
+func writeCentralSlashEventsYAML(b *strings.Builder, mergedEvents map[string]map[string]bool) {
+ eventOrder := []string{
+ "issues",
+ "issue_comment",
+ "pull_request",
+ "pull_request_review_comment",
+ "discussion",
+ "discussion_comment",
+ }
+
+ for _, eventName := range eventOrder {
+ typeSet := mergedEvents[eventName]
+ if len(typeSet) == 0 {
+ continue
+ }
+ types := make([]string, 0, len(typeSet))
+ for t := range typeSet {
+ types = append(types, t)
+ }
+ sort.Strings(types)
+ b.WriteString(" " + eventName + ":\n")
+ b.WriteString(" types: [" + strings.Join(types, ", ") + "]\n")
+ }
+}
+
+func uniqueSorted(values []string) []string {
+ seen := make(map[string]bool, len(values))
+ for _, v := range values {
+ seen[v] = true
+ }
+ result := make([]string, 0, len(seen))
+ for v := range seen {
+ result = append(result, v)
+ }
+ sort.Strings(result)
+ return result
+}
+
+func escapeSingleQuotedYAMLString(input string) string {
+ return strings.ReplaceAll(input, "'", "''")
+}
diff --git a/pkg/workflow/central_slash_command_workflow_test.go b/pkg/workflow/central_slash_command_workflow_test.go
new file mode 100644
index 0000000000..def700a360
--- /dev/null
+++ b/pkg/workflow/central_slash_command_workflow_test.go
@@ -0,0 +1,135 @@
+//go:build !integration
+
+package workflow
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/github/gh-aw/pkg/testutil"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGenerateCentralSlashCommandWorkflow_GeneratesWorkflow(t *testing.T) {
+ tmpDir := testutil.TempDir(t, "central-slash-workflow-test")
+
+ data := []*WorkflowData{
+ {
+ WorkflowID: "triage-issue",
+ Command: []string{"triage"},
+ CommandEvents: []string{"issue_comment", "issues"},
+ CommandCentralized: true,
+ },
+ {
+ WorkflowID: "triage-pr",
+ Command: []string{"triage"},
+ CommandEvents: []string{"pull_request", "pull_request_comment"},
+ CommandCentralized: true,
+ },
+ {
+ WorkflowID: "cloclo",
+ Command: []string{"cloclo"},
+ CommandEvents: []string{"discussion_comment"},
+ CommandCentralized: true,
+ },
+ }
+
+ require.NoError(t, GenerateCentralSlashCommandWorkflow(data, tmpDir))
+
+ generatedPath := filepath.Join(tmpDir, centralSlashCommandWorkflowFilename)
+ content, err := os.ReadFile(generatedPath)
+ require.NoError(t, err)
+ text := string(content)
+
+ require.Contains(t, text, "name: \"Agentic Slash Command Trigger\"")
+ require.Contains(t, text, "permissions: {}")
+ require.Contains(t, text, " permissions:\n actions: write\n contents: read")
+ require.Contains(t, text, "issues:")
+ require.Contains(t, text, "issue_comment:")
+ require.Contains(t, text, "pull_request:")
+ require.Contains(t, text, "discussion_comment:")
+ require.Contains(t, text, `"triage":[{"workflow":"triage-issue","events":["issue_comment","issues"]},{"workflow":"triage-pr","events":["pull_request","pull_request_comment"]}]`)
+ require.Contains(t, text, `"cloclo":[{"workflow":"cloclo","events":["discussion_comment"]}]`)
+ require.Contains(t, text, `const routes = (routeMap[commandName] ?? []).filter(route => Array.isArray(route.events) && route.events.includes(identifier));`)
+ require.NotContains(t, text, `trustedAuthorAssociations`)
+ require.NotContains(t, text, `isForkBasedPullRequestEvent`)
+ require.Contains(t, text, `workflow_id: route.workflow + ".lock.yml"`)
+}
+
+func TestGenerateCentralSlashCommandWorkflow_DeletesWhenUnused(t *testing.T) {
+ tmpDir := testutil.TempDir(t, "central-slash-workflow-delete-test")
+ generatedPath := filepath.Join(tmpDir, centralSlashCommandWorkflowFilename)
+ require.NoError(t, os.WriteFile(generatedPath, []byte("stale"), 0644))
+
+ data := []*WorkflowData{
+ {
+ WorkflowID: "regular",
+ Command: []string{"regular"},
+ CommandEvents: []string{"issue_comment"},
+ CommandCentralized: false,
+ },
+ }
+
+ require.NoError(t, GenerateCentralSlashCommandWorkflow(data, tmpDir))
+ _, err := os.Stat(generatedPath)
+ require.Error(t, err)
+ require.True(t, os.IsNotExist(err))
+}
+
+func TestRemoveIfExists(t *testing.T) {
+ tmpDir := testutil.TempDir(t, "remove-if-exists-test")
+ existingPath := filepath.Join(tmpDir, "existing.txt")
+ missingPath := filepath.Join(tmpDir, "missing.txt")
+
+ require.NoError(t, os.WriteFile(existingPath, []byte("content"), 0644))
+ require.NoError(t, removeIfExists(existingPath))
+ _, err := os.Stat(existingPath)
+ require.Error(t, err)
+ require.True(t, os.IsNotExist(err))
+
+ require.NoError(t, removeIfExists(missingPath))
+}
+
+func TestCollectCentralSlashCommandRoutes_UnionizesMergedEvents(t *testing.T) {
+ data := []*WorkflowData{
+ {
+ WorkflowID: "triage-issue",
+ Command: []string{"triage"},
+ CommandEvents: []string{"issues", "issue_comment"},
+ CommandCentralized: true,
+ },
+ {
+ WorkflowID: "triage-pr",
+ Command: []string{"triage"},
+ CommandEvents: []string{"pull_request", "pull_request_comment"},
+ CommandCentralized: true,
+ },
+ {
+ WorkflowID: "non-centralized",
+ Command: []string{"triage"},
+ CommandEvents: []string{"discussion"},
+ CommandCentralized: false,
+ },
+ }
+
+ routesByCommand, mergedEvents := collectCentralSlashCommandRoutes(data)
+
+ require.Equal(t, []slashCommandRoute{
+ {Workflow: "triage-issue", Events: []string{"issue_comment", "issues"}},
+ {Workflow: "triage-pr", Events: []string{"pull_request", "pull_request_comment"}},
+ }, routesByCommand["triage"])
+
+ require.ElementsMatch(t, []string{"opened", "edited", "reopened"}, typeSetKeys(mergedEvents["issues"]))
+ require.ElementsMatch(t, []string{"created", "edited"}, typeSetKeys(mergedEvents["issue_comment"]))
+ require.ElementsMatch(t, []string{"opened", "edited", "reopened"}, typeSetKeys(mergedEvents["pull_request"]))
+ require.NotContains(t, mergedEvents, "discussion")
+}
+
+func typeSetKeys(typeSet map[string]bool) []string {
+ out := make([]string, 0, len(typeSet))
+ for key := range typeSet {
+ out = append(out, key)
+ }
+ return out
+}
diff --git a/pkg/workflow/compiler_orchestrator_workflow.go b/pkg/workflow/compiler_orchestrator_workflow.go
index 40e03f2cd1..3873be7ab8 100644
--- a/pkg/workflow/compiler_orchestrator_workflow.go
+++ b/pkg/workflow/compiler_orchestrator_workflow.go
@@ -334,7 +334,7 @@ func (c *Compiler) extractAdditionalConfigurations(
workflowData.RepoMemoryConfig = repoMemoryConfig
// Extract and process mcp-scripts and safe-outputs
- workflowData.Command, workflowData.CommandEvents = c.extractCommandConfig(frontmatter)
+ workflowData.Command, workflowData.CommandEvents, workflowData.CommandCentralized = c.extractCommandConfig(frontmatter)
workflowData.LabelCommand, workflowData.LabelCommandEvents, workflowData.LabelCommandRemoveLabel = c.extractLabelCommandConfig(frontmatter)
workflowData.Jobs = c.extractJobsFromFrontmatter(frontmatter)
diff --git a/pkg/workflow/compiler_safe_outputs.go b/pkg/workflow/compiler_safe_outputs.go
index 00937cc30f..d6b6c2499c 100644
--- a/pkg/workflow/compiler_safe_outputs.go
+++ b/pkg/workflow/compiler_safe_outputs.go
@@ -150,15 +150,19 @@ func (c *Compiler) parseOnSection(frontmatter map[string]any, workflowData *Work
baseName := strings.TrimSuffix(filepath.Base(markdownPath), ".md")
workflowData.Command = []string{baseName}
}
- // Check for conflicting events (but allow issues/pull_request with non-conflicting types: labeled/unlabeled/ready_for_review)
- conflictingEvents := []string{"issues", "issue_comment", "pull_request", "pull_request_review_comment"}
- for _, eventName := range conflictingEvents {
- if eventValue, hasConflict := onMap[eventName]; hasConflict {
- // Special case: allow issues/pull_request with non-conflicting types
- if (eventName == "issues" || eventName == "pull_request") && parser.IsNonConflictingCommandEvent(eventValue) {
- continue // Allow this - it doesn't conflict with command triggers
+ // In centralized mode slash_command no longer compiles broad comment listeners,
+ // so slash/non-slash event co-existence is allowed.
+ if !workflowData.CommandCentralized {
+ // Check for conflicting events (but allow issues/pull_request with non-conflicting types: labeled/unlabeled/ready_for_review)
+ conflictingEvents := []string{"issues", "issue_comment", "pull_request", "pull_request_review_comment"}
+ for _, eventName := range conflictingEvents {
+ if eventValue, hasConflict := onMap[eventName]; hasConflict {
+ // Special case: allow issues/pull_request with non-conflicting types
+ if (eventName == "issues" || eventName == "pull_request") && parser.IsNonConflictingCommandEvent(eventValue) {
+ continue // Allow this - it doesn't conflict with command triggers
+ }
+ return fmt.Errorf("cannot use 'slash_command' with '%s' in the same workflow", eventName)
}
- return fmt.Errorf("cannot use 'slash_command' with '%s' in the same workflow", eventName)
}
}
diff --git a/pkg/workflow/compiler_safe_outputs_test.go b/pkg/workflow/compiler_safe_outputs_test.go
index 6735e33324..b820e3135c 100644
--- a/pkg/workflow/compiler_safe_outputs_test.go
+++ b/pkg/workflow/compiler_safe_outputs_test.go
@@ -21,6 +21,7 @@ func TestParseOnSection(t *testing.T) {
expectedReaction string
expectedLockAgent bool
expectedOn string
+ expectedCentralized bool
checkCommandEvents bool
expectedOtherEvents map[string]any
}{
@@ -129,6 +130,26 @@ func TestParseOnSection(t *testing.T) {
markdownPath: "/path/to/test.md",
expectedError: true,
},
+ {
+ name: "slash_command centralized strategy allows non-slash events",
+ frontmatter: map[string]any{
+ "on": map[string]any{
+ "slash_command": map[string]any{
+ "strategy": "centralized",
+ },
+ "issue_comment": map[string]any{
+ "types": []string{"created"},
+ },
+ },
+ },
+ workflowData: &WorkflowData{CommandCentralized: true},
+ markdownPath: "/path/to/test.md",
+ expectedError: false,
+ expectedCommand: []string{"test"},
+ expectedReaction: "eyes",
+ expectedCentralized: true,
+ checkCommandEvents: true,
+ },
{
name: "slash_command conflicts with issues",
frontmatter: map[string]any{
@@ -277,6 +298,7 @@ func TestParseOnSection(t *testing.T) {
if tt.expectedReaction != "" {
assert.Equal(t, tt.expectedReaction, tt.workflowData.AIReaction, "Reaction mismatch")
}
+ assert.Equal(t, tt.expectedCentralized, tt.workflowData.CommandCentralized, "CommandCentralized mismatch")
assert.Equal(t, tt.expectedLockAgent, tt.workflowData.LockForAgent, "LockForAgent mismatch")
if tt.checkCommandEvents {
assert.NotNil(t, tt.workflowData.CommandOtherEvents, "CommandOtherEvents should be set")
@@ -369,6 +391,23 @@ func TestCompilerMergeSafeJobsFromIncludedConfigs(t *testing.T) {
}
}
+func TestExtractCommandConfig_CentralizedStrategy(t *testing.T) {
+ c := &Compiler{}
+ names, events, centralized := c.extractCommandConfig(map[string]any{
+ "on": map[string]any{
+ "slash_command": map[string]any{
+ "name": "deploy",
+ "events": []any{"issue_comment"},
+ "strategy": "centralized",
+ },
+ },
+ })
+
+ assert.Equal(t, []string{"deploy"}, names)
+ assert.Equal(t, []string{"issue_comment"}, events)
+ assert.True(t, centralized)
+}
+
// TestApplyDefaultTools tests default tool application logic
func TestApplyDefaultTools(t *testing.T) {
tests := []struct {
diff --git a/pkg/workflow/compiler_types.go b/pkg/workflow/compiler_types.go
index bc80d1d609..4ba571a1fc 100644
--- a/pkg/workflow/compiler_types.go
+++ b/pkg/workflow/compiler_types.go
@@ -475,6 +475,7 @@ type WorkflowData struct {
ManualApproval string // environment name for manual approval from on: section
Command []string // for /command trigger support - multiple command names
CommandEvents []string // events where command should be active (nil = all events)
+ CommandCentralized bool // when true, slash_command uses centralized dispatch routing via workflow_dispatch
CommandOtherEvents map[string]any // for merging command with other events
LabelCommand []string // for label-command trigger support - label names that act as commands
LabelCommandEvents []string // events where label-command should be active (nil = all: issues, pull_request, discussion)
diff --git a/pkg/workflow/compiler_validators.go b/pkg/workflow/compiler_validators.go
index 5c3a185c6b..1c459548a5 100644
--- a/pkg/workflow/compiler_validators.go
+++ b/pkg/workflow/compiler_validators.go
@@ -299,6 +299,12 @@ func (c *Compiler) validateToolConfiguration(workflowData *WorkflowData, markdow
c.IncrementWarningCount()
}
+ // Emit experimental warning for centralized slash-command routing strategy
+ if workflowData.CommandCentralized {
+ fmt.Fprintln(os.Stderr, console.FormatWarningMessage("Using experimental feature: slash_command.strategy: centralized"))
+ c.IncrementWarningCount()
+ }
+
// Warn when slash_command and bots are both configured: if a bot listed in bots: posts
// a comment that starts with the slash command text (e.g. /command-name), the
// check_command_position check will pass and the bot will trigger the workflow —
diff --git a/pkg/workflow/frontmatter_extraction_yaml.go b/pkg/workflow/frontmatter_extraction_yaml.go
index 13f82bc39f..75be714a9c 100644
--- a/pkg/workflow/frontmatter_extraction_yaml.go
+++ b/pkg/workflow/frontmatter_extraction_yaml.go
@@ -957,8 +957,9 @@ func (c *Compiler) extractExpressionFromIfString(ifString string) string {
return ifString
}
-// extractCommandConfig extracts command configuration from frontmatter including name and events
-func (c *Compiler) extractCommandConfig(frontmatter map[string]any) (commandNames []string, commandEvents []string) {
+// extractCommandConfig extracts command configuration from frontmatter including name, events,
+// and centralized routing strategy for slash_command.
+func (c *Compiler) extractCommandConfig(frontmatter map[string]any) (commandNames []string, commandEvents []string, commandCentralized bool) {
frontmatterLog.Print("Extracting command configuration from frontmatter")
// Check new format: on.slash_command or on.slash_command.name (preferred)
// Also check legacy format: on.command or on.command.name (deprecated)
@@ -990,12 +991,13 @@ func (c *Compiler) extractCommandConfig(frontmatter map[string]any) (commandName
// Check if command is a string (shorthand format)
if commandStr, ok := commandValue.(string); ok {
frontmatterLog.Printf("Extracted command name (shorthand): %s", commandStr)
- return []string{commandStr}, nil // nil means default (all events)
+ return []string{commandStr}, nil, false // nil means default (all events)
}
// Check if command is a map with a name key (object format)
if commandMap, ok := commandValue.(map[string]any); ok {
var names []string
var events []string
+ centralized := false
if nameValue, hasName := commandMap["name"]; hasName {
// Handle string or array of strings
@@ -1015,14 +1017,20 @@ func (c *Compiler) extractCommandConfig(frontmatter map[string]any) (commandName
events = ParseCommandEvents(eventsValue)
}
- frontmatterLog.Printf("Extracted command config: names=%v, events=%v", names, events)
- return names, events
+ if strategyRaw, hasStrategy := commandMap["strategy"]; hasStrategy {
+ if strategy, ok := strategyRaw.(string); ok && strings.EqualFold(strings.TrimSpace(strategy), "centralized") {
+ centralized = true
+ }
+ }
+
+ frontmatterLog.Printf("Extracted command config: names=%v, events=%v, centralized=%v", names, events, centralized)
+ return names, events, centralized
}
}
}
}
- return nil, nil
+ return nil, nil, false
}
// extractLabelCommandConfig extracts the label-command configuration from frontmatter
diff --git a/pkg/workflow/slash_command_centralized_compile_test.go b/pkg/workflow/slash_command_centralized_compile_test.go
new file mode 100644
index 0000000000..e58706bc9d
--- /dev/null
+++ b/pkg/workflow/slash_command_centralized_compile_test.go
@@ -0,0 +1,84 @@
+//go:build !integration
+
+package workflow
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/github/gh-aw/pkg/stringutil"
+ "github.com/github/gh-aw/pkg/testutil"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCompileWorkflow_SlashCommandCentralizedStrategy(t *testing.T) {
+ tmpDir := testutil.TempDir(t, "workflow-centralized-slash-test")
+
+ markdownPath := filepath.Join(tmpDir, "deploy.md")
+ content := `---
+on:
+ slash_command:
+ name: deploy
+ strategy: centralized
+ push:
+ branches: [main]
+tools:
+ github:
+ allowed: [list_issues]
+---
+
+# Deploy
+`
+ require.NoError(t, os.WriteFile(markdownPath, []byte(content), 0644))
+
+ compiler := NewCompiler()
+ require.NoError(t, compiler.CompileWorkflow(markdownPath))
+
+ lockPath := stringutil.MarkdownToLockFile(markdownPath)
+ lockContent, err := os.ReadFile(lockPath)
+ require.NoError(t, err)
+ compiled := string(lockContent)
+
+ require.Contains(t, compiled, "workflow_dispatch:")
+ require.Contains(t, compiled, "push:")
+ require.NotContains(t, compiled, "issue_comment:")
+ require.NotContains(t, compiled, "pull_request_review_comment:")
+ require.NotContains(t, compiled, "startsWith(github.event.comment.body")
+}
+
+func TestCompileWorkflow_SlashCommandCentralizedWithLabelCommand(t *testing.T) {
+ tmpDir := testutil.TempDir(t, "workflow-centralized-slash-label-test")
+
+ markdownPath := filepath.Join(tmpDir, "triage.md")
+ content := `---
+on:
+ slash_command:
+ name: triage
+ strategy: centralized
+ label_command:
+ name: triage
+ events: [issues]
+tools:
+ github:
+ allowed: [list_issues]
+---
+
+# Triage
+`
+ require.NoError(t, os.WriteFile(markdownPath, []byte(content), 0644))
+
+ compiler := NewCompiler()
+ require.NoError(t, compiler.CompileWorkflow(markdownPath))
+
+ lockPath := stringutil.MarkdownToLockFile(markdownPath)
+ lockContent, err := os.ReadFile(lockPath)
+ require.NoError(t, err)
+ compiled := string(lockContent)
+
+ require.Contains(t, compiled, "workflow_dispatch:")
+ require.Contains(t, compiled, "issues:")
+ require.Contains(t, compiled, "types:\n - labeled")
+ require.Contains(t, compiled, "github.event.label.name == 'triage'")
+ require.Contains(t, compiled, "|| !(github.event_name == 'issues')")
+}
diff --git a/pkg/workflow/slash_command_centralized_experimental_warning_test.go b/pkg/workflow/slash_command_centralized_experimental_warning_test.go
new file mode 100644
index 0000000000..569b6778ce
--- /dev/null
+++ b/pkg/workflow/slash_command_centralized_experimental_warning_test.go
@@ -0,0 +1,80 @@
+//go:build integration
+
+package workflow
+
+import (
+ "bytes"
+ "io"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/github/gh-aw/pkg/testutil"
+ "github.com/stretchr/testify/require"
+)
+
+func TestSlashCommandCentralizedExperimentalWarning(t *testing.T) {
+ tests := []struct {
+ name string
+ content string
+ expectWarning bool
+ }{
+ {
+ name: "centralized strategy emits warning",
+ content: `---
+on:
+ slash_command:
+ name: triage
+ strategy: centralized
+---
+
+# Test Workflow
+`,
+ expectWarning: true,
+ },
+ {
+ name: "inline strategy does not emit warning",
+ content: `---
+on:
+ slash_command:
+ name: triage
+---
+
+# Test Workflow
+`,
+ expectWarning: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ tmpDir := testutil.TempDir(t, "slash-command-centralized-warning-test")
+ workflowPath := filepath.Join(tmpDir, "test-workflow.md")
+ require.NoError(t, os.WriteFile(workflowPath, []byte(tt.content), 0644))
+
+ oldStderr := os.Stderr
+ r, w, _ := os.Pipe()
+ os.Stderr = w
+
+ compiler := NewCompiler()
+ compiler.SetStrictMode(false)
+ err := compiler.CompileWorkflow(workflowPath)
+
+ w.Close()
+ os.Stderr = oldStderr
+
+ var buf bytes.Buffer
+ _, _ = io.Copy(&buf, r)
+ stderrOutput := buf.String()
+ require.NoError(t, err)
+
+ expected := "Using experimental feature: slash_command.strategy: centralized"
+ if tt.expectWarning {
+ require.Contains(t, stderrOutput, expected)
+ require.Greater(t, compiler.GetWarningCount(), 0)
+ } else {
+ require.NotContains(t, stderrOutput, expected)
+ }
+ })
+ }
+}
diff --git a/pkg/workflow/tools.go b/pkg/workflow/tools.go
index e19d6cfa34..7466211ccd 100644
--- a/pkg/workflow/tools.go
+++ b/pkg/workflow/tools.go
@@ -82,24 +82,37 @@ func (c *Compiler) applyDefaults(data *WorkflowData, markdownPath string) error
if isCommandTrigger {
toolsLog.Print("Workflow is command trigger, configuring command events")
- // Get the filtered command events based on CommandEvents field
- filteredEvents := FilterCommentEvents(data.CommandEvents)
+ commandEventsMap := make(map[string]any)
- // Merge events for YAML generation (combines pull_request_comment and issue_comment into issue_comment)
- yamlEvents := MergeEventsForYAML(filteredEvents)
+ // In centralized slash-command mode, compile slash workflows as
+ // workflow_dispatch-centric targets and preserve only non-slash events.
+ var filteredEvents []CommentEventMapping
+ if data.CommandCentralized {
+ if len(data.CommandOtherEvents) > 0 {
+ maps.Copy(commandEventsMap, data.CommandOtherEvents)
+ }
+ if _, hasWorkflowDispatch := commandEventsMap["workflow_dispatch"]; !hasWorkflowDispatch {
+ commandEventsMap["workflow_dispatch"] = nil
+ }
+ } else {
+ // Get the filtered command events based on CommandEvents field
+ filteredEvents = FilterCommentEvents(data.CommandEvents)
- // Build command events map from merged events
- commandEventsMap := make(map[string]any)
- for _, event := range yamlEvents {
- commandEventsMap[event.EventName] = map[string]any{
- "types": event.Types,
+ // Merge events for YAML generation (combines pull_request_comment and issue_comment into issue_comment)
+ yamlEvents := MergeEventsForYAML(filteredEvents)
+
+ // Build command events map from merged events
+ for _, event := range yamlEvents {
+ commandEventsMap[event.EventName] = map[string]any{
+ "types": event.Types,
+ }
}
- }
- // Check if there are other events to merge
- if len(data.CommandOtherEvents) > 0 {
- // Merge other events into command events
- maps.Copy(commandEventsMap, data.CommandOtherEvents)
+ // Check if there are other events to merge
+ if len(data.CommandOtherEvents) > 0 {
+ // Merge other events into command events
+ maps.Copy(commandEventsMap, data.CommandOtherEvents)
+ }
}
// If label_command is also configured alongside slash_command, merge label events
@@ -141,45 +154,43 @@ func (c *Compiler) applyDefaults(data *WorkflowData, markdownPath string) error
// Keep "on" quoted as it's a YAML boolean keyword
data.On = yamlStr
} else {
- // If conversion fails, build a basic YAML string manually
- var builder strings.Builder
- builder.WriteString(`"on":`)
- for _, event := range filteredEvents {
- builder.WriteString("\n ")
- builder.WriteString(event.EventName)
- builder.WriteString(":\n types: [")
- for i, t := range event.Types {
- if i > 0 {
- builder.WriteString(", ")
- }
- builder.WriteString(t)
- }
- builder.WriteString("]")
- }
- data.On = builder.String()
+ return fmt.Errorf("failed to marshal command events: %w", err)
}
- // Add conditional logic to check for command in issue content
- // Use event-aware condition that only applies command checks to comment-related events
- // Pass the filtered events to buildEventAwareCommandCondition
- hasOtherEvents := len(data.CommandOtherEvents) > 0
- commandConditionTree, err := buildEventAwareCommandCondition(data.Command, data.CommandEvents, hasOtherEvents)
- if err != nil {
- return fmt.Errorf("failed to build command condition: %w", err)
- }
+ // Add conditional logic for command workflows unless centralized mode is enabled.
+ if !data.CommandCentralized {
+ // Add conditional logic to check for command in issue content
+ // Use event-aware condition that only applies command checks to comment-related events
+ // Pass the filtered events to buildEventAwareCommandCondition
+ hasOtherEvents := len(data.CommandOtherEvents) > 0
+ commandConditionTree, err := buildEventAwareCommandCondition(data.Command, data.CommandEvents, hasOtherEvents)
+ if err != nil {
+ return fmt.Errorf("failed to build command condition: %w", err)
+ }
- if data.If == "" {
- if len(data.LabelCommand) > 0 {
- // Combine: (slash_command condition) OR (label_command condition)
- // This allows the workflow to activate via either mechanism.
- labelConditionTree, err := buildLabelCommandCondition(data.LabelCommand, data.LabelCommandEvents, false)
- if err != nil {
- return fmt.Errorf("failed to build combined label-command condition: %w", err)
+ if data.If == "" {
+ if len(data.LabelCommand) > 0 {
+ // Combine: (slash_command condition) OR (label_command condition)
+ // This allows the workflow to activate via either mechanism.
+ labelConditionTree, err := buildLabelCommandCondition(data.LabelCommand, data.LabelCommandEvents, false)
+ if err != nil {
+ return fmt.Errorf("failed to build combined label-command condition: %w", err)
+ }
+ combined := &OrNode{Left: commandConditionTree, Right: labelConditionTree}
+ data.If = RenderCondition(combined)
+ } else {
+ data.If = RenderCondition(commandConditionTree)
}
- combined := &OrNode{Left: commandConditionTree, Right: labelConditionTree}
- data.If = RenderCondition(combined)
+ }
+ } else if data.If == "" && len(data.LabelCommand) > 0 {
+ // Centralized command mode bypasses slash-command content checks.
+ // If label_command is also configured, keep label gating logic.
+ // hasOtherEvents=true keeps router workflow_dispatch runs eligible.
+ labelConditionTree, err := buildLabelCommandCondition(data.LabelCommand, data.LabelCommandEvents, true)
+ if err != nil {
+ return fmt.Errorf("failed to build label-command condition: %w", err)
} else {
- data.If = RenderCondition(commandConditionTree)
+ data.If = RenderCondition(labelConditionTree)
}
}
} else if isLabelCommandTrigger {