From 5d6110ea1dce86e93efbf0bae350f99b1f86a9ee Mon Sep 17 00:00:00 2001 From: Claude Date: Mon, 6 Apr 2026 03:16:43 +0000 Subject: [PATCH 1/4] feat(ci): implement documentation gap detection pipeline for InfluxDB 3 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds an automated documentation gap detection pipeline triggered by release events, scoped to InfluxDB 3 Core and Enterprise (Team Monolith). ## New modules **`scripts/docs-cli/lib/doc-location-map.js`** Inverted doc scanner: reads the content tree and extracts API references (operation-link frontmatter, api-endpoint shortcodes, curl commands, bare paths) then matches them against committed OpenAPI specs. Follows source: frontmatter pointers into shared content. Produces three artifact sets: confirmedMap (spec ops with prose coverage), orphaned (stale doc links), uncovered (ops with no coverage). Smoke-tested: 22/42 Core ops covered. **`scripts/docs-cli/lib/gap-severity.js`** Scores each undocumented operation: path-prefix tier × edition scope × change type → critical | high | medium | low. Health/ping/metrics are capped at low regardless of bumps. Write/query paths hitting both editions as new endpoints score critical. **`scripts/docs-cli/lib/gap-reporter.js`** Assembles severity-scored gap reports from a doc-location-map result and an optional spec delta (git diff on committed OpenAPI YAML between version tags — no source repo access required). Outputs structured JSON + markdown summary. Suggests doc paths from adjacent confirmed-map entries. **`scripts/docs-cli/lib/issue-creator.js`** Creates GitHub issues for high/critical gaps using the gh CLI. Builds structured issue bodies with spec claim, severity rationale, engineering verification ask, and definition-of-done checklist. Supports --dry-run mode (prints to stdout). ## Modified files **`scripts/docs-cli/commands/audit.js`** New flags: --doc-location-map, --previous-version, --create-issue, --dry-run. 
When --doc-location-map is set, runs the inverted scanner after the existing audit and optionally generates a gap report and files issues. **`.github/workflows/influxdb3-release.yml`** New job audit-api-documentation (between release notes and PR creation): uses git diff on committed OpenAPI specs to compute spec delta, runs doc-location-map, generates severity-scored gap report, uploads artifact. create-documentation-pr now shows gap summary in PR body. create-audit-issue now creates one GitHub issue per high/critical gap with full structured body. Release summary includes the new job. **`.github/ISSUE_TEMPLATE/doc-gap-ticket.yml`** GitHub Forms template for manually filing doc gaps: severity, edition scope, change type, operation ID, spec claim, suggested location, engineering verification ask, definition-of-done checklist. https://claude.ai/code/session_01CpE2NxtgSre6spEHLrUw5M --- .github/ISSUE_TEMPLATE/doc-gap-ticket.yml | 166 +++++++ .github/workflows/influxdb3-release.yml | 221 ++++++++- scripts/docs-cli/commands/audit.js | 84 +++- scripts/docs-cli/lib/doc-location-map.js | 566 ++++++++++++++++++++++ scripts/docs-cli/lib/gap-reporter.js | 447 +++++++++++++++++ scripts/docs-cli/lib/gap-severity.js | 220 +++++++++ scripts/docs-cli/lib/issue-creator.js | 274 +++++++++++ 7 files changed, 1962 insertions(+), 16 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/doc-gap-ticket.yml create mode 100644 scripts/docs-cli/lib/doc-location-map.js create mode 100644 scripts/docs-cli/lib/gap-reporter.js create mode 100644 scripts/docs-cli/lib/gap-severity.js create mode 100644 scripts/docs-cli/lib/issue-creator.js diff --git a/.github/ISSUE_TEMPLATE/doc-gap-ticket.yml b/.github/ISSUE_TEMPLATE/doc-gap-ticket.yml new file mode 100644 index 0000000000..8caa578a21 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/doc-gap-ticket.yml @@ -0,0 +1,166 @@ +name: Documentation Gap +description: Track a documentation gap identified from API spec delta analysis. 
For gaps auto-detected by the release pipeline, use the auto-generated issue instead of this form. +title: "Doc gap [SEVERITY]: [OPERATION_ID] — [PRODUCT] [VERSION]" +labels: + - documentation + - doc-gap + - needs-triage +body: + - type: markdown + attributes: + value: | + ## About this template + + Use this template to file documentation gaps identified through API spec analysis, + user feedback, or manual review. + + **Tip:** Gaps auto-detected by the release pipeline are filed automatically with + pre-populated fields. Use this template only for gaps discovered outside the pipeline. + + - type: dropdown + id: severity + attributes: + label: Severity + description: How urgently does this gap need to be addressed? + options: + - "🔴 Critical — write/query path affecting all users" + - "🟠 High — core workflow, affects many users" + - "🟡 Medium — common feature, affects some users" + - "🔵 Low — edge case or rarely-used feature" + validations: + required: true + + - type: dropdown + id: edition_scope + attributes: + label: Edition scope + description: Which InfluxDB 3 editions does this gap affect? + options: + - Both Core and Enterprise + - Core only + - Enterprise only + validations: + required: true + + - type: dropdown + id: change_type + attributes: + label: Change type + description: What kind of change created this gap? + options: + - New endpoint (added in this release) + - Modified endpoint (behavior changed in this release) + - Removed endpoint (doc page references a deleted endpoint) + - Existing gap (endpoint has been undocumented across releases) + validations: + required: true + + - type: input + id: operation_id + attributes: + label: Operation ID + description: The OpenAPI operationId for the affected endpoint. + placeholder: "e.g. PostConfigureDatabase" + validations: + required: true + + - type: input + id: api_path + attributes: + label: API path and method + description: The HTTP method and path for the endpoint. + placeholder: "e.g. 
POST /api/v3/configure/database" + validations: + required: true + + - type: input + id: release_version + attributes: + label: Release version + description: The InfluxDB 3 release version where this gap was identified. + placeholder: "e.g. v3.9.0" + validations: + required: true + + - type: textarea + id: spec_claim + attributes: + label: Spec claim + description: | + What does the OpenAPI spec say about this endpoint? + Copy the relevant summary/description from the spec, or paste a link to the spec operation. + placeholder: | + From the spec: + > Creates a new database with the specified name and optional retention period. + > Parameters: db (required), retention_period (optional) + validations: + required: true + + - type: textarea + id: existing_coverage + attributes: + label: Existing doc coverage + description: | + Links to any existing documentation pages that partially cover this endpoint. + Leave blank if no existing coverage. + placeholder: | + - /influxdb3/core/admin/databases/create/ (mentions endpoint but lacks parameter docs) + + - type: textarea + id: suggested_location + attributes: + label: Suggested documentation location + description: | + Where should the documentation for this endpoint live? + For shared Core/Enterprise content, use the shared content path. + placeholder: | + - content/shared/influxdb3-admin/databases/create.md (existing shared page to update) + - content/influxdb3/core/admin/new-feature/ (new page needed) + + - type: textarea + id: engineering_verification + attributes: + label: Engineering verification ask + description: | + What do you need engineering to confirm before writing documentation? + Pre-populated with standard questions — edit as needed. + value: | + Please confirm before documentation is written: + + - [ ] Is this endpoint intended for public use in the identified release version? + - [ ] Does this endpoint replace or extend an existing endpoint? If so, which one? 
+ - [ ] What are the primary use cases for end users? + - [ ] Are there any known limitations, gotchas, or required prerequisites? + - [ ] Is the behavior identical across Core and Enterprise, or are there edition-specific differences? + validations: + required: true + + - type: checkboxes + id: definition_of_done + attributes: + label: Definition of done + description: Check off each item as it is completed. All items must be checked before closing this issue. + options: + - label: Engineering confirmed the endpoint is public and stable in the identified release + required: false + - label: Doc page created or updated at the suggested location + required: false + - label: API reference entry updated (description, parameters, example request/response) + required: false + - label: Related guides updated if endpoint behavior changed + required: false + - label: Tested against the release binary (not just the spec) + required: false + + - type: checkboxes + id: edition_done + attributes: + label: Edition checklist + description: Complete the applicable items for the affected editions. 
+ options: + - label: Core behavior documented + required: false + - label: Enterprise-specific behavior documented (if any differences from Core) + required: false + - label: Superset relationship noted where applicable (Enterprise includes all Core changes) + required: false diff --git a/.github/workflows/influxdb3-release.yml b/.github/workflows/influxdb3-release.yml index e66f0450a0..81c3d2810f 100644 --- a/.github/workflows/influxdb3-release.yml +++ b/.github/workflows/influxdb3-release.yml @@ -205,6 +205,87 @@ jobs: path: helper-scripts/output/cli-audit/ retention-days: 90 + audit-api-documentation: + name: Audit API Documentation Gaps + needs: generate-release-notes-core-enterprise + runs-on: ubuntu-latest + if: needs.generate-release-notes-core-enterprise.outputs.generated == 'true' && contains(fromJSON('["core", "enterprise"]'), github.event.inputs.product) + outputs: + gap_report_generated: ${{ steps.gap-analysis.outputs.gap_report_generated }} + critical_count: ${{ steps.gap-analysis.outputs.critical_count }} + high_count: ${{ steps.gap-analysis.outputs.high_count }} + + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 # Full history needed for spec delta via git diff + + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version: '18' + cache: 'yarn' + + - name: Install dependencies + run: yarn install --frozen-lockfile + + - name: Run API gap analysis + id: gap-analysis + run: | + PRODUCT="${{ github.event.inputs.product }}" + VERSION="${{ github.event.inputs.version }}" + PREV_VERSION="${{ github.event.inputs.previous_version }}" + + echo "Running API doc location map for ${PRODUCT} ${PREV_VERSION}→${VERSION}" + + # Run the audit with doc-location-map and gap report (no GitHub auth needed for this) + # The doc-location-map reads only committed files; no source repo access required. 
+ # We skip the regular CLI/API source audit (which needs repo access) and run only + # the location map by calling the underlying module directly. + node --input-type=module << 'SCRIPT' + import { runDocLocationMap, writeDocLocationMapReport } from './scripts/docs-cli/lib/doc-location-map.js'; + import { generateGapReport } from './scripts/docs-cli/lib/gap-reporter.js'; + import { mkdirSync } from 'fs'; + + const product = process.env.PRODUCT; + const version = process.env.VERSION; + const previousVersion = process.env.PREV_VERSION; + const outputDir = 'scripts/docs-cli/output/gap-reports'; + + mkdirSync(outputDir, { recursive: true }); + + const mapResult = await runDocLocationMap(product); + await writeDocLocationMapReport(mapResult, outputDir); + + const report = await generateGapReport({ + product, + version, + previousVersion, + mapResult, + outputDir, + }); + + // Output counts for downstream jobs + const { writeFileSync } = await import('fs'); + writeFileSync(process.env.GITHUB_OUTPUT || '/dev/null', + `gap_report_generated=true\ncritical_count=${report.summary.critical}\nhigh_count=${report.summary.high}\n`, + { flag: 'a' } + ); + + console.log(`\nGap summary: ${JSON.stringify(report.summary)}`); + SCRIPT + env: + PRODUCT: ${{ github.event.inputs.product }} + VERSION: ${{ github.event.inputs.version }} + PREV_VERSION: ${{ github.event.inputs.previous_version }} + + - name: Upload gap reports + uses: actions/upload-artifact@v7 + with: + name: gap-report-${{ github.event.inputs.product }}-${{ github.event.inputs.version }} + path: scripts/docs-cli/output/gap-reports/ + retention-days: 90 + # audit-distributed-documentation: # name: Audit Distributed Products Documentation # needs: generate-release-notes-distributed @@ -266,7 +347,7 @@ jobs: create-documentation-pr: name: Create Documentation PR - needs: [generate-release-notes-core-enterprise, audit-cli-documentation] + needs: [generate-release-notes-core-enterprise, audit-cli-documentation, 
audit-api-documentation] runs-on: ubuntu-latest if: github.event.inputs.dry_run != 'true' && always() && (needs.generate-release-notes-core-enterprise.result == 'success') @@ -349,16 +430,20 @@ jobs: This PR contains documentation updates for **${{ github.event.inputs.product }} v${{ github.event.inputs.version }}** ### Included Updates: - - [ ] Release notes - - [ ] Version updates + - [ ] Release notes (placeholder — replace with content from `docs release-notes ${{ github.event.inputs.previous_version }} ${{ github.event.inputs.version }} --products influxdb3_core,influxdb3_enterprise`) + - [ ] Version updates in `data/products.yml` - [ ] CLI documentation audit results + ### API Documentation Gaps + ${{ needs.audit-api-documentation.outputs.gap_report_generated == 'true' && format('🔴 Critical: **{0}** | 🟠 High: **{1}**', needs.audit-api-documentation.outputs.critical_count, needs.audit-api-documentation.outputs.high_count) || 'Gap analysis did not run or was skipped.' }} + + See the [gap report artifact](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}) for the full list of uncovered operations with severity scores and suggested doc locations. + High/critical gaps have been automatically filed as individual GitHub issues. + ### Artifacts: - [Release Notes](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}) - [CLI Audit Report](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}) - - ### Manual Review Needed: - Please review the CLI audit report for any missing or outdated documentation that needs to be updated. 
+ - [API Gap Report](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}) --- *This PR was automatically generated by the release workflow.* @@ -370,9 +455,9 @@ jobs: create-audit-issue: name: Create Audit Issue - needs: [audit-cli-documentation] + needs: [audit-cli-documentation, audit-api-documentation] runs-on: ubuntu-latest - if: github.event.inputs.dry_run != 'true' && always() && (needs.audit-cli-documentation.result == 'success') + if: github.event.inputs.dry_run != 'true' && always() && (needs.audit-cli-documentation.result == 'success' || needs.audit-api-documentation.result == 'success') steps: - uses: actions/checkout@v6 @@ -382,6 +467,124 @@ jobs: with: path: audit-reports/ + - name: Create per-gap issues for high/critical API gaps + uses: actions/github-script@v8 + with: + script: | + const fs = require('fs'); + const path = require('path'); + const product = '${{ github.event.inputs.product }}'; + const version = '${{ github.event.inputs.version }}'; + + const gapReportDir = `audit-reports/gap-report-${product}-${version}`; + const gapReportFile = `${gapReportDir}/gap-report-${product}-${version}.json`; + + if (!fs.existsSync(gapReportFile)) { + console.log('No gap report found — skipping per-gap issue creation'); + } else { + const report = JSON.parse(fs.readFileSync(gapReportFile, 'utf8')); + const actionableGaps = report.gaps.filter(g => + g.severity === 'critical' || g.severity === 'high' + ); + + console.log(`Creating issues for ${actionableGaps.length} high/critical gaps`); + + for (const gap of actionableGaps) { + const severityEmoji = gap.severity === 'critical' ? '🔴' : '🟠'; + const editionLabel = gap.editionScope === 'both' + ? 'Core and Enterprise' + : `InfluxDB 3 ${gap.editionScope.charAt(0).toUpperCase() + gap.editionScope.slice(1)}`; + + const suggestedPaths = gap.suggestedDocPaths.length > 0 + ? 
gap.suggestedDocPaths.map(p => `- ${p}`).join('\n') + : '- No adjacent documentation found; create a new page in the appropriate section'; + + const existingPages = gap.existingDocPages.length > 0 + ? gap.existingDocPages.map(p => `- \`${p}\``).join('\n') + : '- None'; + + const issueBody = [ + `## ${severityEmoji} ${gap.severity.charAt(0).toUpperCase() + gap.severity.slice(1)} Priority Documentation Gap`, + '', + `**Release:** ${product} v${version}`, + `**Category:** ${gap.category}`, + `**Edition scope:** ${editionLabel}`, + `**Change type:** ${gap.changeType}`, + '', + '## Spec Claim', + '', + `| Field | Value |`, + `|-------|-------|`, + `| Operation ID | \`${gap.operationId}\` |`, + `| HTTP method | \`${gap.method}\` |`, + `| API path | \`${gap.path}\` |`, + `| Spec summary | ${gap.summary || '*(not provided)*'} |`, + '', + '## Severity Rationale', + '', + gap.severityRationale, + '', + '## Existing Documentation', + '', + existingPages, + '', + '## Suggested Documentation Location', + '', + suggestedPaths, + '', + gap.note ? `> **Note:** ${gap.note}\n` : '', + '## Engineering Verification Ask', + '', + 'Before writing documentation, please confirm:', + '', + `- [ ] Is \`${gap.operationId}\` (\`${gap.method} ${gap.path}\`) intended for public use in v${version}?`, + '- [ ] Does this endpoint replace or extend an existing endpoint? 
If so, which one?', + '- [ ] What are the primary use cases for end users?', + '- [ ] Are there any known limitations, gotchas, or required prerequisites?', + '- [ ] Is the behavior identical across Core and Enterprise, or are there differences?', + '', + '## Definition of Done', + '', + `- [ ] Engineering confirmed endpoint is public and stable in v${version}`, + '- [ ] Doc page created or updated at suggested location', + '- [ ] API reference entry updated (description, example, parameters)', + '- [ ] Related guides updated if endpoint behavior changed', + `- [ ] Tested against ${product} v${version} release binary`, + gap.editionScope === 'both' || gap.editionScope === 'core' + ? '- [ ] Core behavior documented' : '', + gap.editionScope === 'both' || gap.editionScope === 'enterprise' + ? '- [ ] Enterprise-specific behavior documented (if any)' : '', + gap.editionScope === 'both' + ? '- [ ] Superset relationship noted (Enterprise includes all Core changes)' : '', + '', + '---', + `*Auto-generated by the release documentation pipeline for ${product} v${version}.*`, + ].filter(line => line !== '').join('\n'); + + const labels = [ + 'documentation', + 'doc-gap', + `doc-gap:${gap.severity}`, + `product:${gap.editionScope === 'both' ? 
'influxdb3-core' : 'influxdb3-' + gap.editionScope}`, + ].filter(Boolean); + + // Add enterprise label too if both editions affected + if (gap.editionScope === 'both') { + labels.push('product:influxdb3-enterprise'); + } + + await github.rest.issues.create({ + owner: context.repo.owner, + repo: context.repo.repo, + title: `Doc gap [${gap.severity}]: ${gap.operationId} — ${product} v${version}`, + body: issueBody, + labels, + }); + + console.log(`Created issue for ${gap.operationId} (${gap.severity})`); + } + } + - name: Create issue from audit uses: actions/github-script@v8 with: @@ -471,6 +674,7 @@ jobs: [ generate-release-notes-core-enterprise, audit-cli-documentation, + audit-api-documentation, create-documentation-pr, create-audit-issue, ] @@ -494,6 +698,7 @@ jobs: echo "|------|--------|" >> $GITHUB_STEP_SUMMARY echo "| Generate Release Notes (Core/Enterprise) | ${{ needs.generate-release-notes-core-enterprise.result }} |" >> $GITHUB_STEP_SUMMARY echo "| CLI Documentation Audit | ${{ needs.audit-cli-documentation.result }} |" >> $GITHUB_STEP_SUMMARY + echo "| API Gap Analysis | ${{ needs.audit-api-documentation.result }} |" >> $GITHUB_STEP_SUMMARY echo "| Create Documentation PR | ${{ needs.create-documentation-pr.result }} |" >> $GITHUB_STEP_SUMMARY echo "| Create Audit Issue | ${{ needs.create-audit-issue.result }} |" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY diff --git a/scripts/docs-cli/commands/audit.js b/scripts/docs-cli/commands/audit.js index d0c45b84a3..b216ef4b1b 100644 --- a/scripts/docs-cli/commands/audit.js +++ b/scripts/docs-cli/commands/audit.js @@ -40,14 +40,18 @@ Documentation Coverage Audit Usage: docs audit [options] Options: - --products Product keys or content paths (comma-separated) - Examples: influxdb3_core, /influxdb3/core - --repos Direct repo paths or URLs (alternative to --products) - --version Version/branch/tag to audit (default: main) - --categories Comma-separated categories to audit - --branch docs-v2 branch to 
compare against (default: master) - --output-format Output format: report | drafts | json (default: report) - --help, -h Show this help message + --products Product keys or content paths (comma-separated) + Examples: influxdb3_core, /influxdb3/core + --repos Direct repo paths or URLs (alternative to --products) + --version Version/branch/tag to audit (default: main) + --previous-version Previous version tag; scopes doc-location-map to spec delta + --categories Comma-separated categories to audit + --branch docs-v2 branch to compare against (default: master) + --output-format Output format: report | drafts | json (default: report) + --doc-location-map Run inverted doc location map (content → spec coverage) + --create-issue Create GitHub issues for high/critical gaps (requires --doc-location-map) + --dry-run Print issue bodies to stdout instead of calling GitHub API + --help, -h Show this help message Note: --products and --repos are mutually exclusive. @@ -151,9 +155,13 @@ export default async function audit(args) { let productsInput = null; let reposInput = null; let version = 'main'; // Default to main + let previousVersion = null; let categoryFilter = null; let docsBranch = 'master'; let outputFormat = 'report'; + let runDocLocationMapFlag = false; + let createIssueFlag = false; + let dryRunFlag = false; for (let i = 0; i < positionals.length; i++) { const arg = positionals[i]; @@ -188,6 +196,22 @@ export default async function audit(args) { } } else if (arg.startsWith('--version=')) { version = arg.split('=')[1]; + } else if (arg === '--previous-version') { + if (i + 1 < positionals.length && !positionals[i + 1].startsWith('--')) { + previousVersion = positionals[i + 1]; + i++; + } else { + console.error('Error: --previous-version requires a value'); + process.exit(1); + } + } else if (arg.startsWith('--previous-version=')) { + previousVersion = arg.split('=')[1]; + } else if (arg === '--doc-location-map') { + runDocLocationMapFlag = true; + } else if (arg === 
'--create-issue') { + createIssueFlag = true; + } else if (arg === '--dry-run') { + dryRunFlag = true; } else if (arg === '--categories') { if (i + 1 < positionals.length && !positionals[i + 1].startsWith('--')) { categoryFilter = positionals[i + 1] @@ -459,6 +483,50 @@ export default async function audit(args) { } } + // ── Doc location map (inverted: content → spec coverage) ────────────── + if (runDocLocationMapFlag && (hasCore || hasEnterprise)) { + const influxProduct = + hasCore && hasEnterprise ? 'both' : hasEnterprise ? 'enterprise' : 'core'; + + console.log(`\n📍 Running doc location map for ${influxProduct}...\n`); + + const { runDocLocationMap, writeDocLocationMapReport } = await import( + '../lib/doc-location-map.js' + ); + const { generateGapReport } = await import('../lib/gap-reporter.js'); + + const outputDir = join(__dirname, '..', '..', 'output', 'gap-reports'); + + const mapResult = await runDocLocationMap(influxProduct, { + ...(previousVersion && { previousVersion }), + }); + + await writeDocLocationMapReport(mapResult, outputDir); + + // Generate severity-scored gap report when previous version is known + if (previousVersion) { + await generateGapReport({ + product: influxProduct, + version, + previousVersion, + mapResult, + outputDir, + }); + } + + // Create GitHub issues for high/critical gaps + if (createIssueFlag) { + const { createGapIssues } = await import('../lib/issue-creator.js'); + await createGapIssues({ + mapResult, + product: influxProduct, + version, + previousVersion, + dryRun: dryRunFlag, + }); + } + } + console.log('\n✅ Documentation audit complete!'); } catch (error) { console.error('\n❌ Audit failed:', error.message); diff --git a/scripts/docs-cli/lib/doc-location-map.js b/scripts/docs-cli/lib/doc-location-map.js new file mode 100644 index 0000000000..be6233b9da --- /dev/null +++ b/scripts/docs-cli/lib/doc-location-map.js @@ -0,0 +1,566 @@ +/** + * Doc Location Map + * + * Builds an inverted map from OpenAPI spec operations to 
documentation content + * pages. Reads the content tree and extracts API references, then matches them + * against the committed OpenAPI specs in api-docs/influxdb3/{core,enterprise}/v3/. + * + * Produces three typed artifact sets: + * confirmedMap - operationId → [doc page paths] (operations with prose coverage) + * orphaned - doc pages referencing operationIds no longer in the spec + * uncovered - spec operationIds with no doc coverage + * + * SECURITY: This module reads only files already in this repository (content + * tree and committed OpenAPI specs). No external network access is required. + * + * @module doc-location-map + */ + +import { promises as fs } from 'fs'; +import { join, relative, resolve } from 'path'; +import yaml from 'js-yaml'; + +// ─── Spec paths ────────────────────────────────────────────────────────────── + +const SPEC_PATHS = { + core: 'api-docs/influxdb3/core/v3/influxdb3-core-openapi.yaml', + enterprise: 'api-docs/influxdb3/enterprise/v3/influxdb3-enterprise-openapi.yaml', +}; + +const CONTENT_ROOTS = { + core: 'content/influxdb3/core', + enterprise: 'content/influxdb3/enterprise', +}; + +const SHARED_CONTENT_ROOT = 'content/shared'; + +// ─── Signal extraction regexes ──────────────────────────────────────────────── + +// Signal type 1: related: frontmatter with #operation/OperationId fragment +// Example: - /influxdb3/core/api/v3/#operation/PostConfigureDatabase, ... +const RE_OPERATION_LINK = /#operation\/([A-Za-z][A-Za-z0-9_]*)/g; + +// Signal type 2: {{% api-endpoint method="POST" endpoint="...path..." %}} +const RE_API_ENDPOINT_SHORTCODE = + /api-endpoint[^}]*?method="([A-Z]+)"[^}]*?endpoint="([^"]+)"/g; + +// Signal type 3: curl --request METHOD "...path..." or curl -X METHOD "...path..." 
+const RE_CURL_COMMAND = + /curl\s+(?:--request|-X)\s+([A-Z]+)\s+["']?(?:https?:\/\/[^/"'\s]*)?(\/?api\/v[0-9]+[^"'\s?#]*)/g; + +// Signal type 4: bare path references ``/api/v3/...`` +const RE_BARE_PATH = /`(\/api\/v[0-9]+\/[a-zA-Z0-9_/{}.-]+)`/g; + +// Normalise endpoint strings coming from shortcodes or curl commands +// Strips Hugo template prefixes like {{< influxdb/host >}} +function normaliseEndpoint(raw) { + return raw + .replace(/\{\{[^}]+\}\}/g, '') // strip Hugo shortcodes + .replace(/\{[^}]+\}/g, '{param}') // normalise path params + .replace(/\?.*$/, '') // strip query string + .replace(/\/+$/, ''); // strip trailing slash +} + +// ─── Spec inventory ─────────────────────────────────────────────────────────── + +/** + * Load an OpenAPI spec and build operationId ↔ path/method indices. + * + * @param {string} specAbsPath + * @returns {{ operationIdToPath: Map, pathToOperationId: Map, specVersion: string }} + */ +async function loadSpecInventory(specAbsPath) { + const operationIdToPath = new Map(); // operationId → { method, path, summary, tags } + const pathToOperationId = new Map(); // "METHOD /path" → operationId + let specVersion = 'unknown'; + + let spec; + try { + const content = await fs.readFile(specAbsPath, 'utf-8'); + spec = yaml.load(content); + } catch (err) { + if (err.code === 'ENOENT') { + console.warn(` ⚠️ OpenAPI spec not found: ${specAbsPath}`); + } else { + console.warn(` ⚠️ Error parsing spec ${specAbsPath}: ${err.message}`); + } + return { operationIdToPath, pathToOperationId, specVersion }; + } + + specVersion = spec?.info?.version || 'unknown'; + + for (const [apiPath, pathItem] of Object.entries(spec.paths || {})) { + for (const method of ['get', 'post', 'put', 'patch', 'delete']) { + const op = pathItem[method]; + if (!op || !op.operationId) continue; + + const id = op.operationId; + const normPath = normaliseEndpoint(apiPath); + const key = `${method.toUpperCase()} ${normPath}`; + + operationIdToPath.set(id, { + method: 
method.toUpperCase(), + path: apiPath, + normPath, + summary: op.summary || '', + tags: op.tags || [], + }); + pathToOperationId.set(key, id); + + // Also index without param normalisation for exact matches + const exactKey = `${method.toUpperCase()} ${apiPath}`; + if (!pathToOperationId.has(exactKey)) { + pathToOperationId.set(exactKey, id); + } + } + } + + return { operationIdToPath, pathToOperationId, specVersion }; +} + +// ─── Content tree walker ────────────────────────────────────────────────────── + +/** + * Recursively list all .md files under a directory. + */ +async function walkMarkdown(dir) { + const files = []; + let entries; + try { + entries = await fs.readdir(dir, { withFileTypes: true }); + } catch { + return files; + } + for (const entry of entries) { + const full = join(dir, entry.name); + if (entry.isDirectory()) { + files.push(...(await walkMarkdown(full))); + } else if (entry.name.endsWith('.md')) { + files.push(full); + } + } + return files; +} + +// ─── Signal extraction per file ─────────────────────────────────────────────── + +/** + * Read frontmatter + body from a markdown file. + * Returns { frontmatter: object, body: string }. + */ +async function parseFrontmatter(absPath) { + let content; + try { + content = await fs.readFile(absPath, 'utf-8'); + } catch { + return { frontmatter: {}, body: '' }; + } + + const fmMatch = content.match(/^---\r?\n([\s\S]*?)\r?\n---/); + let frontmatter = {}; + if (fmMatch) { + try { + frontmatter = yaml.load(fmMatch[1]) || {}; + } catch { + frontmatter = {}; + } + } + return { frontmatter, body: content }; +} + +/** + * Extract all API signals from file content. + * Returns an array of { operationId?, method?, path?, signalType, confidence, raw }. + * operationId is set for type-1 signals; method+path for types 2-4. 
+ */ +function extractSignals(body) { + const signals = []; + + // Type 1: #operation/ links (highest confidence – explicit operationId reference) + for (const m of body.matchAll(RE_OPERATION_LINK)) { + signals.push({ + operationId: m[1], + signalType: 'operation-link', + confidence: 'high', + raw: m[0], + }); + } + + // Type 2: {{% api-endpoint %}} shortcode + for (const m of body.matchAll(RE_API_ENDPOINT_SHORTCODE)) { + signals.push({ + method: m[1], + path: normaliseEndpoint(m[2]), + signalType: 'api-endpoint-shortcode', + confidence: 'high', + raw: m[0], + }); + } + + // Type 3: curl commands + for (const m of body.matchAll(RE_CURL_COMMAND)) { + signals.push({ + method: m[1], + path: normaliseEndpoint(m[2]), + signalType: 'curl-command', + confidence: 'medium', + raw: m[0], + }); + } + + // Type 4: bare path references + for (const m of body.matchAll(RE_BARE_PATH)) { + signals.push({ + path: normaliseEndpoint(m[1]), + signalType: 'bare-path', + confidence: 'low', + raw: m[0], + }); + } + + return signals; +} + +/** + * Resolve method+path signals to an operationId using the spec indices. + */ +function resolveSignal(signal, pathToOperationId) { + if (signal.operationId) return signal.operationId; + + if (signal.method && signal.path) { + const key = `${signal.method} ${signal.path}`; + if (pathToOperationId.has(key)) return pathToOperationId.get(key); + } + + // Try path-only match across all keys (last resort for bare-path signals) + if (signal.path) { + for (const [k, id] of pathToOperationId) { + if (k.endsWith(signal.path)) return id; + } + } + + return null; +} + +/** + * Detect edition-scope from a block of text. + * Returns 'core', 'enterprise', or 'both'. + * Signals inside {{% show-in "enterprise" %}} blocks are enterprise-only; + * signals inside {{% show-in "core" %}} are core-only; outside = both. 
+ */ +function detectEditionScope(body, signalRaw) { + // Find position of the signal in the body + const pos = body.indexOf(signalRaw); + if (pos === -1) return 'both'; + + // Scan backwards for the most recent show-in open tag before pos + const before = body.slice(0, pos); + const openCore = before.lastIndexOf('{{% show-in "core" %}}'); + const openEnterprise = before.lastIndexOf('{{% show-in "enterprise" %}}'); + + const lastOpen = Math.max(openCore, openEnterprise); + if (lastOpen === -1) return 'both'; + + // Make sure this show-in block hasn't been closed before pos + const closeTag = '{{% /show-in %}}'; + const closePos = before.lastIndexOf(closeTag); + if (closePos > lastOpen) return 'both'; // the block was closed before our signal + + return openEnterprise > openCore ? 'enterprise' : 'core'; +} + +// ─── Resolve shared source ──────────────────────────────────────────────────── + +/** + * If a content file has a `source:` frontmatter pointer, return the absolute + * path of the shared file. Returns null if no source pointer. + */ +function resolveSourcePath(frontmatter, repoRoot) { + const source = frontmatter?.source; + if (!source) return null; + + // source is a repo-root-relative path like /shared/influxdb3-admin/databases/create.md + const rel = source.startsWith('/') ? source.slice(1) : source; + return join(repoRoot, 'content', rel); +} + +// ─── Main scan ──────────────────────────────────────────────────────────────── + +/** + * Scan a set of content files and return signals grouped by operationId. 
+ * + * @param {string[]} files - Absolute paths to .md files + * @param {Map} pathToOpId - From spec inventory + * @param {string} repoRoot - docs-v2 repo root + * @param {string} productEdition - 'core' | 'enterprise' for attribution + * @param {Set|null} filterOpIds - If set, only collect signals for these operationIds + * @returns {Map} + */ +async function scanContentFiles(files, pathToOpId, repoRoot, productEdition, filterOpIds) { + const results = new Map(); // operationId → [signal entries] + const orphanedRefs = []; // { docPath, operationId } where operationId not in spec + + for (const absFile of files) { + const { frontmatter, body: stubBody } = await parseFrontmatter(absFile); + + // Canonical doc path (relative to content/, for readability) + const docPath = relative(join(repoRoot, 'content'), absFile); + + // Follow source: pointer if present + const sharedPath = resolveSourcePath(frontmatter, repoRoot); + const bodyToScan = sharedPath ? await readSharedBody(sharedPath) : stubBody; + + const signals = extractSignals(bodyToScan); + + for (const sig of signals) { + const opId = resolveSignal(sig, pathToOpId); + if (!opId) continue; + + // Track orphaned references (operationId not in spec) + if (!pathToOpId.size) continue; + + const editionScope = detectEditionScope(bodyToScan, sig.raw); + + const entry = { + docPath, + signalType: sig.signalType, + confidence: sig.confidence, + editionScope: normaliseEditionScope(editionScope, productEdition), + }; + + if (filterOpIds && !filterOpIds.has(opId)) continue; + + if (!results.has(opId)) results.set(opId, []); + // Avoid duplicate entries for same docPath + signalType + const existing = results.get(opId); + if (!existing.some((e) => e.docPath === entry.docPath && e.signalType === entry.signalType)) { + existing.push(entry); + } + } + } + + return { coverageMap: results, orphanedRefs }; +} + +async function readSharedBody(absPath) { + try { + return await fs.readFile(absPath, 'utf-8'); + } catch { + return 
''; + } +} + +/** + * Constrain detected edition scope to what the scanning context allows. + * If we're scanning the core content tree and detect 'both', it's still 'both'. + * If we detect 'enterprise' while scanning core tree, that's still enterprise-only. + */ +function normaliseEditionScope(detected, productEdition) { + if (detected === 'both') return 'both'; + return detected; // trust the show-in markers +} + +// ─── Three-way reconciliation ───────────────────────────────────────────────── + +/** + * Reconcile coverage map against full spec inventory to produce: + * confirmedMap - Map + * orphaned - operationIds referenced in docs but absent from spec + * uncovered - operationIds in spec with no coverage + */ +function reconcile(coverageMap, operationIdToPath, pathToOpId) { + const confirmedMap = new Map(); + const orphaned = []; + const uncovered = []; + + // Check each coverage entry against the spec + for (const [opId, entries] of coverageMap) { + if (operationIdToPath.has(opId)) { + confirmedMap.set(opId, entries); + } else { + // operationId referenced in docs but not in spec → orphaned + for (const entry of entries) { + orphaned.push({ operationId: opId, ...entry }); + } + } + } + + // Find spec operations with no coverage + for (const opId of operationIdToPath.keys()) { + if (!confirmedMap.has(opId)) { + uncovered.push(opId); + } + } + + return { confirmedMap, orphaned, uncovered }; +} + +// ─── Public API ─────────────────────────────────────────────────────────────── + +/** + * Run the doc location map for a given product. 
+ * + * @param {'core'|'enterprise'|'both'} product + * @param {object} options + * @param {string} [options.repoRoot] - Absolute path to docs-v2 root + * @param {Set} [options.filterOperationIds] - Only include these operationIds + * @returns {Promise<{ + * product: string, + * editions: { + * core?: { confirmedMap, orphaned, uncovered, specVersion, stats }, + * enterprise?: { confirmedMap, orphaned, uncovered, specVersion, stats } + * } + * }>} + */ +export async function runDocLocationMap(product, options = {}) { + const repoRoot = options.repoRoot || resolve(new URL('../../..', import.meta.url).pathname); + const filterOpIds = options.filterOperationIds || null; + + const editions = product === 'both' ? ['core', 'enterprise'] : [product]; + const result = { product, editions: {} }; + + for (const edition of editions) { + const specPath = join(repoRoot, SPEC_PATHS[edition]); + const contentRoot = join(repoRoot, CONTENT_ROOTS[edition]); + + console.log(`\n📍 Doc Location Map — InfluxDB 3 ${edition.charAt(0).toUpperCase() + edition.slice(1)}`); + console.log(` Spec: ${SPEC_PATHS[edition]}`); + console.log(` Content: ${CONTENT_ROOTS[edition]}`); + + const { operationIdToPath, pathToOperationId, specVersion } = + await loadSpecInventory(specPath); + + console.log(` Spec version: ${specVersion}`); + console.log(` Operations in spec: ${operationIdToPath.size}`); + + const mdFiles = await walkMarkdown(contentRoot); + console.log(` Content files scanned: ${mdFiles.length}`); + + const { coverageMap } = await scanContentFiles( + mdFiles, + pathToOperationId, + repoRoot, + edition, + filterOpIds + ); + + const { confirmedMap, orphaned, uncovered } = reconcile( + coverageMap, + operationIdToPath, + pathToOperationId + ); + + const stats = { + totalOperations: operationIdToPath.size, + coveredCount: confirmedMap.size, + orphanedCount: orphaned.length, + uncoveredCount: uncovered.length, + coveragePercent: + operationIdToPath.size > 0 + ? 
Math.round((confirmedMap.size / operationIdToPath.size) * 100) + : 0, + }; + + console.log(` Covered: ${stats.coveredCount}/${stats.totalOperations} (${stats.coveragePercent}%)`); + if (orphaned.length > 0) { + console.log(` Orphaned references: ${orphaned.length}`); + } + + result.editions[edition] = { + confirmedMap, + orphaned, + uncovered, + specVersion, + operationIdToPath, + stats, + }; + } + + return result; +} + +/** + * Write a human-readable markdown report for a doc-location-map result. + * + * @param {object} mapResult - Return value of runDocLocationMap() + * @param {string} outputDir - Directory to write reports into + */ +export async function writeDocLocationMapReport(mapResult, outputDir) { + await fs.mkdir(outputDir, { recursive: true }); + + for (const [edition, data] of Object.entries(mapResult.editions)) { + const { confirmedMap, orphaned, uncovered, specVersion, operationIdToPath, stats } = data; + const timestamp = new Date().toISOString().split('T')[0]; + const filename = `doc-location-map-${edition}-${specVersion}.md`; + const outputPath = join(outputDir, filename); + + const lines = []; + lines.push(`# Doc Location Map — InfluxDB 3 ${edition.charAt(0).toUpperCase() + edition.slice(1)}`); + lines.push(''); + lines.push(`**Spec version:** ${specVersion}`); + lines.push(`**Generated:** ${timestamp}`); + lines.push(''); + + lines.push('## Coverage Summary'); + lines.push(''); + lines.push(`| Metric | Count |`); + lines.push(`|--------|-------|`); + lines.push(`| Total operations in spec | ${stats.totalOperations} |`); + lines.push(`| Operations with doc coverage | ${stats.coveredCount} (${stats.coveragePercent}%) |`); + lines.push(`| Operations with no coverage | ${stats.uncoveredCount} |`); + lines.push(`| Orphaned doc references | ${stats.orphanedCount} |`); + lines.push(''); + + // Confirmed map + lines.push('## Confirmed Coverage'); + lines.push(''); + if (confirmedMap.size === 0) { + lines.push('No confirmed coverage found.'); + } else { 
+ lines.push('| Operation ID | Method | Path | Doc Pages | Signal |'); + lines.push('|---|---|---|---|---|'); + for (const [opId, entries] of confirmedMap) { + const opInfo = operationIdToPath.get(opId); + const pages = entries.map((e) => `\`${e.docPath}\``).join(', '); + const signal = entries[0]?.signalType || ''; + lines.push( + `| ${opId} | ${opInfo?.method || ''} | \`${opInfo?.path || ''}\` | ${pages} | ${signal} |` + ); + } + } + lines.push(''); + + // Uncovered operations + if (uncovered.length > 0) { + lines.push('## Uncovered Operations (No Doc Coverage)'); + lines.push(''); + lines.push('These spec operations have no corresponding documentation page.'); + lines.push(''); + lines.push('| Operation ID | Method | Path | Tags |'); + lines.push('|---|---|---|---|'); + for (const opId of uncovered) { + const op = operationIdToPath.get(opId); + const tags = op?.tags?.join(', ') || ''; + lines.push(`| ${opId} | ${op?.method || ''} | \`${op?.path || ''}\` | ${tags} |`); + } + lines.push(''); + } + + // Orphaned references + if (orphaned.length > 0) { + lines.push('## Orphaned References (Stale Doc Links)'); + lines.push(''); + lines.push('These doc pages reference operationIds that are no longer in the spec.'); + lines.push(''); + lines.push('| Operation ID (missing from spec) | Doc Page |'); + lines.push('|---|---|'); + for (const ref of orphaned) { + lines.push(`| ${ref.operationId} | \`${ref.docPath}\` |`); + } + lines.push(''); + } + + await fs.writeFile(outputPath, lines.join('\n'), 'utf-8'); + console.log(`\n📄 Doc location map report written: ${outputPath}`); + } +} diff --git a/scripts/docs-cli/lib/gap-reporter.js b/scripts/docs-cli/lib/gap-reporter.js new file mode 100644 index 0000000000..d96925bab3 --- /dev/null +++ b/scripts/docs-cli/lib/gap-reporter.js @@ -0,0 +1,447 @@ +/** + * Gap Reporter + * + * Assembles a severity-scored documentation gap report from: + * - A doc location map result (from doc-location-map.js) + * - An optional spec delta 
(operationIds that changed between two version tags) + * + * Outputs: + * - Structured JSON gap report (for machine consumption / issue creation) + * - Markdown summary (for PR bodies and human review) + * + * SECURITY: Uses only local git history and committed spec files. No external + * network access required. + * + * @module gap-reporter + */ + +import { promises as fs } from 'fs'; +import { join, resolve } from 'path'; +import { execSync } from 'child_process'; +import yaml from 'js-yaml'; +import { scoreSeverity, deriveCategoryLabel, sortBySeverity } from './gap-severity.js'; + +// ─── Spec delta computation ─────────────────────────────────────────────────── + +/** + * Compute the set of operationIds that changed between two git refs by diffing + * the committed OpenAPI spec files. + * + * Requires a full git history (fetch-depth: 0 in CI). + * + * @param {string} specRelPath - Repo-relative path to the spec YAML file + * @param {string} fromRef - Previous version tag or commit SHA + * @param {string} toRef - New version tag, branch, or 'HEAD' + * @param {string} repoRoot - Absolute path to docs-v2 repo root + * @returns {{ added: string[], modified: string[], removed: string[] }} + */ +export function computeSpecDelta(specRelPath, fromRef, toRef = 'HEAD', repoRoot) { + repoRoot = repoRoot || resolve(new URL('../../..', import.meta.url).pathname); + + let oldContent = ''; + let newContent = ''; + + try { + oldContent = execSync(`git -C "${repoRoot}" show "${fromRef}:${specRelPath}"`, { + encoding: 'utf-8', + stdio: ['pipe', 'pipe', 'pipe'], + }); + } catch { + // File didn't exist at fromRef — all operations are 'added' + } + + try { + if (toRef === 'HEAD') { + newContent = execSync(`cat "${join(repoRoot, specRelPath)}"`, { + encoding: 'utf-8', + }); + } else { + newContent = execSync(`git -C "${repoRoot}" show "${toRef}:${specRelPath}"`, { + encoding: 'utf-8', + stdio: ['pipe', 'pipe', 'pipe'], + }); + } + } catch { + // File doesn't exist at toRef — all 
operations are 'removed' + } + + const oldOpIds = extractOperationIds(oldContent); + const newOpIds = extractOperationIds(newContent); + + const added = [...newOpIds].filter((id) => !oldOpIds.has(id)); + const removed = [...oldOpIds].filter((id) => !newOpIds.has(id)); + + // Modified: in both but spec content changed (simplified: check if summary/description differs) + const modified = []; + if (oldContent && newContent) { + const oldSpec = safeParseYaml(oldContent); + const newSpec = safeParseYaml(newContent); + for (const opId of newOpIds) { + if (oldOpIds.has(opId)) { + const oldOp = findOperationInSpec(oldSpec, opId); + const newOp = findOperationInSpec(newSpec, opId); + if (oldOp && newOp && hasOperationChanged(oldOp, newOp)) { + modified.push(opId); + } + } + } + } + + return { added, modified, removed }; +} + +function extractOperationIds(specContent) { + const ids = new Set(); + if (!specContent) return ids; + const matches = specContent.matchAll(/^\s+operationId:\s+(\S+)/gm); + for (const m of matches) ids.add(m[1]); + return ids; +} + +function safeParseYaml(content) { + try { + return yaml.load(content) || {}; + } catch { + return {}; + } +} + +function findOperationInSpec(spec, operationId) { + for (const pathItem of Object.values(spec.paths || {})) { + for (const op of Object.values(pathItem)) { + if (op && op.operationId === operationId) return op; + } + } + return null; +} + +function hasOperationChanged(oldOp, newOp) { + // Consider changed if summary, description, or parameter count changed + if (oldOp.summary !== newOp.summary) return true; + if (oldOp.description !== newOp.description) return true; + const oldParamCount = (oldOp.parameters || []).length; + const newParamCount = (newOp.parameters || []).length; + if (oldParamCount !== newParamCount) return true; + return false; +} + +// ─── Suggested doc paths ────────────────────────────────────────────────────── + +/** + * Derive suggested documentation paths for an uncovered operation by 
looking + * at what adjacent operations (same path prefix) are already documented. + * + * @param {string} operationId + * @param {object} opInfo - { path, method, tags } + * @param {Map} confirmedMap - From doc-location-map result + * @param {Map} operationIdToPath + * @returns {string[]} + */ +function suggestDocPaths(operationId, opInfo, confirmedMap, operationIdToPath) { + const suggestions = new Set(); + const pathPrefix = opInfo.path.split('/').slice(0, 4).join('/'); // e.g. /api/v3/configure + + for (const [coveredOpId, entries] of confirmedMap) { + const coveredOp = operationIdToPath.get(coveredOpId); + if (coveredOp && coveredOp.path.startsWith(pathPrefix)) { + for (const entry of entries) { + // Convert content path to URL-style suggestion + const url = '/' + entry.docPath + .replace(/\/_index\.md$/, '/') + .replace(/\.md$/, '/') + .replace(/\/index\.md$/, '/'); + suggestions.add(url); + } + } + } + + return [...suggestions].slice(0, 3); +} + +// ─── Gap assembly ───────────────────────────────────────────────────────────── + +/** + * Build the flat list of gap entries from a doc-location-map result and spec + * delta information. 
+ * + * @param {object} params + * @param {object} params.mapResult - Return value of runDocLocationMap() + * @param {object} [params.specDelta] - { added, modified, removed } per edition + * @returns {object[]} Gap entries + */ +function buildGapEntries({ mapResult, specDelta }) { + const gaps = []; + + for (const [edition, data] of Object.entries(mapResult.editions)) { + const { uncovered, orphaned, confirmedMap, operationIdToPath, specVersion } = data; + const editionDelta = specDelta?.[edition] || { added: [], modified: [], removed: [] }; + + // Uncovered operations + for (const opId of uncovered) { + const opInfo = operationIdToPath.get(opId); + if (!opInfo) continue; + + let changeType = 'existing'; + if (editionDelta.added.includes(opId)) changeType = 'new'; + else if (editionDelta.modified.includes(opId)) changeType = 'modified'; + + // Determine edition scope + let editionScope = edition; + if (mapResult.editions.core && mapResult.editions.enterprise) { + const inCore = mapResult.editions.core.operationIdToPath.has(opId); + const inEnterprise = mapResult.editions.enterprise.operationIdToPath.has(opId); + editionScope = inCore && inEnterprise ? 'both' : inCore ? 
'core' : 'enterprise'; + } + + const { severity, rationale } = scoreSeverity(opInfo, editionScope, changeType); + const suggestedDocPaths = suggestDocPaths(opId, opInfo, confirmedMap, operationIdToPath); + const category = deriveCategoryLabel(opInfo.path, opInfo.tags); + + gaps.push({ + operationId: opId, + method: opInfo.method, + path: opInfo.path, + summary: opInfo.summary || '', + tags: opInfo.tags || [], + category, + edition, + editionScope, + specVersion, + changeType, + severity, + severityRationale: rationale, + existingDocPages: [], + suggestedDocPaths, + }); + } + + // Partially covered (in confirmedMap but also delta-modified — may need updates) + for (const opId of editionDelta.modified) { + if (confirmedMap.has(opId)) { + const opInfo = operationIdToPath.get(opId); + if (!opInfo) continue; + + const editionScope = edition; + const { severity, rationale } = scoreSeverity(opInfo, editionScope, 'modified'); + const category = deriveCategoryLabel(opInfo.path, opInfo.tags); + const existingPages = confirmedMap.get(opId)?.map((e) => e.docPath) || []; + + gaps.push({ + operationId: opId, + method: opInfo.method, + path: opInfo.path, + summary: opInfo.summary || '', + tags: opInfo.tags || [], + category, + edition, + editionScope, + specVersion, + changeType: 'modified', + severity, + severityRationale: rationale, + existingDocPages: existingPages, + suggestedDocPaths: existingPages.map((p) => '/' + p.replace(/\/_index\.md$/, '/').replace(/\.md$/, '/')), + note: 'Existing docs may need updates for this changed endpoint.', + }); + } + } + + // Orphaned references + for (const ref of orphaned) { + gaps.push({ + operationId: ref.operationId, + method: '', + path: '', + summary: '', + tags: [], + category: 'Removed endpoint', + edition, + editionScope: edition, + specVersion, + changeType: 'removed', + severity: 'medium', + severityRationale: 'Doc page references a removed endpoint; page needs update or removal', + existingDocPages: [ref.docPath], + 
suggestedDocPaths: [], + }); + } + } + + // Deduplicate by operationId (can appear in both editions) + const seen = new Map(); + const deduped = []; + for (const gap of gaps) { + const key = `${gap.operationId}:${gap.changeType}`; + if (seen.has(key)) { + // Merge edition scopes + const existing = seen.get(key); + if (existing.editionScope !== gap.editionScope) { + existing.editionScope = 'both'; + // Re-score with wider scope + const opInfo = { path: existing.path, method: existing.method, tags: existing.tags }; + const { severity, rationale } = scoreSeverity(opInfo, 'both', existing.changeType); + existing.severity = severity; + existing.severityRationale = rationale; + } + } else { + seen.set(key, gap); + deduped.push(gap); + } + } + + return sortBySeverity(deduped); +} + +// ─── Report generation ──────────────────────────────────────────────────────── + +/** + * Generate a severity-scored gap report. + * + * @param {object} params + * @param {'core'|'enterprise'|'both'} params.product + * @param {string} params.version - Release version (e.g. "v3.9.0") + * @param {string} [params.previousVersion] - Previous version tag (e.g. 
"v3.8.0") + * @param {object} params.mapResult - From runDocLocationMap() + * @param {string} params.outputDir - Directory for output files + * @param {string} [params.repoRoot] - docs-v2 repo root + * @returns {Promise} The gap report object + */ +export async function generateGapReport({ + product, + version, + previousVersion, + mapResult, + outputDir, + repoRoot, +}) { + repoRoot = repoRoot || resolve(new URL('../../..', import.meta.url).pathname); + await fs.mkdir(outputDir, { recursive: true }); + + console.log(`\n📊 Generating gap report for ${product} ${version}...`); + + // Compute spec delta per edition + let specDelta = null; + if (previousVersion) { + specDelta = {}; + const specFiles = { + core: 'api-docs/influxdb3/core/v3/influxdb3-core-openapi.yaml', + enterprise: 'api-docs/influxdb3/enterprise/v3/influxdb3-enterprise-openapi.yaml', + }; + + for (const edition of Object.keys(mapResult.editions)) { + if (specFiles[edition]) { + console.log(` Computing spec delta ${previousVersion}→${version} for ${edition}...`); + try { + specDelta[edition] = computeSpecDelta( + specFiles[edition], + previousVersion, + version === 'HEAD' || !version ? 
'HEAD' : version, + repoRoot + ); + const d = specDelta[edition]; + console.log( + ` Delta: +${d.added.length} added, ~${d.modified.length} modified, -${d.removed.length} removed` + ); + } catch (err) { + console.warn(` ⚠️ Could not compute spec delta for ${edition}: ${err.message}`); + specDelta[edition] = { added: [], modified: [], removed: [] }; + } + } + } + } + + const gaps = buildGapEntries({ mapResult, specDelta }); + + const summary = { + critical: gaps.filter((g) => g.severity === 'critical').length, + high: gaps.filter((g) => g.severity === 'high').length, + medium: gaps.filter((g) => g.severity === 'medium').length, + low: gaps.filter((g) => g.severity === 'low').length, + total: gaps.length, + }; + + const report = { + product, + version: version || 'HEAD', + previousVersion: previousVersion || null, + generatedAt: new Date().toISOString(), + summary, + gaps, + }; + + // Write JSON + const jsonFilename = `gap-report-${product}-${version || 'HEAD'}.json`; + const jsonPath = join(outputDir, jsonFilename); + await fs.writeFile(jsonPath, JSON.stringify(report, null, 2), 'utf-8'); + console.log(` Gap report JSON: ${jsonPath}`); + + // Write markdown summary + const mdPath = join(outputDir, jsonFilename.replace('.json', '.md')); + await writeMdSummary(report, mdPath); + console.log(` Gap report markdown: ${mdPath}`); + + return report; +} + +async function writeMdSummary(report, outputPath) { + const lines = []; + const titleEdition = report.product.charAt(0).toUpperCase() + report.product.slice(1); + + lines.push(`# Documentation Gap Report — InfluxDB 3 ${titleEdition}`); + lines.push(''); + lines.push(`**Version:** ${report.version}`); + if (report.previousVersion) { + lines.push(`**Delta from:** ${report.previousVersion}`); + } + lines.push(`**Generated:** ${report.generatedAt.split('T')[0]}`); + lines.push(''); + + // Summary + lines.push('## Summary'); + lines.push(''); + lines.push('| Severity | Count |'); + lines.push('|----------|-------|'); + 
lines.push(`| 🔴 Critical | ${report.summary.critical} |`); + lines.push(`| 🟠 High | ${report.summary.high} |`); + lines.push(`| 🟡 Medium | ${report.summary.medium} |`); + lines.push(`| 🔵 Low | ${report.summary.low} |`); + lines.push(`| **Total** | **${report.summary.total}** |`); + lines.push(''); + + if (report.gaps.length === 0) { + lines.push('No documentation gaps detected.'); + await fs.writeFile(outputPath, lines.join('\n'), 'utf-8'); + return; + } + + // Group by severity + for (const sev of ['critical', 'high', 'medium', 'low']) { + const sevGaps = report.gaps.filter((g) => g.severity === sev); + if (sevGaps.length === 0) continue; + + const emoji = { critical: '🔴', high: '🟠', medium: '🟡', low: '🔵' }[sev]; + lines.push(`## ${emoji} ${sev.charAt(0).toUpperCase() + sev.slice(1)} Priority (${sevGaps.length})`); + lines.push(''); + + for (const gap of sevGaps) { + lines.push(`### ${gap.operationId}`); + lines.push(''); + lines.push(`- **Method/Path:** \`${gap.method} ${gap.path}\``); + lines.push(`- **Category:** ${gap.category}`); + lines.push(`- **Edition scope:** ${gap.editionScope}`); + lines.push(`- **Change type:** ${gap.changeType}`); + lines.push(`- **Rationale:** ${gap.severityRationale}`); + if (gap.summary) lines.push(`- **Summary:** ${gap.summary}`); + if (gap.existingDocPages.length > 0) { + lines.push(`- **Existing docs:** ${gap.existingDocPages.map((p) => `\`${p}\``).join(', ')}`); + } + if (gap.suggestedDocPaths.length > 0) { + lines.push(`- **Suggested location:** ${gap.suggestedDocPaths.join(', ')}`); + } + if (gap.note) lines.push(`- **Note:** ${gap.note}`); + lines.push(''); + } + } + + await fs.writeFile(outputPath, lines.join('\n'), 'utf-8'); +} diff --git a/scripts/docs-cli/lib/gap-severity.js b/scripts/docs-cli/lib/gap-severity.js new file mode 100644 index 0000000000..d1d78ea714 --- /dev/null +++ b/scripts/docs-cli/lib/gap-severity.js @@ -0,0 +1,220 @@ +/** + * Gap Severity Scorer + * + * Assigns a severity tier to each undocumented API 
operation based on: + 1. API category (derived from path prefix and tags) + 2. Edition scope (both editions = wider user impact) + 3. Change type (new endpoint, modified, removed) + * + * Severity tiers: critical | high | medium | low + * + * @module gap-severity + */ + +// ─── Category tier lookup ───────────────────────────────────────────────────── + +/** + * Path prefix → base severity tier. + * More specific prefixes must come before more general ones. + */ +const PATH_TIER = [ + // Write paths — core user workflow + { prefix: '/write', tier: 'high' }, + { prefix: '/api/v2/write', tier: 'high' }, + { prefix: '/api/v3/write_lp', tier: 'high' }, + + // Query paths — core user workflow + { prefix: '/query', tier: 'high' }, + { prefix: '/api/v2/query', tier: 'high' }, + { prefix: '/api/v3/query_sql', tier: 'high' }, + { prefix: '/api/v3/query_influxql', tier: 'high' }, + + // Database / table management — primary admin workflow + { prefix: '/api/v3/configure/database', tier: 'high' }, + { prefix: '/api/v3/configure/table', tier: 'high' }, + + // Token management + { prefix: '/api/v3/configure/token', tier: 'medium' }, + { prefix: '/api/v3/configure/processing_engine_trigger/test', tier: 'low', cap: 'low' }, // trigger test endpoint: listed before the general trigger prefix so its cap applies + + // Caching + { prefix: '/api/v3/configure/distinct_cache', tier: 'medium' }, + { prefix: '/api/v3/configure/last_cache', tier: 'medium' }, + + // Processing engine / triggers + { prefix: '/api/v3/configure/processing_engine_trigger', tier: 'medium' }, + { prefix: '/api/v3/engine', tier: 'medium' }, + + // Export (Enterprise-specific) + { prefix: '/api/v3/export', tier: 'medium' }, + { prefix: '/api/v3/enterprise', tier: 'medium' }, + + // Plugin management + { prefix: '/api/v3/configure/plugin_environment', tier: 'low' }, + { prefix: '/api/v3/plugins', tier: 'low' }, + { prefix: '/api/v3/configure/plugin', tier: 'low' }, + + // Health / metrics / ping — self-explanatory; capped at low regardless of bumps + { prefix: '/health', tier: 'low', cap: 'low' }, + { prefix: '/ping', tier:
'low', cap: 'low' }, + { prefix: '/metrics', tier: 'low', cap: 'low' }, + + // Plugin test endpoints — internal / developer tooling + { prefix: '/api/v3/configure/processing_engine_trigger/test', tier: 'low', cap: 'low' }, +]; + +/** + * Tag → base severity tier override (used when path prefix doesn't match). + */ +const TAG_TIER = { + 'Write data': 'high', + 'Query data': 'high', + 'Manage databases': 'high', + 'Manage tables': 'high', + 'Authenticate': 'medium', + 'Manage tokens': 'medium', + 'Cache data': 'medium', + 'Processing engine': 'medium', + 'Export data': 'medium', + 'System': 'low', +}; + +// Tier ordering for cap enforcement +const TIER_ORDER = { critical: 3, high: 2, medium: 1, low: 0 }; +const TIER_FROM_ORDER = ['low', 'medium', 'high', 'critical']; + +function capTier(tier, cap) { + if (!cap) return tier; + const capIdx = TIER_ORDER[cap] ?? 0; + const tierIdx = TIER_ORDER[tier] ?? 0; + return TIER_FROM_ORDER[Math.min(tierIdx, capIdx)]; +} + +/** + * Derive base severity tier from operation path and tags. + * Returns { tier, cap? } where cap is the maximum allowed severity. + * @param {string} path - API path, e.g. "/api/v3/configure/database" + * @param {string[]} tags - Operation tags from the spec + * @returns {{ tier: 'high'|'medium'|'low', cap?: string }} + */ +function baseTierFromPathAndTags(path, tags = []) { + // Check path prefixes (most specific first) + for (const entry of PATH_TIER) { + if (path.startsWith(entry.prefix)) return { tier: entry.tier, cap: entry.cap }; + } + + // Fall back to tag-based lookup + for (const tag of tags) { + if (TAG_TIER[tag]) return { tier: TAG_TIER[tag] }; + } + + return { tier: 'low' }; // default +} + +// ─── Score computation ──────────────────────────────────────────────────────── + +/** + * Score the severity of a documentation gap. 
+ * + * @param {object} operation - Operation descriptor from the spec inventory + * @param {string} operation.path + * @param {string} operation.method + * @param {string[]} operation.tags + * @param {'core'|'enterprise'|'both'} editionScope + * @param {'new'|'modified'|'removed'|'existing'} changeType + * - 'new' gap for a newly added endpoint + * - 'modified' gap for a changed endpoint + * - 'removed' doc references a removed endpoint (orphan) + * - 'existing' gap for an endpoint that has been in the spec unchanged + * @returns {{ severity: 'critical'|'high'|'medium'|'low', rationale: string }} + */ +export function scoreSeverity(operation, editionScope, changeType) { + const { path, method, tags = [] } = operation; + const { tier: baseTier, cap } = baseTierFromPathAndTags(path, tags); + let tier = baseTier; + + const reasons = []; + + // Change type adjustments + if (changeType === 'new') { + // New endpoints always warrant documentation + reasons.push('new endpoint in release'); + if (tier === 'low') tier = 'medium'; // bump low → medium for new endpoints + } else if (changeType === 'modified') { + reasons.push('endpoint changed in release'); + } else if (changeType === 'removed') { + reasons.push('endpoint removed — doc page needs update or removal'); + } else { + reasons.push('existing undocumented endpoint'); + } + + // Edition scope + if (editionScope === 'both') { + reasons.push('affects both Core and Enterprise users'); + if (tier === 'medium') tier = 'high'; // bump medium → high for cross-edition + if (tier === 'low') tier = 'medium'; // bump low → medium for cross-edition + } else if (editionScope === 'enterprise') { + reasons.push('Enterprise only'); + } else { + reasons.push('Core only'); + } + + // Promote 'high' to 'critical' for new write/query endpoints affecting both editions + if ( + tier === 'high' && + changeType === 'new' && + editionScope === 'both' && + (path.includes('/write') || path.includes('/query')) + ) { + tier = 'critical'; + 
reasons.push('write/query path affecting both editions'); + } + + // Apply path-level cap (e.g. health/ping always stay ≤ low) + tier = capTier(tier, cap); + + const categoryLabel = deriveCategoryLabel(path, tags); + reasons.unshift(categoryLabel); + + return { + severity: tier, + rationale: reasons.join('; '), + }; +} + +/** + * Derive a human-readable category label for an operation. + */ +export function deriveCategoryLabel(path, tags = []) { + if (path.startsWith('/write') || path.startsWith('/api/v2/write') || path.startsWith('/api/v3/write')) { + return 'Write data'; + } + if (path.startsWith('/query') || path.startsWith('/api/v2/query') || path.startsWith('/api/v3/query')) { + return 'Query data'; + } + if (path.startsWith('/api/v3/configure/database')) return 'Database management'; + if (path.startsWith('/api/v3/configure/table')) return 'Table management'; + if (path.startsWith('/api/v3/configure/token')) return 'Token management'; + if (path.startsWith('/api/v3/configure/distinct_cache')) return 'Distinct Value Cache'; + if (path.startsWith('/api/v3/configure/last_cache')) return 'Last Value Cache'; + if (path.startsWith('/api/v3/configure/processing_engine_trigger')) return 'Processing engine triggers'; + if (path.startsWith('/api/v3/engine')) return 'Processing engine'; + if (path.startsWith('/api/v3/export')) return 'Export data'; + if (path.startsWith('/api/v3/enterprise')) return 'Enterprise administration'; + if (path.startsWith('/api/v3/plugins')) return 'Plugin management'; + if (path.startsWith('/health') || path.startsWith('/ping') || path.startsWith('/metrics')) return 'System'; + + // Fall back to first tag + if (tags.length > 0) return tags[0]; + return 'API'; +} + +/** + * Sort gap entries by severity (critical first).
+ * @param {Array} gaps - Array of objects with a .severity field
+ * @returns {Array}
+ */
+export function sortBySeverity(gaps) {
+  const order = { critical: 0, high: 1, medium: 2, low: 3 };
+  return [...gaps].sort((a, b) => (order[a.severity] ?? 4) - (order[b.severity] ?? 4));
+}
diff --git a/scripts/docs-cli/lib/issue-creator.js b/scripts/docs-cli/lib/issue-creator.js
new file mode 100644
index 0000000000..c522b603ab
--- /dev/null
+++ b/scripts/docs-cli/lib/issue-creator.js
@@ -0,0 +1,274 @@
+/**
+ * Issue Creator
+ *
+ * Creates GitHub issues for high/critical documentation gaps found by the
+ * gap reporter. Uses the `gh` CLI (GitHub CLI) to avoid adding an @octokit
+ * dependency.
+ *
+ * Supports --dry-run mode which prints issue bodies to stdout instead of
+ * calling the GitHub API.
+ *
+ * SECURITY: No private repo names or URLs are hardcoded. The `gh` CLI
+ * resolves the target repository from the local git checkout at run time.
+ *
+ * @module issue-creator
+ */
+
+import { execSync } from 'child_process';
+import { scoreSeverity, deriveCategoryLabel } from './gap-severity.js';
+
+// ─── Label management ─────────────────────────────────────────────────────────
+
+/**
+ * Ensure required labels exist in the repo. Creates them if missing.
+ * Labels are created with appropriate colours for easy visual scanning.
+ * Silently skips labels that already exist.
+ */ +async function ensureLabels(dryRun) { + const labelsToCreate = [ + { name: 'doc-gap', color: 'd93f0b', description: 'Documentation gap detected by automated pipeline' }, + { name: 'doc-gap:critical', color: 'b60205', description: 'Critical priority documentation gap' }, + { name: 'doc-gap:high', color: 'e4e669', description: 'High priority documentation gap' }, + { name: 'doc-gap:medium', color: '0075ca', description: 'Medium priority documentation gap' }, + { name: 'doc-gap:low', color: 'cfd3d7', description: 'Low priority documentation gap' }, + ]; + + if (dryRun) return; // Skip label creation in dry-run mode + + for (const label of labelsToCreate) { + try { + execSync( + `gh label create "${label.name}" --color "${label.color}" --description "${label.description}" 2>/dev/null || true`, + { stdio: 'pipe' } + ); + } catch { + // Label likely already exists; ignore + } + } +} + +// ─── Issue body builder ─────────────────────────────────────────────────────── + +/** + * Build the markdown body for a gap issue. + * + * @param {object} gap - Gap entry from gap-reporter output + * @param {string} product - 'core' | 'enterprise' | 'both' + * @param {string} version - Release version string + * @returns {string} + */ +function buildIssueBody(gap, product, version) { + const severityEmoji = { critical: '🔴', high: '🟠', medium: '🟡', low: '🔵' }[gap.severity] || '⚪'; + const editionLabel = + gap.editionScope === 'both' + ? 'Core and Enterprise' + : `InfluxDB 3 ${gap.editionScope.charAt(0).toUpperCase() + gap.editionScope.slice(1)}`; + + const suggestedPaths = + gap.suggestedDocPaths?.length > 0 + ? gap.suggestedDocPaths.map((p) => `- ${p}`).join('\n') + : '- No adjacent documentation found — create a new page in the appropriate section.'; + + const existingPages = + gap.existingDocPages?.length > 0 + ? 
gap.existingDocPages.map((p) => `- \`${p}\``).join('\n') + : '- None'; + + const editionDoneItems = []; + if (gap.editionScope === 'both' || gap.editionScope === 'core') { + editionDoneItems.push('- [ ] Core behavior documented'); + } + if (gap.editionScope === 'both' || gap.editionScope === 'enterprise') { + editionDoneItems.push('- [ ] Enterprise-specific behavior documented (if any)'); + } + if (gap.editionScope === 'both') { + editionDoneItems.push('- [ ] Superset relationship noted (Enterprise includes all Core changes)'); + } + + const sections = [ + `## ${severityEmoji} ${gap.severity.charAt(0).toUpperCase() + gap.severity.slice(1)} Priority Documentation Gap`, + '', + `**Release:** ${product} v${version}`, + `**Category:** ${gap.category || deriveCategoryLabel(gap.path, gap.tags)}`, + `**Edition scope:** ${editionLabel}`, + `**Change type:** ${gap.changeType}`, + '', + '## Spec Claim', + '', + '| Field | Value |', + '|-------|-------|', + `| Operation ID | \`${gap.operationId}\` |`, + `| HTTP method | \`${gap.method}\` |`, + `| API path | \`${gap.path}\` |`, + `| Spec summary | ${gap.summary || '*(not provided in spec)*'} |`, + '', + '## Severity Rationale', + '', + gap.severityRationale, + '', + '## Existing Documentation', + '', + existingPages, + '', + '## Suggested Documentation Location', + '', + suggestedPaths, + '', + ...(gap.note ? [`> **Note:** ${gap.note}`, ''] : []), + '## Engineering Verification Ask', + '', + 'Before writing documentation, please confirm:', + '', + `- [ ] Is \`${gap.operationId}\` (\`${gap.method} ${gap.path}\`) intended for public use in v${version}?`, + '- [ ] Does this endpoint replace or extend an existing endpoint? If so, which one?', + '- [ ] What are the primary use cases for end users?', + '- [ ] Are there any known limitations, gotchas, or required prerequisites?', + gap.editionScope === 'both' + ? '- [ ] Is the behavior identical across Core and Enterprise, or are there edition-specific differences?' 
+ : '', + '', + '## Definition of Done', + '', + `- [ ] Engineering confirmed endpoint is public and stable in v${version}`, + '- [ ] Doc page created or updated at the suggested location', + '- [ ] API reference entry updated (description, example, parameters)', + '- [ ] Related guides updated if endpoint behavior changed', + `- [ ] Tested against ${product} v${version} release binary`, + ...editionDoneItems, + '', + '---', + `*Auto-generated by the release documentation pipeline for ${product} v${version}.*`, + ]; + + return sections.filter((s) => s !== '').join('\n'); +} + +// ─── Public API ─────────────────────────────────────────────────────────────── + +/** + * Create GitHub issues for high/critical documentation gaps. + * + * @param {object} params + * @param {object} params.mapResult - From runDocLocationMap() + * @param {string} params.product - 'core' | 'enterprise' | 'both' + * @param {string} params.version - Release version + * @param {string} [params.previousVersion] + * @param {boolean} [params.dryRun] - Print to stdout instead of creating issues + * @param {string[]} [params.severities] - Severity levels to file (default: critical + high) + */ +export async function createGapIssues({ + mapResult, + product, + version, + previousVersion, + dryRun = false, + severities = ['critical', 'high'], +}) { + // Collect all uncovered gaps from the map result + const gaps = []; + + for (const [edition, data] of Object.entries(mapResult.editions)) { + const { uncovered, operationIdToPath } = data; + for (const opId of uncovered) { + const opInfo = operationIdToPath.get(opId); + if (!opInfo) continue; + + // Determine cross-edition scope + let editionScope = edition; + if (mapResult.editions.core && mapResult.editions.enterprise) { + const inCore = mapResult.editions.core.operationIdToPath.has(opId); + const inEnterprise = mapResult.editions.enterprise.operationIdToPath.has(opId); + editionScope = inCore && inEnterprise ? 'both' : inCore ? 
'core' : 'enterprise'; + } + + const changeType = 'existing'; // Without delta info here; gap-reporter handles delta scoring + const { severity, rationale } = scoreSeverity(opInfo, editionScope, changeType); + + if (!severities.includes(severity)) continue; + + gaps.push({ + operationId: opId, + method: opInfo.method, + path: opInfo.path, + summary: opInfo.summary || '', + tags: opInfo.tags || [], + category: deriveCategoryLabel(opInfo.path, opInfo.tags), + editionScope, + changeType, + severity, + severityRationale: rationale, + existingDocPages: [], + suggestedDocPaths: [], + }); + } + } + + // Deduplicate by operationId + const seen = new Set(); + const deduped = gaps.filter((g) => { + if (seen.has(g.operationId)) return false; + seen.add(g.operationId); + return true; + }); + + if (deduped.length === 0) { + console.log('No actionable gaps to file as issues.'); + return; + } + + console.log(`\n📋 ${dryRun ? '[DRY RUN] Would create' : 'Creating'} ${deduped.length} issue(s) for ${severities.join('/')} gaps...`); + + if (!dryRun) { + await ensureLabels(false); + } + + for (const gap of deduped) { + const title = `Doc gap [${gap.severity}]: ${gap.operationId} — ${product} v${version}`; + const body = buildIssueBody(gap, product, version); + + const labels = [ + 'documentation', + 'doc-gap', + `doc-gap:${gap.severity}`, + ]; + + if (gap.editionScope === 'both') { + labels.push('product:influxdb3-core', 'product:influxdb3-enterprise'); + } else { + labels.push(`product:influxdb3-${gap.editionScope}`); + } + + if (dryRun) { + console.log('\n' + '─'.repeat(72)); + console.log(`ISSUE: ${title}`); + console.log(`LABELS: ${labels.join(', ')}`); + console.log('─'.repeat(72)); + console.log(body); + } else { + try { + const labelArgs = labels.map((l) => `--label "${l}"`).join(' '); + execSync( + `gh issue create --title "${title.replace(/"/g, '\\"')}" --body "${body.replace(/"/g, '\\"').replace(/\n/g, '\\n')}" ${labelArgs}`, + { stdio: ['pipe', 'inherit', 'pipe'] } + ); + 
console.log(` ✓ Created: ${title}`); + } catch (err) { + // If label doesn't exist in repo, retry without product labels + try { + execSync( + `gh issue create --title "${title.replace(/"/g, '\\"')}" --body "${body.replace(/"/g, '\\"').replace(/\n/g, '\\n')}" --label "documentation" --label "doc-gap"`, + { stdio: ['pipe', 'inherit', 'pipe'] } + ); + console.log(` ✓ Created (without product labels): ${title}`); + } catch (err2) { + console.error(` ✗ Failed to create issue for ${gap.operationId}: ${err2.message}`); + } + } + } + } + + if (dryRun) { + console.log('\n' + '─'.repeat(72)); + console.log(`[DRY RUN] Would have created ${deduped.length} issue(s). Pass --create-issue without --dry-run to create them.`); + } +} From f5444dd7b700ac921049c5c6b00c75c9dcfc89fb Mon Sep 17 00:00:00 2001 From: Claude Date: Mon, 6 Apr 2026 03:42:48 +0000 Subject: [PATCH 2/4] style: apply prettier formatting to gap detection pipeline modules https://claude.ai/code/session_01CpE2NxtgSre6spEHLrUw5M --- scripts/docs-cli/commands/audit.js | 16 +-- scripts/docs-cli/lib/doc-location-map.js | 58 +++++++--- scripts/docs-cli/lib/gap-reporter.js | 132 +++++++++++++++++------ scripts/docs-cli/lib/gap-severity.js | 54 +++++++--- scripts/docs-cli/lib/issue-creator.js | 72 +++++++++---- 5 files changed, 248 insertions(+), 84 deletions(-) diff --git a/scripts/docs-cli/commands/audit.js b/scripts/docs-cli/commands/audit.js index b216ef4b1b..06ef69ec95 100644 --- a/scripts/docs-cli/commands/audit.js +++ b/scripts/docs-cli/commands/audit.js @@ -476,9 +476,8 @@ export default async function audit(args) { if (runAPIAudit) { console.log(`📋 Running API audit for ${influxProduct}...\n`); - const { runAPIAudit: runAPIAuditFn } = await import( - '../lib/api-auditor.js' - ); + const { runAPIAudit: runAPIAuditFn } = + await import('../lib/api-auditor.js'); await runAPIAuditFn(influxProduct, version, docsBranch, outputFormat); } } @@ -486,13 +485,16 @@ export default async function audit(args) { // ── Doc 
location map (inverted: content → spec coverage) ────────────── if (runDocLocationMapFlag && (hasCore || hasEnterprise)) { const influxProduct = - hasCore && hasEnterprise ? 'both' : hasEnterprise ? 'enterprise' : 'core'; + hasCore && hasEnterprise + ? 'both' + : hasEnterprise + ? 'enterprise' + : 'core'; console.log(`\n📍 Running doc location map for ${influxProduct}...\n`); - const { runDocLocationMap, writeDocLocationMapReport } = await import( - '../lib/doc-location-map.js' - ); + const { runDocLocationMap, writeDocLocationMapReport } = + await import('../lib/doc-location-map.js'); const { generateGapReport } = await import('../lib/gap-reporter.js'); const outputDir = join(__dirname, '..', '..', 'output', 'gap-reports'); diff --git a/scripts/docs-cli/lib/doc-location-map.js b/scripts/docs-cli/lib/doc-location-map.js index be6233b9da..f3a3276d6a 100644 --- a/scripts/docs-cli/lib/doc-location-map.js +++ b/scripts/docs-cli/lib/doc-location-map.js @@ -24,7 +24,8 @@ import yaml from 'js-yaml'; const SPEC_PATHS = { core: 'api-docs/influxdb3/core/v3/influxdb3-core-openapi.yaml', - enterprise: 'api-docs/influxdb3/enterprise/v3/influxdb3-enterprise-openapi.yaml', + enterprise: + 'api-docs/influxdb3/enterprise/v3/influxdb3-enterprise-openapi.yaml', }; const CONTENT_ROOTS = { @@ -296,7 +297,13 @@ function resolveSourcePath(frontmatter, repoRoot) { * @param {Set|null} filterOpIds - If set, only collect signals for these operationIds * @returns {Map} */ -async function scanContentFiles(files, pathToOpId, repoRoot, productEdition, filterOpIds) { +async function scanContentFiles( + files, + pathToOpId, + repoRoot, + productEdition, + filterOpIds +) { const results = new Map(); // operationId → [signal entries] const orphanedRefs = []; // { docPath, operationId } where operationId not in spec @@ -333,7 +340,12 @@ async function scanContentFiles(files, pathToOpId, repoRoot, productEdition, fil if (!results.has(opId)) results.set(opId, []); // Avoid duplicate entries for same 
docPath + signalType const existing = results.get(opId); - if (!existing.some((e) => e.docPath === entry.docPath && e.signalType === entry.signalType)) { + if ( + !existing.some( + (e) => + e.docPath === entry.docPath && e.signalType === entry.signalType + ) + ) { existing.push(entry); } } @@ -413,7 +425,8 @@ function reconcile(coverageMap, operationIdToPath, pathToOpId) { * }>} */ export async function runDocLocationMap(product, options = {}) { - const repoRoot = options.repoRoot || resolve(new URL('../../..', import.meta.url).pathname); + const repoRoot = + options.repoRoot || resolve(new URL('../../..', import.meta.url).pathname); const filterOpIds = options.filterOperationIds || null; const editions = product === 'both' ? ['core', 'enterprise'] : [product]; @@ -423,7 +436,9 @@ export async function runDocLocationMap(product, options = {}) { const specPath = join(repoRoot, SPEC_PATHS[edition]); const contentRoot = join(repoRoot, CONTENT_ROOTS[edition]); - console.log(`\n📍 Doc Location Map — InfluxDB 3 ${edition.charAt(0).toUpperCase() + edition.slice(1)}`); + console.log( + `\n📍 Doc Location Map — InfluxDB 3 ${edition.charAt(0).toUpperCase() + edition.slice(1)}` + ); console.log(` Spec: ${SPEC_PATHS[edition]}`); console.log(` Content: ${CONTENT_ROOTS[edition]}`); @@ -461,7 +476,9 @@ export async function runDocLocationMap(product, options = {}) { : 0, }; - console.log(` Covered: ${stats.coveredCount}/${stats.totalOperations} (${stats.coveragePercent}%)`); + console.log( + ` Covered: ${stats.coveredCount}/${stats.totalOperations} (${stats.coveragePercent}%)` + ); if (orphaned.length > 0) { console.log(` Orphaned references: ${orphaned.length}`); } @@ -489,13 +506,22 @@ export async function writeDocLocationMapReport(mapResult, outputDir) { await fs.mkdir(outputDir, { recursive: true }); for (const [edition, data] of Object.entries(mapResult.editions)) { - const { confirmedMap, orphaned, uncovered, specVersion, operationIdToPath, stats } = data; + const { + 
confirmedMap, + orphaned, + uncovered, + specVersion, + operationIdToPath, + stats, + } = data; const timestamp = new Date().toISOString().split('T')[0]; const filename = `doc-location-map-${edition}-${specVersion}.md`; const outputPath = join(outputDir, filename); const lines = []; - lines.push(`# Doc Location Map — InfluxDB 3 ${edition.charAt(0).toUpperCase() + edition.slice(1)}`); + lines.push( + `# Doc Location Map — InfluxDB 3 ${edition.charAt(0).toUpperCase() + edition.slice(1)}` + ); lines.push(''); lines.push(`**Spec version:** ${specVersion}`); lines.push(`**Generated:** ${timestamp}`); @@ -506,7 +532,9 @@ export async function writeDocLocationMapReport(mapResult, outputDir) { lines.push(`| Metric | Count |`); lines.push(`|--------|-------|`); lines.push(`| Total operations in spec | ${stats.totalOperations} |`); - lines.push(`| Operations with doc coverage | ${stats.coveredCount} (${stats.coveragePercent}%) |`); + lines.push( + `| Operations with doc coverage | ${stats.coveredCount} (${stats.coveragePercent}%) |` + ); lines.push(`| Operations with no coverage | ${stats.uncoveredCount} |`); lines.push(`| Orphaned doc references | ${stats.orphanedCount} |`); lines.push(''); @@ -534,14 +562,18 @@ export async function writeDocLocationMapReport(mapResult, outputDir) { if (uncovered.length > 0) { lines.push('## Uncovered Operations (No Doc Coverage)'); lines.push(''); - lines.push('These spec operations have no corresponding documentation page.'); + lines.push( + 'These spec operations have no corresponding documentation page.' 
+ ); lines.push(''); lines.push('| Operation ID | Method | Path | Tags |'); lines.push('|---|---|---|---|'); for (const opId of uncovered) { const op = operationIdToPath.get(opId); const tags = op?.tags?.join(', ') || ''; - lines.push(`| ${opId} | ${op?.method || ''} | \`${op?.path || ''}\` | ${tags} |`); + lines.push( + `| ${opId} | ${op?.method || ''} | \`${op?.path || ''}\` | ${tags} |` + ); } lines.push(''); } @@ -550,7 +582,9 @@ export async function writeDocLocationMapReport(mapResult, outputDir) { if (orphaned.length > 0) { lines.push('## Orphaned References (Stale Doc Links)'); lines.push(''); - lines.push('These doc pages reference operationIds that are no longer in the spec.'); + lines.push( + 'These doc pages reference operationIds that are no longer in the spec.' + ); lines.push(''); lines.push('| Operation ID (missing from spec) | Doc Page |'); lines.push('|---|---|'); diff --git a/scripts/docs-cli/lib/gap-reporter.js b/scripts/docs-cli/lib/gap-reporter.js index d96925bab3..7cb50225fe 100644 --- a/scripts/docs-cli/lib/gap-reporter.js +++ b/scripts/docs-cli/lib/gap-reporter.js @@ -19,7 +19,11 @@ import { promises as fs } from 'fs'; import { join, resolve } from 'path'; import { execSync } from 'child_process'; import yaml from 'js-yaml'; -import { scoreSeverity, deriveCategoryLabel, sortBySeverity } from './gap-severity.js'; +import { + scoreSeverity, + deriveCategoryLabel, + sortBySeverity, +} from './gap-severity.js'; // ─── Spec delta computation ─────────────────────────────────────────────────── @@ -35,17 +39,25 @@ import { scoreSeverity, deriveCategoryLabel, sortBySeverity } from './gap-severi * @param {string} repoRoot - Absolute path to docs-v2 repo root * @returns {{ added: string[], modified: string[], removed: string[] }} */ -export function computeSpecDelta(specRelPath, fromRef, toRef = 'HEAD', repoRoot) { +export function computeSpecDelta( + specRelPath, + fromRef, + toRef = 'HEAD', + repoRoot +) { repoRoot = repoRoot || resolve(new 
URL('../../..', import.meta.url).pathname); let oldContent = ''; let newContent = ''; try { - oldContent = execSync(`git -C "${repoRoot}" show "${fromRef}:${specRelPath}"`, { - encoding: 'utf-8', - stdio: ['pipe', 'pipe', 'pipe'], - }); + oldContent = execSync( + `git -C "${repoRoot}" show "${fromRef}:${specRelPath}"`, + { + encoding: 'utf-8', + stdio: ['pipe', 'pipe', 'pipe'], + } + ); } catch { // File didn't exist at fromRef — all operations are 'added' } @@ -56,10 +68,13 @@ export function computeSpecDelta(specRelPath, fromRef, toRef = 'HEAD', repoRoot) encoding: 'utf-8', }); } else { - newContent = execSync(`git -C "${repoRoot}" show "${toRef}:${specRelPath}"`, { - encoding: 'utf-8', - stdio: ['pipe', 'pipe', 'pipe'], - }); + newContent = execSync( + `git -C "${repoRoot}" show "${toRef}:${specRelPath}"`, + { + encoding: 'utf-8', + stdio: ['pipe', 'pipe', 'pipe'], + } + ); } } catch { // File doesn't exist at toRef — all operations are 'removed' @@ -146,10 +161,12 @@ function suggestDocPaths(operationId, opInfo, confirmedMap, operationIdToPath) { if (coveredOp && coveredOp.path.startsWith(pathPrefix)) { for (const entry of entries) { // Convert content path to URL-style suggestion - const url = '/' + entry.docPath - .replace(/\/_index\.md$/, '/') - .replace(/\.md$/, '/') - .replace(/\/index\.md$/, '/'); + const url = + '/' + + entry.docPath + .replace(/\/_index\.md$/, '/') + .replace(/\.md$/, '/') + .replace(/\/index\.md$/, '/'); suggestions.add(url); } } @@ -173,8 +190,18 @@ function buildGapEntries({ mapResult, specDelta }) { const gaps = []; for (const [edition, data] of Object.entries(mapResult.editions)) { - const { uncovered, orphaned, confirmedMap, operationIdToPath, specVersion } = data; - const editionDelta = specDelta?.[edition] || { added: [], modified: [], removed: [] }; + const { + uncovered, + orphaned, + confirmedMap, + operationIdToPath, + specVersion, + } = data; + const editionDelta = specDelta?.[edition] || { + added: [], + modified: [], + 
removed: [], + }; // Uncovered operations for (const opId of uncovered) { @@ -189,12 +216,23 @@ function buildGapEntries({ mapResult, specDelta }) { let editionScope = edition; if (mapResult.editions.core && mapResult.editions.enterprise) { const inCore = mapResult.editions.core.operationIdToPath.has(opId); - const inEnterprise = mapResult.editions.enterprise.operationIdToPath.has(opId); - editionScope = inCore && inEnterprise ? 'both' : inCore ? 'core' : 'enterprise'; + const inEnterprise = + mapResult.editions.enterprise.operationIdToPath.has(opId); + editionScope = + inCore && inEnterprise ? 'both' : inCore ? 'core' : 'enterprise'; } - const { severity, rationale } = scoreSeverity(opInfo, editionScope, changeType); - const suggestedDocPaths = suggestDocPaths(opId, opInfo, confirmedMap, operationIdToPath); + const { severity, rationale } = scoreSeverity( + opInfo, + editionScope, + changeType + ); + const suggestedDocPaths = suggestDocPaths( + opId, + opInfo, + confirmedMap, + operationIdToPath + ); const category = deriveCategoryLabel(opInfo.path, opInfo.tags); gaps.push({ @@ -222,9 +260,14 @@ function buildGapEntries({ mapResult, specDelta }) { if (!opInfo) continue; const editionScope = edition; - const { severity, rationale } = scoreSeverity(opInfo, editionScope, 'modified'); + const { severity, rationale } = scoreSeverity( + opInfo, + editionScope, + 'modified' + ); const category = deriveCategoryLabel(opInfo.path, opInfo.tags); - const existingPages = confirmedMap.get(opId)?.map((e) => e.docPath) || []; + const existingPages = + confirmedMap.get(opId)?.map((e) => e.docPath) || []; gaps.push({ operationId: opId, @@ -240,7 +283,9 @@ function buildGapEntries({ mapResult, specDelta }) { severity, severityRationale: rationale, existingDocPages: existingPages, - suggestedDocPaths: existingPages.map((p) => '/' + p.replace(/\/_index\.md$/, '/').replace(/\.md$/, '/')), + suggestedDocPaths: existingPages.map( + (p) => '/' + p.replace(/\/_index\.md$/, 
'/').replace(/\.md$/, '/') + ), note: 'Existing docs may need updates for this changed endpoint.', }); } @@ -260,7 +305,8 @@ function buildGapEntries({ mapResult, specDelta }) { specVersion, changeType: 'removed', severity: 'medium', - severityRationale: 'Doc page references a removed endpoint; page needs update or removal', + severityRationale: + 'Doc page references a removed endpoint; page needs update or removal', existingDocPages: [ref.docPath], suggestedDocPaths: [], }); @@ -278,8 +324,16 @@ function buildGapEntries({ mapResult, specDelta }) { if (existing.editionScope !== gap.editionScope) { existing.editionScope = 'both'; // Re-score with wider scope - const opInfo = { path: existing.path, method: existing.method, tags: existing.tags }; - const { severity, rationale } = scoreSeverity(opInfo, 'both', existing.changeType); + const opInfo = { + path: existing.path, + method: existing.method, + tags: existing.tags, + }; + const { severity, rationale } = scoreSeverity( + opInfo, + 'both', + existing.changeType + ); existing.severity = severity; existing.severityRationale = rationale; } @@ -325,12 +379,15 @@ export async function generateGapReport({ specDelta = {}; const specFiles = { core: 'api-docs/influxdb3/core/v3/influxdb3-core-openapi.yaml', - enterprise: 'api-docs/influxdb3/enterprise/v3/influxdb3-enterprise-openapi.yaml', + enterprise: + 'api-docs/influxdb3/enterprise/v3/influxdb3-enterprise-openapi.yaml', }; for (const edition of Object.keys(mapResult.editions)) { if (specFiles[edition]) { - console.log(` Computing spec delta ${previousVersion}→${version} for ${edition}...`); + console.log( + ` Computing spec delta ${previousVersion}→${version} for ${edition}...` + ); try { specDelta[edition] = computeSpecDelta( specFiles[edition], @@ -343,7 +400,9 @@ export async function generateGapReport({ ` Delta: +${d.added.length} added, ~${d.modified.length} modified, -${d.removed.length} removed` ); } catch (err) { - console.warn(` ⚠️ Could not compute spec delta 
for ${edition}: ${err.message}`); + console.warn( + ` ⚠️ Could not compute spec delta for ${edition}: ${err.message}` + ); specDelta[edition] = { added: [], modified: [], removed: [] }; } } @@ -385,7 +444,8 @@ export async function generateGapReport({ async function writeMdSummary(report, outputPath) { const lines = []; - const titleEdition = report.product.charAt(0).toUpperCase() + report.product.slice(1); + const titleEdition = + report.product.charAt(0).toUpperCase() + report.product.slice(1); lines.push(`# Documentation Gap Report — InfluxDB 3 ${titleEdition}`); lines.push(''); @@ -420,7 +480,9 @@ async function writeMdSummary(report, outputPath) { if (sevGaps.length === 0) continue; const emoji = { critical: '🔴', high: '🟠', medium: '🟡', low: '🔵' }[sev]; - lines.push(`## ${emoji} ${sev.charAt(0).toUpperCase() + sev.slice(1)} Priority (${sevGaps.length})`); + lines.push( + `## ${emoji} ${sev.charAt(0).toUpperCase() + sev.slice(1)} Priority (${sevGaps.length})` + ); lines.push(''); for (const gap of sevGaps) { @@ -433,10 +495,14 @@ async function writeMdSummary(report, outputPath) { lines.push(`- **Rationale:** ${gap.severityRationale}`); if (gap.summary) lines.push(`- **Summary:** ${gap.summary}`); if (gap.existingDocPages.length > 0) { - lines.push(`- **Existing docs:** ${gap.existingDocPages.map((p) => `\`${p}\``).join(', ')}`); + lines.push( + `- **Existing docs:** ${gap.existingDocPages.map((p) => `\`${p}\``).join(', ')}` + ); } if (gap.suggestedDocPaths.length > 0) { - lines.push(`- **Suggested location:** ${gap.suggestedDocPaths.join(', ')}`); + lines.push( + `- **Suggested location:** ${gap.suggestedDocPaths.join(', ')}` + ); } if (gap.note) lines.push(`- **Note:** ${gap.note}`); lines.push(''); diff --git a/scripts/docs-cli/lib/gap-severity.js b/scripts/docs-cli/lib/gap-severity.js index d1d78ea714..5c166e8ebd 100644 --- a/scripts/docs-cli/lib/gap-severity.js +++ b/scripts/docs-cli/lib/gap-severity.js @@ -60,7 +60,11 @@ const PATH_TIER = [ { prefix: 
'/metrics', tier: 'low', cap: 'low' }, // Plugin test endpoints — internal / developer tooling - { prefix: '/api/v3/configure/processing_engine_trigger/test', tier: 'low', cap: 'low' }, + { + prefix: '/api/v3/configure/processing_engine_trigger/test', + tier: 'low', + cap: 'low', + }, ]; /** @@ -71,12 +75,12 @@ const TAG_TIER = { 'Query data': 'high', 'Manage databases': 'high', 'Manage tables': 'high', - 'Authenticate': 'medium', + Authenticate: 'medium', 'Manage tokens': 'medium', 'Cache data': 'medium', 'Processing engine': 'medium', 'Export data': 'medium', - 'System': 'low', + System: 'low', }; // Tier ordering for cap enforcement @@ -100,7 +104,8 @@ function capTier(tier, cap) { function baseTierFromPathAndTags(path, tags = []) { // Check path prefixes (most specific first) for (const entry of PATH_TIER) { - if (path.startsWith(entry.prefix)) return { tier: entry.tier, cap: entry.cap }; + if (path.startsWith(entry.prefix)) + return { tier: entry.tier, cap: entry.cap }; } // Fall back to tag-based lookup @@ -186,23 +191,44 @@ export function scoreSeverity(operation, editionScope, changeType) { * Derive a human-readable category label for an operation. 
*/ export function deriveCategoryLabel(path, tags = []) { - if (path.startsWith('/write') || path.startsWith('/api/v2/write') || path.startsWith('/api/v3/write')) { + if ( + path.startsWith('/write') || + path.startsWith('/api/v2/write') || + path.startsWith('/api/v3/write') + ) { return 'Write data'; } - if (path.startsWith('/query') || path.startsWith('/api/v2/query') || path.startsWith('/api/v3/query')) { + if ( + path.startsWith('/query') || + path.startsWith('/api/v2/query') || + path.startsWith('/api/v3/query') + ) { return 'Query data'; } - if (path.startsWith('/api/v3/configure/database')) return 'Database management'; + if (path.startsWith('/api/v3/configure/database')) + return 'Database management'; if (path.startsWith('/api/v3/configure/table')) return 'Table management'; - if (path.startsWith('/api/v3/configure/token') || path.startsWith('/api/v3/configure/token')) return 'Token management'; - if (path.startsWith('/api/v3/configure/distinct_cache')) return 'Distinct Value Cache'; - if (path.startsWith('/api/v3/configure/last_cache')) return 'Last Value Cache'; - if (path.startsWith('/api/v3/configure/processing_engine_trigger')) return 'Processing engine triggers'; + if ( + path.startsWith('/api/v3/configure/token') || + path.startsWith('/api/v3/configure/token') + ) + return 'Token management'; + if (path.startsWith('/api/v3/configure/distinct_cache')) + return 'Distinct Value Cache'; + if (path.startsWith('/api/v3/configure/last_cache')) + return 'Last Value Cache'; + if (path.startsWith('/api/v3/configure/processing_engine_trigger')) + return 'Processing engine triggers'; if (path.startsWith('/api/v3/engine')) return 'Processing engine'; if (path.startsWith('/api/v3/export')) return 'Export data'; if (path.startsWith('/api/v3/enterprise')) return 'Enterprise administration'; if (path.startsWith('/api/v3/plugins')) return 'Plugin management'; - if (path.startsWith('/health') || path.startsWith('/ping') || path.startsWith('/metrics')) return 'System'; 
+ if ( + path.startsWith('/health') || + path.startsWith('/ping') || + path.startsWith('/metrics') + ) + return 'System'; // Fall back to first tag if (tags.length > 0) return tags[0]; @@ -216,5 +242,7 @@ export function deriveCategoryLabel(path, tags = []) { */ export function sortBySeverity(gaps) { const order = { critical: 0, high: 1, medium: 2, low: 3 }; - return [...gaps].sort((a, b) => (order[a.severity] ?? 4) - (order[b.severity] ?? 4)); + return [...gaps].sort( + (a, b) => (order[a.severity] ?? 4) - (order[b.severity] ?? 4) + ); } diff --git a/scripts/docs-cli/lib/issue-creator.js b/scripts/docs-cli/lib/issue-creator.js index c522b603ab..4279ee8844 100644 --- a/scripts/docs-cli/lib/issue-creator.js +++ b/scripts/docs-cli/lib/issue-creator.js @@ -26,11 +26,31 @@ import { scoreSeverity, deriveCategoryLabel } from './gap-severity.js'; */ async function ensureLabels(dryRun) { const labelsToCreate = [ - { name: 'doc-gap', color: 'd93f0b', description: 'Documentation gap detected by automated pipeline' }, - { name: 'doc-gap:critical', color: 'b60205', description: 'Critical priority documentation gap' }, - { name: 'doc-gap:high', color: 'e4e669', description: 'High priority documentation gap' }, - { name: 'doc-gap:medium', color: '0075ca', description: 'Medium priority documentation gap' }, - { name: 'doc-gap:low', color: 'cfd3d7', description: 'Low priority documentation gap' }, + { + name: 'doc-gap', + color: 'd93f0b', + description: 'Documentation gap detected by automated pipeline', + }, + { + name: 'doc-gap:critical', + color: 'b60205', + description: 'Critical priority documentation gap', + }, + { + name: 'doc-gap:high', + color: 'e4e669', + description: 'High priority documentation gap', + }, + { + name: 'doc-gap:medium', + color: '0075ca', + description: 'Medium priority documentation gap', + }, + { + name: 'doc-gap:low', + color: 'cfd3d7', + description: 'Low priority documentation gap', + }, ]; if (dryRun) return; // Skip label creation in dry-run mode 
@@ -58,7 +78,9 @@ async function ensureLabels(dryRun) { * @returns {string} */ function buildIssueBody(gap, product, version) { - const severityEmoji = { critical: '🔴', high: '🟠', medium: '🟡', low: '🔵' }[gap.severity] || '⚪'; + const severityEmoji = + { critical: '🔴', high: '🟠', medium: '🟡', low: '🔵' }[gap.severity] || + '⚪'; const editionLabel = gap.editionScope === 'both' ? 'Core and Enterprise' @@ -79,10 +101,14 @@ function buildIssueBody(gap, product, version) { editionDoneItems.push('- [ ] Core behavior documented'); } if (gap.editionScope === 'both' || gap.editionScope === 'enterprise') { - editionDoneItems.push('- [ ] Enterprise-specific behavior documented (if any)'); + editionDoneItems.push( + '- [ ] Enterprise-specific behavior documented (if any)' + ); } if (gap.editionScope === 'both') { - editionDoneItems.push('- [ ] Superset relationship noted (Enterprise includes all Core changes)'); + editionDoneItems.push( + '- [ ] Superset relationship noted (Enterprise includes all Core changes)' + ); } const sections = [ @@ -177,12 +203,18 @@ export async function createGapIssues({ let editionScope = edition; if (mapResult.editions.core && mapResult.editions.enterprise) { const inCore = mapResult.editions.core.operationIdToPath.has(opId); - const inEnterprise = mapResult.editions.enterprise.operationIdToPath.has(opId); - editionScope = inCore && inEnterprise ? 'both' : inCore ? 'core' : 'enterprise'; + const inEnterprise = + mapResult.editions.enterprise.operationIdToPath.has(opId); + editionScope = + inCore && inEnterprise ? 'both' : inCore ? 
'core' : 'enterprise'; } const changeType = 'existing'; // Without delta info here; gap-reporter handles delta scoring - const { severity, rationale } = scoreSeverity(opInfo, editionScope, changeType); + const { severity, rationale } = scoreSeverity( + opInfo, + editionScope, + changeType + ); if (!severities.includes(severity)) continue; @@ -216,7 +248,9 @@ export async function createGapIssues({ return; } - console.log(`\n📋 ${dryRun ? '[DRY RUN] Would create' : 'Creating'} ${deduped.length} issue(s) for ${severities.join('/')} gaps...`); + console.log( + `\n📋 ${dryRun ? '[DRY RUN] Would create' : 'Creating'} ${deduped.length} issue(s) for ${severities.join('/')} gaps...` + ); if (!dryRun) { await ensureLabels(false); @@ -226,11 +260,7 @@ export async function createGapIssues({ const title = `Doc gap [${gap.severity}]: ${gap.operationId} — ${product} v${version}`; const body = buildIssueBody(gap, product, version); - const labels = [ - 'documentation', - 'doc-gap', - `doc-gap:${gap.severity}`, - ]; + const labels = ['documentation', 'doc-gap', `doc-gap:${gap.severity}`]; if (gap.editionScope === 'both') { labels.push('product:influxdb3-core', 'product:influxdb3-enterprise'); @@ -261,7 +291,9 @@ export async function createGapIssues({ ); console.log(` ✓ Created (without product labels): ${title}`); } catch (err2) { - console.error(` ✗ Failed to create issue for ${gap.operationId}: ${err2.message}`); + console.error( + ` ✗ Failed to create issue for ${gap.operationId}: ${err2.message}` + ); } } } @@ -269,6 +301,8 @@ export async function createGapIssues({ if (dryRun) { console.log('\n' + '─'.repeat(72)); - console.log(`[DRY RUN] Would have created ${deduped.length} issue(s). Pass --create-issue without --dry-run to create them.`); + console.log( + `[DRY RUN] Would have created ${deduped.length} issue(s). 
Pass --create-issue without --dry-run to create them.` + ); } } From 77c6f073a4e89d88514361a22a6695acef313f08 Mon Sep 17 00:00:00 2001 From: Claude Date: Mon, 6 Apr 2026 11:52:05 +0000 Subject: [PATCH 3/4] fix(ci): address all PR #7058 review comments - gap-severity: reorder PATH_TIER so test endpoint prefix matches before its parent trigger prefix; remove duplicate /configure/token entry; collapse redundant token check in deriveCategoryLabel to single startsWith - issue-creator: replace execSync shell-string gh calls with execFileSync + temp body file to prevent shell injection from spec-derived field values; remove unused execFileSync import and replace with fs writeFileSync/unlinkSync - audit: add early validation that --create-issue requires --doc-location-map; wire --previous-version to computeSpecDelta so filterOperationIds is built from the spec delta and passed to runDocLocationMap (was silently ignored) - gap-reporter: replace execSync('cat ...') with readFileSync (cross-platform, no subprocess); add readFileSync to existing fs import - doc-location-map: remove never-populated orphanedRefs from scanContentFiles (orphan tracking is handled correctly by reconcile()) - influxdb3-release.yml: add create-documentation-pr to create-audit-issue needs so issue creation always runs after PR creation; fix inaccurate PR body claim that issues 'have been filed' before create-audit-issue has run https://claude.ai/code/session_01CpE2NxtgSre6spEHLrUw5M --- .github/workflows/influxdb3-release.yml | 4 +- scripts/docs-cli/commands/audit.js | 48 ++++++++++++++++++- scripts/docs-cli/lib/doc-location-map.js | 3 +- scripts/docs-cli/lib/gap-reporter.js | 6 +-- scripts/docs-cli/lib/gap-severity.js | 22 ++++----- scripts/docs-cli/lib/issue-creator.js | 59 ++++++++++++++++++++---- 6 files changed, 110 insertions(+), 32 deletions(-) diff --git a/.github/workflows/influxdb3-release.yml b/.github/workflows/influxdb3-release.yml index 81c3d2810f..1e0a8e36b7 100644 --- 
a/.github/workflows/influxdb3-release.yml +++ b/.github/workflows/influxdb3-release.yml @@ -438,7 +438,7 @@ jobs: ${{ needs.audit-api-documentation.outputs.gap_report_generated == 'true' && format('🔴 Critical: **{0}** | 🟠 High: **{1}**', needs.audit-api-documentation.outputs.critical_count, needs.audit-api-documentation.outputs.high_count) || 'Gap analysis did not run or was skipped.' }} See the [gap report artifact](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}) for the full list of uncovered operations with severity scores and suggested doc locations. - High/critical gaps have been automatically filed as individual GitHub issues. + High/critical gaps will be filed as individual GitHub issues by the `create-audit-issue` job (runs after this PR is created). ### Artifacts: - [Release Notes](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}) @@ -455,7 +455,7 @@ jobs: create-audit-issue: name: Create Audit Issue - needs: [audit-cli-documentation, audit-api-documentation] + needs: [audit-cli-documentation, audit-api-documentation, create-documentation-pr] runs-on: ubuntu-latest if: github.event.inputs.dry_run != 'true' && always() && (needs.audit-cli-documentation.result == 'success' || needs.audit-api-documentation.result == 'success') diff --git a/scripts/docs-cli/commands/audit.js b/scripts/docs-cli/commands/audit.js index 06ef69ec95..af573ee7a9 100644 --- a/scripts/docs-cli/commands/audit.js +++ b/scripts/docs-cli/commands/audit.js @@ -255,6 +255,17 @@ export default async function audit(args) { } } + // Validate --create-issue requires --doc-location-map + if (createIssueFlag && !runDocLocationMapFlag) { + console.error( + 'Error: --create-issue requires --doc-location-map to be set' + ); + console.error( + 'Example: docs audit --products influxdb3_core --doc-location-map --create-issue' + ); + process.exit(1); + } + // Validate mutual exclusion validateMutualExclusion({ products: productsInput, repos: 
reposInput }); @@ -495,12 +506,45 @@ export default async function audit(args) { const { runDocLocationMap, writeDocLocationMapReport } = await import('../lib/doc-location-map.js'); - const { generateGapReport } = await import('../lib/gap-reporter.js'); + const { generateGapReport, computeSpecDelta } = await import( + '../lib/gap-reporter.js' + ); const outputDir = join(__dirname, '..', '..', 'output', 'gap-reports'); + // When a previous version is given, limit the map to operations that + // changed between that version and HEAD so the report stays focused. + let filterOperationIds = null; + if (previousVersion) { + const SPEC_PATHS = { + core: 'api-docs/v3/openapi.json', + enterprise: 'api-docs/v3/enterprise/openapi.json', + }; + const targetEditions = + influxProduct === 'both' ? ['core', 'enterprise'] : [influxProduct]; + const deltaIds = new Set(); + for (const ed of targetEditions) { + const specPath = SPEC_PATHS[ed]; + if (specPath) { + try { + const delta = computeSpecDelta(specPath, previousVersion); + for (const id of [ + ...delta.added, + ...delta.modified, + ...delta.removed, + ]) { + deltaIds.add(id); + } + } catch { + // spec path may not exist for this edition; skip silently + } + } + } + if (deltaIds.size > 0) filterOperationIds = deltaIds; + } + const mapResult = await runDocLocationMap(influxProduct, { - ...(previousVersion && { previousVersion }), + ...(filterOperationIds && { filterOperationIds }), }); await writeDocLocationMapReport(mapResult, outputDir); diff --git a/scripts/docs-cli/lib/doc-location-map.js b/scripts/docs-cli/lib/doc-location-map.js index f3a3276d6a..61bd5c1ba8 100644 --- a/scripts/docs-cli/lib/doc-location-map.js +++ b/scripts/docs-cli/lib/doc-location-map.js @@ -305,7 +305,6 @@ async function scanContentFiles( filterOpIds ) { const results = new Map(); // operationId → [signal entries] - const orphanedRefs = []; // { docPath, operationId } where operationId not in spec for (const absFile of files) { const { frontmatter, 
body: stubBody } = await parseFrontmatter(absFile); @@ -351,7 +350,7 @@ async function scanContentFiles( } } - return { coverageMap: results, orphanedRefs }; + return { coverageMap: results }; } async function readSharedBody(absPath) { diff --git a/scripts/docs-cli/lib/gap-reporter.js b/scripts/docs-cli/lib/gap-reporter.js index 7cb50225fe..7fa2e748e6 100644 --- a/scripts/docs-cli/lib/gap-reporter.js +++ b/scripts/docs-cli/lib/gap-reporter.js @@ -15,7 +15,7 @@ * @module gap-reporter */ -import { promises as fs } from 'fs'; +import { promises as fs, readFileSync } from 'fs'; import { join, resolve } from 'path'; import { execSync } from 'child_process'; import yaml from 'js-yaml'; @@ -64,9 +64,7 @@ export function computeSpecDelta( try { if (toRef === 'HEAD') { - newContent = execSync(`cat "${join(repoRoot, specRelPath)}"`, { - encoding: 'utf-8', - }); + newContent = readFileSync(join(repoRoot, specRelPath), 'utf-8'); } else { newContent = execSync( `git -C "${repoRoot}" show "${toRef}:${specRelPath}"`, diff --git a/scripts/docs-cli/lib/gap-severity.js b/scripts/docs-cli/lib/gap-severity.js index 5c166e8ebd..d036ed4365 100644 --- a/scripts/docs-cli/lib/gap-severity.js +++ b/scripts/docs-cli/lib/gap-severity.js @@ -35,13 +35,19 @@ const PATH_TIER = [ // Token management { prefix: '/api/v3/configure/token', tier: 'medium' }, - { prefix: '/api/v3/configure/token', tier: 'medium' }, // Caching { prefix: '/api/v3/configure/distinct_cache', tier: 'medium' }, { prefix: '/api/v3/configure/last_cache', tier: 'medium' }, - // Processing engine / triggers + // Plugin test endpoints — must come before the general trigger prefix + { + prefix: '/api/v3/configure/processing_engine_trigger/test', + tier: 'low', + cap: 'low', + }, + + // Processing engine / triggers (general — after more specific test prefix above) { prefix: '/api/v3/configure/processing_engine_trigger', tier: 'medium' }, { prefix: '/api/v3/engine', tier: 'medium' }, @@ -59,12 +65,6 @@ const PATH_TIER = [ { prefix: 
'/ping', tier: 'low', cap: 'low' }, { prefix: '/metrics', tier: 'low', cap: 'low' }, - // Plugin test endpoints — internal / developer tooling - { - prefix: '/api/v3/configure/processing_engine_trigger/test', - tier: 'low', - cap: 'low', - }, ]; /** @@ -208,11 +208,7 @@ export function deriveCategoryLabel(path, tags = []) { if (path.startsWith('/api/v3/configure/database')) return 'Database management'; if (path.startsWith('/api/v3/configure/table')) return 'Table management'; - if ( - path.startsWith('/api/v3/configure/token') || - path.startsWith('/api/v3/configure/token') - ) - return 'Token management'; + if (path.startsWith('/api/v3/configure/token')) return 'Token management'; if (path.startsWith('/api/v3/configure/distinct_cache')) return 'Distinct Value Cache'; if (path.startsWith('/api/v3/configure/last_cache')) diff --git a/scripts/docs-cli/lib/issue-creator.js b/scripts/docs-cli/lib/issue-creator.js index 4279ee8844..eedc74722b 100644 --- a/scripts/docs-cli/lib/issue-creator.js +++ b/scripts/docs-cli/lib/issue-creator.js @@ -14,7 +14,10 @@ * @module issue-creator */ -import { execSync, execFileSync } from 'child_process'; +import { execFileSync } from 'child_process'; +import { writeFileSync, unlinkSync } from 'fs'; +import { tmpdir } from 'os'; +import { join } from 'path'; import { scoreSeverity, deriveCategoryLabel } from './gap-severity.js'; // ─── Label management ───────────────────────────────────────────────────────── @@ -57,8 +60,17 @@ async function ensureLabels(dryRun) { for (const label of labelsToCreate) { try { - execSync( - `gh label create "${label.name}" --color "${label.color}" --description "${label.description}" 2>/dev/null || true`, + execFileSync( + 'gh', + [ + 'label', + 'create', + label.name, + '--color', + label.color, + '--description', + label.description, + ], { stdio: 'pipe' } ); } catch { @@ -275,18 +287,41 @@ export async function createGapIssues({ console.log('─'.repeat(72)); console.log(body); } else { + const bodyFile = 
join(tmpdir(), `doc-gap-${gap.operationId}.md`); try { - const labelArgs = labels.map((l) => `--label "${l}"`).join(' '); - execSync( - `gh issue create --title "${title.replace(/"/g, '\\"')}" --body "${body.replace(/"/g, '\\"').replace(/\n/g, '\\n')}" ${labelArgs}`, + writeFileSync(bodyFile, body, 'utf-8'); + const labelFlags = labels.flatMap((l) => ['--label', l]); + execFileSync( + 'gh', + [ + 'issue', + 'create', + '--title', + title, + '--body-file', + bodyFile, + ...labelFlags, + ], { stdio: ['pipe', 'inherit', 'pipe'] } ); console.log(` ✓ Created: ${title}`); } catch (err) { - // If label doesn't exist in repo, retry without product labels + // If labels don't exist in repo, retry with only the core labels try { - execSync( - `gh issue create --title "${title.replace(/"/g, '\\"')}" --body "${body.replace(/"/g, '\\"').replace(/\n/g, '\\n')}" --label "documentation" --label "doc-gap"`, + execFileSync( + 'gh', + [ + 'issue', + 'create', + '--title', + title, + '--body-file', + bodyFile, + '--label', + 'documentation', + '--label', + 'doc-gap', + ], { stdio: ['pipe', 'inherit', 'pipe'] } ); console.log(` ✓ Created (without product labels): ${title}`); @@ -295,6 +330,12 @@ export async function createGapIssues({ ` ✗ Failed to create issue for ${gap.operationId}: ${err2.message}` ); } + } finally { + try { + unlinkSync(bodyFile); + } catch { + // ignore cleanup errors + } } } } From f698f3fdaa4242ab2cae5d4314d41afb84f1438f Mon Sep 17 00:00:00 2001 From: Claude Date: Mon, 6 Apr 2026 11:54:16 +0000 Subject: [PATCH 4/4] style: apply prettier formatting https://claude.ai/code/session_01CpE2NxtgSre6spEHLrUw5M --- scripts/docs-cli/commands/audit.js | 5 ++--- scripts/docs-cli/lib/gap-severity.js | 1 - 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/scripts/docs-cli/commands/audit.js b/scripts/docs-cli/commands/audit.js index af573ee7a9..aa4fdccb80 100644 --- a/scripts/docs-cli/commands/audit.js +++ b/scripts/docs-cli/commands/audit.js @@ -506,9 +506,8 @@ 
export default async function audit(args) { const { runDocLocationMap, writeDocLocationMapReport } = await import('../lib/doc-location-map.js'); - const { generateGapReport, computeSpecDelta } = await import( - '../lib/gap-reporter.js' - ); + const { generateGapReport, computeSpecDelta } = + await import('../lib/gap-reporter.js'); const outputDir = join(__dirname, '..', '..', 'output', 'gap-reports'); diff --git a/scripts/docs-cli/lib/gap-severity.js b/scripts/docs-cli/lib/gap-severity.js index d036ed4365..e40b0e025c 100644 --- a/scripts/docs-cli/lib/gap-severity.js +++ b/scripts/docs-cli/lib/gap-severity.js @@ -64,7 +64,6 @@ const PATH_TIER = [ { prefix: '/health', tier: 'low', cap: 'low' }, { prefix: '/ping', tier: 'low', cap: 'low' }, { prefix: '/metrics', tier: 'low', cap: 'low' }, - ]; /**