#!/usr/bin/env bash

# Consolidated prerequisite checking script
#
# This script provides unified prerequisite checking for Spec-Driven Development workflow.
# It replaces the functionality previously spread across multiple scripts.
#
# Usage: ./check-prerequisites.sh [OPTIONS]
#
# OPTIONS:
#   --json              Output in JSON format
#   --require-tasks     Require tasks.md to exist (for implementation phase)
#   --include-tasks     Include tasks.md in AVAILABLE_DOCS list
#   --paths-only        Only output path variables (no validation)
#   --help, -h          Show help message
#
# OUTPUTS:
#   JSON mode: {"FEATURE_DIR":"...", "AVAILABLE_DOCS":["..."]}
#   Text mode: FEATURE_DIR:... \n AVAILABLE_DOCS: \n ✓/✗ file.md
#   Paths only: REPO_ROOT: ... \n BRANCH: ... \n FEATURE_DIR: ... etc.
# Fail fast: abort on any unhandled error, and make a pipeline fail when any
# stage fails (this script pipes through jq and python3 below; without
# pipefail those failures would be masked by the final stage's status).
# NOTE(review): -u is deliberately omitted — variables such as $AD and
# $RESEARCH are injected by eval'ing get_feature_paths from common.sh and
# are not verifiable from this file; confirm they are always set before
# enabling -u.
set -eo pipefail

# Option flags (defaults; toggled by the command-line parsing below)
JSON_MODE=false       # --json: emit machine-readable JSON
REQUIRE_TASKS=false   # --require-tasks: fail unless tasks.md exists
INCLUDE_TASKS=false   # --include-tasks: list tasks.md in AVAILABLE_DOCS
PATHS_ONLY=false      # --paths-only: print path variables and exit
| 29 | + |
# ---------------------------------------------------------------------------
# Command-line parsing: walk the argument list and toggle the option flags.
# --help prints the manual and exits 0; an unknown option aborts with a hint.
# ---------------------------------------------------------------------------
while [[ $# -gt 0 ]]; do
  case "$1" in
    --json)          JSON_MODE=true ;;
    --require-tasks) REQUIRE_TASKS=true ;;
    --include-tasks) INCLUDE_TASKS=true ;;
    --paths-only)    PATHS_ONLY=true ;;
    --help|-h)
      # Quoted delimiter: the help text is emitted verbatim, no expansion.
      cat << 'EOF'
Usage: check-prerequisites.sh [OPTIONS]

Consolidated prerequisite checking for Spec-Driven Development workflow.

OPTIONS:
  --json              Output in JSON format
  --require-tasks     Require tasks.md to exist (for implementation phase)
  --include-tasks     Include tasks.md in AVAILABLE_DOCS list
  --paths-only        Only output path variables (no prerequisite validation)
  --help, -h          Show this help message

EXAMPLES:
  # Check task prerequisites (plan.md required)
  ./check-prerequisites.sh --json

  # Check implementation prerequisites (plan.md required, tasks.md always required)
  ./check-prerequisites.sh --json --require-tasks --include-tasks

  # Get feature paths only (no validation)
  ./check-prerequisites.sh --paths-only

EOF
      exit 0
      ;;
    *)
      echo "ERROR: Unknown option '$1'. Use --help for usage information." >&2
      exit 1
      ;;
  esac
  shift
done
| 76 | + |
# Locate this script's own directory (immune to the caller's cwd and CDPATH)
# and pull in the shared helpers used below: get_feature_paths,
# check_feature_branch, has_jq, json_escape, check_file, check_dir, etc.
SCRIPT_DIR="$(CDPATH='' cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
# shellcheck source=common.sh
source "$SCRIPT_DIR/common.sh"
| 80 | + |
# ---------------------------------------------------------------------------
# extract_risks FILE
# Parse Risk Register entries from a markdown file and print them as a JSON
# array on stdout. Recognized lines look like:
#   - RISK: R1 | impact: high | mitigation: ...
# Fields are pipe-separated "key: value" pairs; a leading bare token (no
# colon) is treated as the risk id. Each entry gets a normalized "severity"
# (Critical/High/Medium/Low), derived from its severity or impact field.
# A missing file, missing python3 interpreter, or undecodable bytes degrade
# to an empty array instead of aborting the script (which runs under set -e).
# ---------------------------------------------------------------------------
extract_risks() {
  local file="$1"

  # Best-effort metadata: degrade to "[]" rather than killing the caller.
  if [[ ! -f "$file" ]] || ! command -v python3 >/dev/null 2>&1; then
    echo "[]"
    return 0
  fi

  python3 - "$file" <<'PY'
import json
import re
import sys
from pathlib import Path

path = Path(sys.argv[1])
pattern = re.compile(r"^-\s*RISK:\s*(.+)$", re.IGNORECASE)
risks = []

def normalize_severity(value):
    """Normalize severity/impact to standard levels."""
    if not value:
        return "Medium"
    value = value.lower().strip()
    if value in ["critical", "crit", "high", "hi"]:
        return "Critical" if value.startswith("crit") else "High"
    elif value in ["medium", "med"]:
        return "Medium"
    elif value in ["low", "lo"]:
        return "Low"
    else:
        # Unknown or numeric values fall back to the middle of the scale.
        return "Medium"

# errors="replace" keeps a stray invalid byte from raising UnicodeDecodeError
# and aborting the whole prerequisite check.
for line in path.read_text(encoding="utf-8", errors="replace").splitlines():
    match = pattern.match(line.strip())
    if not match:
        continue

    parts = [p.strip() for p in match.group(1).split("|") if p.strip()]
    data = {}

    # A leading segment without a colon is the risk identifier.
    if parts and ":" not in parts[0]:
        data["id"] = parts[0]
        parts = parts[1:]

    for part in parts:
        if ":" not in part:
            continue
        key, value = part.split(":", 1)
        key = key.strip()
        value = value.strip()
        normalized = key.lower().replace(" ", "_")
        if normalized == "risk":
            data["id"] = value
        else:
            data[normalized] = value

    if data:
        if "id" not in data:
            data["id"] = f"missing-id-{len(risks)+1}"
        # Severity may be spelled "severity" or "impact"; accept either.
        severity = data.get("severity") or data.get("impact")
        data["severity"] = normalize_severity(severity)
        risks.append(data)

print(json.dumps(risks, ensure_ascii=False))
PY
}
| 149 | + |
# Resolve repository/feature paths via common.sh. get_feature_paths emits
# VAR=value assignments (REPO_ROOT, CURRENT_BRANCH, FEATURE_DIR, ...) which
# are eval'd into the current shell, then the branch is validated.
if ! _paths_output=$(get_feature_paths); then
  echo "ERROR: Failed to resolve feature paths" >&2
  exit 1
fi
eval "$_paths_output"
unset _paths_output

# Bail out early when the current branch is not a feature branch.
check_feature_branch "$CURRENT_BRANCH" "$HAS_GIT" || exit 1
| 155 | + |
# --paths-only: report the resolved paths and stop before any validation.
# Honors --json too, so "--paths-only --json" yields a JSON payload.
if $PATHS_ONLY; then
  if $JSON_MODE; then
    if has_jq; then
      # jq handles all escaping; -c keeps the object on a single line.
      jq -cn \
        --arg repo_root "$REPO_ROOT" \
        --arg branch "$CURRENT_BRANCH" \
        --arg feature_dir "$FEATURE_DIR" \
        --arg feature_spec "$FEATURE_SPEC" \
        --arg impl_plan "$IMPL_PLAN" \
        --arg tasks "$TASKS" \
        --arg constitution "$CONSTITUTION" \
        --arg ad "$AD" \
        '{REPO_ROOT:$repo_root,BRANCH:$branch,FEATURE_DIR:$feature_dir,FEATURE_SPEC:$feature_spec,IMPL_PLAN:$impl_plan,TASKS:$tasks,CONSTITUTION:$constitution,AD:$ad}'
    else
      # Manual fallback when jq is unavailable; json_escape (common.sh)
      # protects each value.
      printf '{"REPO_ROOT":"%s","BRANCH":"%s","FEATURE_DIR":"%s","FEATURE_SPEC":"%s","IMPL_PLAN":"%s","TASKS":"%s","CONSTITUTION":"%s","AD":"%s"}\n' \
        "$(json_escape "$REPO_ROOT")" "$(json_escape "$CURRENT_BRANCH")" "$(json_escape "$FEATURE_DIR")" "$(json_escape "$FEATURE_SPEC")" "$(json_escape "$IMPL_PLAN")" "$(json_escape "$TASKS")" "$(json_escape "$CONSTITUTION")" "$(json_escape "$AD")"
    fi
  else
    # Plain "KEY: value" lines for human consumption.
    printf '%s\n' \
      "REPO_ROOT: $REPO_ROOT" \
      "BRANCH: $CURRENT_BRANCH" \
      "FEATURE_DIR: $FEATURE_DIR" \
      "FEATURE_SPEC: $FEATURE_SPEC" \
      "IMPL_PLAN: $IMPL_PLAN" \
      "TASKS: $TASKS" \
      "CONSTITUTION: $CONSTITUTION" \
      "AD: $AD"
  fi
  exit 0
fi
| 187 | + |
# ---------------------------------------------------------------------------
# Prerequisite validation (guard clauses): the feature directory and plan.md
# must exist; tasks.md is additionally required with --require-tasks.
# ---------------------------------------------------------------------------
[[ -d "$FEATURE_DIR" ]] || {
  echo "ERROR: Feature directory not found: $FEATURE_DIR" >&2
  echo "Run /spec.specify first to create the feature structure." >&2
  exit 1
}

[[ -f "$IMPL_PLAN" ]] || {
  echo "ERROR: plan.md not found in $FEATURE_DIR" >&2
  echo "Run /spec.plan first to create the implementation plan." >&2
  exit 1
}

if $REQUIRE_TASKS; then
  [[ -f "$TASKS" ]] || {
    echo "ERROR: tasks.md not found in $FEATURE_DIR" >&2
    echo "Run /spec.tasks first to create the task list." >&2
    exit 1
  }
fi
| 208 | + |
# ---------------------------------------------------------------------------
# Inventory the optional design documents present in the feature directory.
# The resulting `docs` array feeds AVAILABLE_DOCS in the output section.
# ---------------------------------------------------------------------------
docs=()

if [[ -f "$RESEARCH" ]]; then
  docs+=("research.md")
fi
if [[ -f "$DATA_MODEL" ]]; then
  docs+=("data-model.md")
fi

# contracts/ counts only when the directory exists AND is non-empty.
if [[ -d "$CONTRACTS_DIR" && -n "$(ls -A "$CONTRACTS_DIR" 2>/dev/null)" ]]; then
  docs+=("contracts/")
fi

if [[ -f "$QUICKSTART" ]]; then
  docs+=("quickstart.md")
fi

# tasks.md is listed only on request (--include-tasks) and when present.
if $INCLUDE_TASKS && [[ -f "$TASKS" ]]; then
  docs+=("tasks.md")
fi
| 227 | + |
# ---------------------------------------------------------------------------
# Report results.
# JSON mode now emits exactly ONE JSON object on stdout: the original code
# printed a partial {FEATURE_DIR,AVAILABLE_DOCS} object first and then the
# comprehensive object, producing two concatenated JSON documents that break
# consumers parsing a single document. The jq-built docs array is also kept
# compact (-c) so it embeds cleanly in the final single-line object.
# Text mode prints a human-readable checklist.
# ---------------------------------------------------------------------------
if $JSON_MODE; then
  # Build a compact JSON array of the available documents.
  if [[ ${#docs[@]} -eq 0 ]]; then
    json_docs="[]"
  elif has_jq; then
    # -R quotes each name as a JSON string; -cs collects them into a
    # single-line array.
    json_docs=$(printf '%s\n' "${docs[@]}" | jq -R . | jq -cs .)
  else
    # Fallback without jq; doc names are fixed literals, so no escaping
    # beyond quoting is needed.
    json_docs=$(printf '"%s",' "${docs[@]}")
    json_docs="[${json_docs%,}]"
  fi

  SPEC_RISKS=$(extract_risks "$FEATURE_SPEC")
  PLAN_RISKS=$(extract_risks "$IMPL_PLAN")

  # Optional governance documents (constitution / architecture description).
  CONSTITUTION_EXISTS="false"
  AD_EXISTS="false"
  CONSTITUTION_RULES="[]"
  AD_VIEWS="{}"
  AD_DIAGRAMS="[]"

  if [[ -f "$CONSTITUTION" ]]; then
    CONSTITUTION_EXISTS="true"
    CONSTITUTION_RULES=$(extract_constitution_rules "$CONSTITUTION")
  fi

  if [[ -f "$AD" ]]; then
    AD_EXISTS="true"
    AD_VIEWS=$(extract_architecture_views "$AD")
    AD_DIAGRAMS=$(extract_architecture_diagrams "$AD")
  fi

  # Single comprehensive JSON document.
  printf '{"FEATURE_DIR":"%s","AVAILABLE_DOCS":%s,"SPEC_RISKS":%s,"PLAN_RISKS":%s,"CONSTITUTION":"%s","CONSTITUTION_EXISTS":%s,"CONSTITUTION_RULES":%s,"AD":"%s","AD_EXISTS":%s,"AD_VIEWS":%s,"AD_DIAGRAMS":%s}\n' \
    "$(json_escape "$FEATURE_DIR")" "$json_docs" "$SPEC_RISKS" "$PLAN_RISKS" \
    "$(json_escape "$CONSTITUTION")" "$CONSTITUTION_EXISTS" "$CONSTITUTION_RULES" \
    "$(json_escape "$AD")" "$AD_EXISTS" "$AD_VIEWS" "$AD_DIAGRAMS"
else
  # Text output: paths plus a ✓/✗ checklist of optional documents.
  echo "FEATURE_DIR:$FEATURE_DIR"
  echo "AVAILABLE_DOCS:"

  check_file "$RESEARCH" "research.md"
  check_file "$DATA_MODEL" "data-model.md"
  check_dir "$CONTRACTS_DIR" "contracts/"
  check_file "$QUICKSTART" "quickstart.md"

  if $INCLUDE_TASKS; then
    check_file "$TASKS" "tasks.md"
  fi

  # Count risk entries in a document; shared helper replaces the previously
  # duplicated inline python snippet.
  count_risks() {
    extract_risks "$1" | python3 - <<'PY'
import json, sys
try:
    data = json.load(sys.stdin)
except json.JSONDecodeError:
    data = []
print(len(data))
PY
  }

  echo "SPEC_RISKS: $(count_risks "$FEATURE_SPEC")"
  echo "PLAN_RISKS: $(count_risks "$IMPL_PLAN")"

  # Governance document status.
  echo ""
  echo "GOVERNANCE DOCUMENTS:"
  check_file "$CONSTITUTION" "constitution.md (optional)"
  # NOTE(review): JSON mode also reports $AD, but text mode does not show it;
  # consider adding a check_file line for the architecture document here —
  # TODO confirm the expected label before adding.
fi