Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
141 changes: 141 additions & 0 deletions claude/hooks/akm-hook.sh
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ AUTO_FEEDBACK="${AKM_AUTO_FEEDBACK:-1}"
# Behavior toggles — each overridable via the matching AKM_* env var (1=on, 0=off).
AUTO_MEMORY="${AKM_AUTO_MEMORY:-1}"
AUTO_SETUP="${AKM_AUTO_SETUP:-1}"
# Off by default: opt in explicitly to run an index pass when a session ends.
INDEX_ON_SESSION_END="${AKM_INDEX_ON_SESSION_END:-0}"
# Where the generated LLM proxy shim lives; see install_llm_proxy_shim.
LLM_PROXY_SHIM_PATH="$STATE_DIR/akm-llm-proxy.sh"
# Header lines and trailing tip injected around curated AKM context.
CURATED_PROMPT_HEADER="# AKM stash — assets relevant to this prompt"
CURATED_SESSION_HEADER="# AKM stash — assets relevant to this session"
CURATED_CONTEXT_TAIL="Tip: call \`akm show <ref>\` to fetch full content, and record \`akm feedback <ref> --positive|--negative\` once you know whether the asset helped."
Expand Down Expand Up @@ -257,6 +258,138 @@ run_index_on_session_end() {
return 0
}

# Returns 0 when akm has a top-level llm block configured, 1 when it does not.
# On any error (akm missing, `config get` failure) returns 0 (fail-open) to
# avoid incorrectly installing the proxy shim over a working configuration.
has_akm_llm_configured() {
  akm_available || return 0
  # Fail open: if the config query itself errors we cannot tell whether an
  # llm block exists, so treat it as configured. (Previously the failure was
  # masked into an empty string, which wrongly reported "not configured".)
  raw="$(akm_run --format json -q config get llm 2>/dev/null)" || return 0
  # Strip all whitespace so "{ }" / "{}" / multi-line JSON compare equal.
  raw="$(printf '%s' "$raw" | tr -d ' \t\n\r')"
  case "$raw" in
    ""|"null"|"{}"|"[]") return 1 ;;
  esac
  return 0
}

# Writes the LLM proxy shim to LLM_PROXY_SHIM_PATH and exports
# AKM_LLM_PROXY_CMD so all subsequent akm calls in this hook process can use
# the harness Claude connection for index passes.
# Provider credentials are never stored — the shim inherits ANTHROPIC_API_KEY
# and OPENAI_API_KEY from the Claude Code environment.
# Idempotent: if the shim already exists it is not rewritten. The shim is
# written to a temp file and moved into place atomically so an interrupted or
# concurrent hook run can never leave a truncated-but-non-empty shim that the
# [ -s ] check would then trust forever.
# Best-effort: every failure path returns 0 and never blocks the hook.
install_llm_proxy_shim() {
  mkdir -p "$STATE_DIR" 2>/dev/null || return 0

  # Only (re)write the shim when it is missing or empty.
  if [ ! -s "$LLM_PROXY_SHIM_PATH" ]; then
    _shim_tmp="$(mktemp "$STATE_DIR/.akm-llm-proxy.XXXXXX" 2>/dev/null)" || return 0
    cat > "$_shim_tmp" <<'SHIM_EOF'
#!/usr/bin/env sh
# AKM LLM proxy shim — generated by akm-claude plugin
# Lends the Claude Code harness provider connection to akm when no akm.llm is
# configured. Contract: reads JSON {"prompt":"...","system":"...","model":"..."}
# from stdin, writes completion text to stdout.
# Exit 0 on success; non-zero causes akm index passes to degrade to no-op.
# Provider credentials are never stored here — the shim inherits them from the
# Claude Code process environment.
set -eu

INPUT="$(cat)"

# json_field NAME — print the top-level string field NAME from $INPUT, or the
# empty string when the field is absent or the JSON is malformed.
json_field() {
  printf '%s' "$INPUT" | _AKM_FIELD="$1" python3 -c '
import json, os, sys
try:
    d = json.loads(sys.stdin.read() or "{}")
    print(d.get(os.environ["_AKM_FIELD"]) or "")
except Exception:
    print("")
' 2>/dev/null || printf ''
}

PROMPT="$(json_field prompt)"
[ -n "$PROMPT" ] || exit 1
SYSTEM="$(json_field system)"

# Prefer the claude CLI when available — uses existing session auth.
if command -v claude >/dev/null 2>&1; then
  printf '%s\n' "$PROMPT" | claude -p --output-format text 2>/dev/null && exit 0
fi

# Anthropic REST API fallback (prompt/system passed via env, never argv).
if [ -n "${ANTHROPIC_API_KEY:-}" ]; then
  _AKM_P="$PROMPT" _AKM_S="$SYSTEM" _AKM_M="${AKM_LLM_PROXY_MODEL:-claude-haiku-4-5}" \
  python3 -c '
import json, os, sys, urllib.request
p = os.environ.get("_AKM_P", "")
s = os.environ.get("_AKM_S", "")
m = os.environ.get("_AKM_M", "claude-haiku-4-5")
k = os.environ["ANTHROPIC_API_KEY"]
body = {"model": m, "max_tokens": 4096, "messages": [{"role": "user", "content": p}]}
if s:
    body["system"] = s
req = urllib.request.Request(
    "https://api.anthropic.com/v1/messages",
    data=json.dumps(body).encode(),
    headers={"x-api-key": k, "anthropic-version": "2023-06-01", "content-type": "application/json"})
try:
    with urllib.request.urlopen(req, timeout=120) as r:
        result = json.load(r)
    text = (result.get("content") or [{}])[0].get("text") or ""
    print(text)
except Exception:
    sys.exit(1)
' && exit 0
fi

# OpenAI REST API fallback
if [ -n "${OPENAI_API_KEY:-}" ]; then
  _AKM_P="$PROMPT" _AKM_S="$SYSTEM" _AKM_M="${AKM_LLM_PROXY_MODEL:-gpt-4o-mini}" \
  python3 -c '
import json, os, sys, urllib.request
p = os.environ.get("_AKM_P", "")
s = os.environ.get("_AKM_S", "")
m = os.environ.get("_AKM_M", "gpt-4o-mini")
k = os.environ["OPENAI_API_KEY"]
msgs = []
if s:
    msgs.append({"role": "system", "content": s})
msgs.append({"role": "user", "content": p})
body = {"model": m, "messages": msgs}
req = urllib.request.Request(
    "https://api.openai.com/v1/chat/completions",
    data=json.dumps(body).encode(),
    headers={"Authorization": "Bearer " + k, "content-type": "application/json"})
try:
    with urllib.request.urlopen(req, timeout=120) as r:
        result = json.load(r)
    text = ((result.get("choices") or [{}])[0].get("message") or {}).get("content") or ""
    print(text)
except Exception:
    sys.exit(1)
' && exit 0
fi

# No supported provider available — degrade to no-op.
exit 1
SHIM_EOF
    chmod +x "$_shim_tmp" 2>/dev/null || true
    # Atomic publish; on failure, clean up the temp file and stay best-effort.
    mv -f "$_shim_tmp" "$LLM_PROXY_SHIM_PATH" 2>/dev/null || {
      rm -f "$_shim_tmp" 2>/dev/null
      return 0
    }
  fi

  # Advertise the shim path to akm (and any child processes) for this hook
  # run — but only when the shim actually exists and is non-empty.
  if [ -s "$LLM_PROXY_SHIM_PATH" ]; then
    export AKM_LLM_PROXY_CMD="$LLM_PROXY_SHIM_PATH"
  fi
  return 0
}

build_run_scope_args() {
sid="$1"
if [ -n "$sid" ]; then
Expand Down Expand Up @@ -586,6 +719,14 @@ session_start() {

akm_available || exit 0

# If akm has no llm block configured, install the proxy shim so index passes
# can borrow the harness provider connection. This is best-effort and never
# blocks session start. AKM_LLM_PROXY_CMD is exported for the duration of
# this hook process so all subsequent akm calls inherit it.
if ! has_akm_llm_configured; then
install_llm_proxy_shim
fi

# Keep the index warm in the background — never block session start.
( akm_run index >/dev/null & ) 2>/dev/null || true

Expand Down
Loading