Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 0 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,6 @@ Convert Claude Code session files (JSON or JSONL) to clean, mobile-friendly HTML

Read [A new way to extract detailed transcripts from Claude Code](https://simonwillison.net/2025/Dec/25/claude-code-transcripts/) for background on this project.

> [!WARNING]
>
> The `web` commands for both listing Claude Code for web sessions and converting those to a transcript are both broken right now due to changes to the unofficial and undocumented APIs that these commands were using. See [issue #77](https://github.com/simonw/claude-code-transcripts/issues/77) for details.

## Installation

Install this tool using `uv`:
Expand Down
90 changes: 80 additions & 10 deletions src/claude_code_transcripts/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -568,29 +568,99 @@ def get_api_headers(token, org_uuid):
def fetch_sessions(token, org_uuid):
    """Fetch the list of sessions from the API.

    Calls the Sessions API (GET /v1/sessions), which requires the CCR beta
    header. The returned dict has a 'data' key holding the session list.

    Raises httpx.HTTPError on network/API errors.
    """
    request_headers = get_api_headers(token, org_uuid)
    # The sessions endpoint is gated behind this beta flag.
    request_headers["anthropic-beta"] = "ccr-byoc-2025-07-29"
    resp = httpx.get(
        f"{API_BASE_URL}/sessions", headers=request_headers, timeout=30.0
    )
    resp.raise_for_status()
    return resp.json()


def fetch_session(token, org_uuid, session_id):
    """Fetch a specific session's transcript from the API.

    Uses two endpoints:
    1. GET /v1/code/sessions/{id}/teleport-events for the transcript
       (paginated, handled by _fetch_teleport_events)
    2. Falls back to the legacy /v1/session_ingress/session/{id} endpoint if
       teleport-events returns 404 (migration period).

    Returns the session data as a dict with 'loglines' key.
    Raises httpx.HTTPError on network/API errors.
    """
    # NOTE: previously this function built a `headers` dict (with the beta
    # header) that was never used — _fetch_teleport_events and the legacy
    # fallback each construct their own headers. The dead code is removed.

    # First try the new teleport-events endpoint; None signals it is
    # unavailable (404 on the first page) and we should fall back.
    loglines = _fetch_teleport_events(token, org_uuid, session_id)

    if loglines is None:
        # Fall back to legacy session_ingress endpoint (no beta header).
        legacy_headers = get_api_headers(token, org_uuid)
        response = httpx.get(
            f"{API_BASE_URL}/session_ingress/session/{session_id}",
            headers=legacy_headers,
            timeout=60.0,
        )
        response.raise_for_status()
        return response.json()

    return {"loglines": loglines}


def _fetch_teleport_events(token, org_uuid, session_id):
    """Fetch transcript entries via GET /v1/code/sessions/{id}/teleport-events.

    This is the CCR v2 endpoint that replaced session_ingress. Pages through
    results using the cursor the API returns, collecting each event's
    'payload' entry.

    Returns a list of logline dicts, or None if the endpoint is unavailable
    (404 on the very first request).
    """
    request_headers = get_api_headers(token, org_uuid)
    request_headers["anthropic-beta"] = "ccr-byoc-2025-07-29"
    events_url = f"{API_BASE_URL}/code/sessions/{session_id}/teleport-events"

    entries = []
    cursor = None

    # Hard cap of 100 pages as a safety net against a misbehaving cursor.
    for page_number in range(100):
        query = {"limit": 1000}
        if cursor is not None:
            query["cursor"] = cursor

        response = httpx.get(
            events_url,
            headers=request_headers,
            params=query,
            timeout=30.0,
        )

        if response.status_code == 404:
            if page_number == 0:
                # Endpoint not available or session not found — caller
                # should fall back to the legacy endpoint.
                return None
            # A 404 mid-pagination means the session was deleted between
            # pages; return whatever we managed to collect.
            return entries

        response.raise_for_status()
        page_data = response.json()

        for event in page_data.get("data", []):
            event_payload = event.get("payload")
            if event_payload is not None:
                entries.append(event_payload)

        cursor = page_data.get("next_cursor")
        if cursor is None:
            break

    return entries


def detect_github_repo(loglines):
Expand Down
36 changes: 27 additions & 9 deletions tests/test_generate_html.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,24 @@ def output_dir():
yield Path(tmpdir)


def _make_teleport_events_response(session_data):
"""Convert a session_data dict (with 'loglines') to teleport-events API response format."""
loglines = session_data.get("loglines", [])
return {
"data": [
{
"event_id": f"evt_{i}",
"event_type": entry.get("type", "unknown"),
"is_compaction": False,
"payload": entry,
"created_at": entry.get("timestamp", ""),
}
for i, entry in enumerate(loglines)
],
"next_cursor": None,
}


class TestGenerateHtml:
"""Tests for the main generate_html function."""

Expand Down Expand Up @@ -889,8 +907,8 @@ def test_import_json_saves_session_data(self, httpx_mock, output_dir):
session_data = json.load(f)

httpx_mock.add_response(
url="https://api.anthropic.com/v1/session_ingress/session/test-session-id",
json=session_data,
url="https://api.anthropic.com/v1/code/sessions/test-session-id/teleport-events?limit=1000",
json=_make_teleport_events_response(session_data),
)

runner = CliRunner()
Expand Down Expand Up @@ -918,7 +936,7 @@ def test_import_json_saves_session_data(self, httpx_mock, output_dir):
# Verify JSON content is valid
with open(json_file) as f:
saved_data = json.load(f)
assert saved_data == session_data
assert saved_data == {"loglines": session_data["loglines"]}


class TestImportGistOption:
Expand All @@ -936,8 +954,8 @@ def test_import_gist_creates_gist(self, httpx_mock, monkeypatch, tmp_path):
session_data = json.load(f)

httpx_mock.add_response(
url="https://api.anthropic.com/v1/session_ingress/session/test-session-id",
json=session_data,
url="https://api.anthropic.com/v1/code/sessions/test-session-id/teleport-events?limit=1000",
json=_make_teleport_events_response(session_data),
)

# Mock subprocess.run for gh gist create
Expand Down Expand Up @@ -1049,8 +1067,8 @@ def test_import_open_calls_webbrowser(self, httpx_mock, output_dir, monkeypatch)
session_data = json.load(f)

httpx_mock.add_response(
url="https://api.anthropic.com/v1/session_ingress/session/test-session-id",
json=session_data,
url="https://api.anthropic.com/v1/code/sessions/test-session-id/teleport-events?limit=1000",
json=_make_teleport_events_response(session_data),
)

# Track webbrowser.open calls
Expand Down Expand Up @@ -1512,8 +1530,8 @@ def test_web_output_auto_creates_subdirectory(self, httpx_mock, tmp_path):
session_data = json.load(f)

httpx_mock.add_response(
url="https://api.anthropic.com/v1/session_ingress/session/my-web-session-id",
json=session_data,
url="https://api.anthropic.com/v1/code/sessions/my-web-session-id/teleport-events?limit=1000",
json=_make_teleport_events_response(session_data),
)

runner = CliRunner()
Expand Down