|
import json
import time
from datetime import datetime, timezone

import requests
| 6 | + |
# ── CONFIG ────────────────────────────────────────────────────────────────────
# URLs to submit to the Wayback Machine, processed in order.
URLS = [
    "https://github.com/aboutcode-org/vulnerablecode/issues/17",
]

DELAY_SECONDS = 5  # pause between requests to avoid rate-limiting
LOG_FILE = "archive_log.json"  # where the JSON results log is written
# ─────────────────────────────────────────────────────────────────────────────

# Wayback Machine "Save Page Now" submission endpoint.
SPN_ENDPOINT = "https://web.archive.org/save/"
| 17 | + |
| 18 | + |
def save_url(url: str) -> dict:
    """Submit a single URL to the Wayback Machine.

    Returns a result dict with a ``status`` of ``"success"``, ``"failed"``
    (non-200 HTTP response), or ``"error"`` (request-level failure).
    """
    try:
        response = requests.post(
            SPN_ENDPOINT,
            data={"url": url},
            headers={"User-Agent": "ArchiveBot/1.0"},
            timeout=30,
        )
    except requests.exceptions.Timeout:
        return {"url": url, "status": "error", "reason": "Request timed out"}
    except requests.exceptions.RequestException as exc:
        return {"url": url, "status": "error", "reason": str(exc)}

    if response.status_code != 200:
        return {
            "url": url,
            "status": "failed",
            "http_code": response.status_code,
            "reason": response.text[:200],
        }

    # Archive.org returns the archived URL in the Content-Location header
    location = response.headers.get("Content-Location", "")
    archived_url = f"https://web.archive.org{location}" if location else "check manually"
    return {"url": url, "status": "success", "archived_url": archived_url}
| 47 | + |
| 48 | + |
def archive_all(urls: list[str]) -> list[dict]:
    """Submit each URL to the Wayback Machine, pausing between requests.

    Args:
        urls: URLs to archive, processed in order.

    Returns:
        One result dict per URL, in input order, each stamped with a
        timezone-aware UTC ISO-8601 ``timestamp``.
    """
    results = []
    total = len(urls)

    print(f"Starting archive of {total} URL(s)...\n")

    for i, url in enumerate(urls, start=1):
        print(f"[{i}/{total}] Submitting: {url}")
        result = save_url(url)
        # datetime.utcnow() is naive and deprecated since Python 3.12;
        # record an aware UTC timestamp instead.
        result["timestamp"] = datetime.now(timezone.utc).isoformat()
        results.append(result)

        if result["status"] == "success":
            print(f" ✓ Archived → {result['archived_url']}")
        else:
            print(f" ✗ {result.get('reason') or result.get('http_code')}")

        # Throttle between submissions, but don't sleep after the last one.
        if i < total:
            time.sleep(DELAY_SECONDS)

    return results
| 70 | + |
| 71 | + |
def save_log(results: list[dict], path: str) -> None:
    """Write the archive results to *path* as pretty-printed JSON.

    Args:
        results: Result dicts as produced by ``archive_all``.
        path: Destination file; overwritten if it already exists.
    """
    # Explicit UTF-8 avoids platform-dependent default encodings, and
    # ensure_ascii=False keeps any non-ASCII URLs readable in the log.
    with open(path, "w", encoding="utf-8") as f:
        json.dump(results, f, indent=2, ensure_ascii=False)
    print(f"\nLog saved to {path}")
| 76 | + |
| 77 | + |
def print_summary(results: list[dict]) -> None:
    """Print a success/failure tally for a batch of archive results."""
    total = len(results)
    ok = sum(1 for r in results if r["status"] == "success")
    print("\n── Summary ──────────────────────")
    print(f" Total : {total}")
    print(f" Success : {ok}")
    print(f" Failed : {total - ok}")
    print("─────────────────────────────────")
| 86 | + |
| 87 | + |
if __name__ == "__main__":
    # Script entry point: archive every configured URL, print a summary,
    # then persist the per-URL results to the JSON log file.
    results = archive_all(URLS)
    print_summary(results)
    save_log(results, LOG_FILE)
0 commit comments