-
Notifications
You must be signed in to change notification settings - Fork 1
155 lines (138 loc) · 5.91 KB
/
html-export-workflow.yml
File metadata and controls
155 lines (138 loc) · 5.91 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
---
# Workflow: export SFS documents to HTML and publish them to Cloudflare R2
# (S3-compatible object storage, driven through the AWS CLI).
name: Exportera till HTML-format och ladda upp till Cloudflare R2

on:
  # Allows manual runs from the Actions tab.
  workflow_dispatch:
    inputs:
      source_ref:
        description: 'Git ref att bygga från'
        required: false
        default: 'main'
      filter:
        description: 'Filtrera filer efter år (YYYY) eller specifik beteckning (YYYY:NNN). Kommaseparerad lista.'
        required: false
        type: string
  # Allows this workflow to be called from other workflows.
  workflow_call:
    inputs:
      source_ref:
        required: false
        type: string
        default: 'main'
      filter:
        required: false
        type: string

permissions:
  contents: read

jobs:
  html-export:
    runs-on: ubuntu-latest
    environment: Test
    # Credentials and endpoint are exposed once at job level. The AWS CLI
    # reads AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY / AWS_DEFAULT_REGION
    # directly from the environment, which removes the duplicated
    # "aws configure set" blocks and keeps secret values out of inline
    # ${{ }} shell interpolation (injection-safe, log-mask friendly).
    env:
      AWS_ACCESS_KEY_ID: ${{ secrets.CLOUDFLARE_R2_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.CLOUDFLARE_R2_SECRET_ACCESS_KEY }}
      AWS_DEFAULT_REGION: us-east-1
      R2_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_R2_ACCOUNT_ID }}
      R2_BUCKET: ${{ secrets.CLOUDFLARE_R2_BUCKET_NAME }}
      R2_ENDPOINT: https://${{ secrets.CLOUDFLARE_R2_ACCOUNT_ID }}.r2.cloudflarestorage.com
    steps:
      # Fail fast: validate secrets before spending time on checkout/build.
      - name: Check required secrets
        run: |
          if [ -z "$AWS_ACCESS_KEY_ID" ]; then
            echo "::error::CLOUDFLARE_R2_ACCESS_KEY_ID secret is not set"
            exit 1
          fi
          if [ -z "$AWS_SECRET_ACCESS_KEY" ]; then
            echo "::error::CLOUDFLARE_R2_SECRET_ACCESS_KEY secret is not set"
            exit 1
          fi
          if [ -z "$R2_BUCKET" ]; then
            echo "::error::CLOUDFLARE_R2_BUCKET_NAME secret is not set"
            exit 1
          fi
          if [ -z "$R2_ACCOUNT_ID" ]; then
            echo "::error::CLOUDFLARE_R2_ACCOUNT_ID secret is not set"
            exit 1
          fi
          echo "All required secrets are configured"

      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ inputs.source_ref || 'main' }}

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.11'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Get JSON source files (from git or R2)
        run: |
          # Prefer JSON files committed to git; fall back to syncing from R2.
          if [ -d "data/sfs_json" ] && [ -n "$(ls -A data/sfs_json 2>/dev/null)" ]; then
            echo "✅ Found $(find data/sfs_json -name '*.json' | wc -l) JSON files in git"
            echo "Using JSON files from git checkout"
          else
            echo "⚠️ No JSON files in git, downloading from Cloudflare R2..."
            mkdir -p data/sfs_json
            aws s3 sync "s3://${R2_BUCKET}/sfs_json/" data/sfs_json/ \
              --endpoint-url "$R2_ENDPOINT" \
              --exclude "*" \
              --include "*.json"
            # Verify that the sync actually produced files.
            if [ -z "$(ls -A data/sfs_json 2>/dev/null)" ]; then
              echo "::error::Failed to download JSON files from R2"
              exit 1
            fi
            echo "✅ Downloaded $(find data/sfs_json -name '*.json' | wc -l) JSON files from R2"
          fi

      - name: Generate HTML export
        env:
          PYTHONPATH: ${{ github.workspace }}
          # workflow_dispatch input is untrusted; passing it through env and
          # quoting "$FILTER" prevents it from breaking out of the shell.
          FILTER: ${{ inputs.filter }}
        run: |
          if [ -n "$FILTER" ]; then
            python sfs_processor.py --input data/sfs_json --output output/html --formats html --filter "$FILTER"
          else
            python sfs_processor.py --input data/sfs_json --output output/html --formats html
          fi

      - name: Regenerate index pages for HTML export
        env:
          PYTHONPATH: ${{ github.workspace }}
        run: |
          python exporters/html/populate_index_pages.py --input data/sfs_json --output index.html --limit 30
          python exporters/html/populate_index_pages.py --input data/sfs_json --output latest.html --limit 10

      - name: Upload HTML folder to Cloudflare R2
        run: |
          # NOTE(review): with these filters, files matching neither pattern
          # (e.g. *.css, *.js) are still uploaded by default and forced to
          # Content-Type text/html — confirm output/html contains only HTML,
          # or tighten the filters to --exclude "*" --include "*.html".
          aws s3 sync output/html/ "s3://${R2_BUCKET}/sfs/" \
            --endpoint-url "$R2_ENDPOINT" \
            --delete \
            --cache-control "public, max-age=3600" \
            --content-type "text/html" \
            --exclude "*.md" \
            --include "*.html"

      - name: Upload index pages to Cloudflare R2
        run: |
          aws s3 cp index.html "s3://${R2_BUCKET}/index.html" \
            --endpoint-url "$R2_ENDPOINT" \
            --cache-control "public, max-age=1800" \
            --content-type "text/html"
          aws s3 cp latest.html "s3://${R2_BUCKET}/latest.html" \
            --endpoint-url "$R2_ENDPOINT" \
            --cache-control "public, max-age=1800" \
            --content-type "text/html"

      - name: Upload summary
        run: |
          echo "HTML export completed at $(date)" > upload-summary.txt
          echo "Files uploaded to Cloudflare R2 bucket: ${R2_BUCKET}/sfs/" >> upload-summary.txt
          echo "Index pages uploaded: index.html (30 senaste), latest.html (10 senaste)" >> upload-summary.txt
          aws s3 cp upload-summary.txt "s3://${R2_BUCKET}/sfs/last-update.txt" \
            --endpoint-url "$R2_ENDPOINT"