Skip to content

Commit cc1019d

Browse files
Story 2269: Enable Conditional S3 Storage for development (#2287)
Co-authored-by: Teodoro B. Mendes <teobmendes@gmail.com>
1 parent b62156a commit cc1019d

7 files changed

Lines changed: 257 additions & 193 deletions

File tree

.gitignore

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -210,3 +210,6 @@ test-logs.txt
210210
smoke-logs.txt
211211
extract_links.py.py
212212
extract_links.py
213+
214+
# Large static images
215+
static/static-large/*

README.md

Lines changed: 45 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -174,13 +174,56 @@ To work with mailinglist data locally, the django application expects to be
174174
able to query a copy of the hyperkitty database from HYPERKITTY_DATABASE_NAME.
175175
Then, `just manage sync_mailinglist_stats` management command can be run.
176176

177+
## Syncing Large Static Images
178+
179+
Large static images and other assets are stored in S3 buckets rather than in the repository. Use the `scripts/sync-large-static-images.sh` script to manage these files.
180+
181+
### Prerequisites
182+
183+
- `awscli` must be installed.
184+
- Configure a set of credentials in your `~/.aws/credentials` file with the profile name `sync-boost-images`:
185+
186+
```ini
187+
[sync-boost-images]
188+
aws_access_key_id = <your_key_id>
189+
aws_secret_access_key = <your_secret_key>
190+
```
191+
192+
### Usage
193+
194+
The script supports both uploading and downloading files.
195+
196+
#### Uploading
197+
198+
To upload files from your local directory (`static/static-large/`) to the default S3 bucket:
199+
200+
```shell
201+
$ just up_sync_images
202+
```
203+
204+
To upload to all S3 buckets:
205+
206+
```shell
207+
$ just up_sync_images_all_buckets
208+
```
209+
210+
#### Downloading
211+
212+
To download missing or outdated static items from the staging bucket to your local directory:
213+
214+
```shell
215+
$ just down_sync_images
216+
```
217+
218+
---
219+
177220
## Deploying
178221

179-
TDB
222+
TBD
180223

181224
## Production Environment Considerations
182225

183-
TDB
226+
TBD
184227

185228
---
186229

core/templatetags/custom_static.py

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
from django import template
from django.conf import settings

register = template.Library()


@register.simple_tag
def large_static(file_path: str):
    """
    Resolve the URL for a large static asset.

    When ``settings.LOCAL_DEVELOPMENT`` is True the asset is served from the
    local ``static-large`` directory under ``STATIC_URL``; otherwise it is
    served from the project's S3 static-content bucket.
    """
    # Normalize both sides of the join so we never emit doubled slashes.
    relative_path = file_path.lstrip("/")
    local_base = settings.STATIC_URL.rstrip("/")
    s3_base = settings.STATIC_CONTENT_AWS_S3_ENDPOINT_URL.rstrip("/")

    if settings.LOCAL_DEVELOPMENT:
        return f"{local_base}/static-large/{relative_path}"
    return f"{s3_base}/{settings.STATIC_CONTENT_BUCKET_NAME}/static/{relative_path}"

justfile

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ COMPOSE_FILE := "docker-compose.yml"
33
ENV_FILE := ".env"
44
DJANGO_VERSION := "5.2"
55

6+
67
@_default:
78
just --list
89

@@ -180,3 +181,13 @@ alias shell := console
180181

181182
@manage +args:
182183
docker compose run --rm web python manage.py {{ args }}
184+
185+
# Static File Management
# Thin wrappers around scripts/sync-large-static-images.sh; see that script's
# --help output for AWS credential requirements.

# Download missing or outdated large static images from the stage bucket.
@down_sync_images:
    scripts/sync-large-static-images.sh --down-sync;

# Upload local large static images to the default (stage) bucket.
@up_sync_images:
    scripts/sync-large-static-images.sh --up-sync;

# Upload local large static images to every configured bucket.
@up_sync_images_all_buckets:
    scripts/sync-large-static-images.sh --up-sync --all-buckets;
Lines changed: 163 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,163 @@
1+
#!/usr/bin/env bash
2+
# sync-large-static-images.sh — Upload local images to S3 buckets
3+
4+
set -euo pipefail
5+
6+
DEFAULT_BUCKET="stage.boost.org.v2"
7+
S3_BUCKETS="boost.org.v2 boost.org-cppal-dev-v2 ${DEFAULT_BUCKET}"
8+
AWS_PROFILE='sync-boost-images'
9+
DEST_PATH="/static/"
10+
11+
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
12+
SOURCE_DIR="$(dirname "$SCRIPT_DIR")/static/static-large/"
13+
14+
# ─────────────────────────────────────────────
15+
# PARSE COMMAND-LINE OPTIONS
16+
# ─────────────────────────────────────────────
17+
# Print help text (usage, options, credential setup) to stdout.
usage() {
  cat <<USAGE
Usage: $0 [OPTION]

Upload or download image files and other static assets to/from the website S3 buckets.

Options:
  --up-sync      Upload files from the default source dir (${SOURCE_DIR}) to S3 buckets.
  --down-sync    Download files from S3 stage bucket to local static directory (${SOURCE_DIR}).
  --all-buckets  When used with --up-sync, upload to all buckets instead of the default bucket.
  --help         Display this help and exit.

Configuration:
  The default destination for upload is ${DEST_PATH}.
  In your .aws/credentials file, add a set of credentials:

  [${AWS_PROFILE}]
  aws_access_key_id = <your_key_id>
  aws_secret_access_key = <your_secret_key>
USAGE
}
38+
39+
# Ensure the AWS CLI is installed; exit 1 with an install hint otherwise.
validate_dependencies() {
  if ! command -v aws &>/dev/null; then
    # Diagnostics belong on stderr so they are not swallowed when callers
    # capture stdout.
    echo "awscli is required. Please install it from https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html" >&2
    exit 1
  fi
}
45+
46+
# Upload the contents of SOURCE_DIR to one or all of the S3 buckets.
#
# Globals:   SOURCE_DIR, DEST_PATH, DEFAULT_BUCKET, S3_BUCKETS, AWS_PROFILE (read)
# Arguments: $1 - "true" to upload to every bucket in S3_BUCKETS; anything
#                 else (or omitted) uploads only to DEFAULT_BUCKET.
# Returns:   0 when every sync succeeds; exits 1 if any bucket fails.
upload_images() {
  local all_buckets="${1:-false}"

  # ─────────────────────────────────────────────
  # CHECK FOR AWS CLI
  # ─────────────────────────────────────────────
  validate_dependencies

  # Fail early with a clear message instead of letting `ls` abort opaquely
  # under set -e when the local directory is missing.
  if [[ ! -d "$SOURCE_DIR" ]]; then
    echo "Source directory not found: $SOURCE_DIR" >&2
    exit 1
  fi

  echo ""
  echo "Source files found in $SOURCE_DIR:"
  ls -1 "$SOURCE_DIR"

  echo "Destination path: $DEST_PATH"

  # ─────────────────────────────────────────────
  # UPLOAD TO BUCKETS
  # ─────────────────────────────────────────────
  local upload_failed=0
  local buckets_to_upload="$DEFAULT_BUCKET"
  if [[ "$all_buckets" == "true" ]]; then
    buckets_to_upload="$S3_BUCKETS"
  fi

  local bucket s3_dest exit_code
  for bucket in $buckets_to_upload; do
    s3_dest="s3://${bucket}${DEST_PATH}"
    echo ""
    echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
    echo "Uploading to: $s3_dest"
    echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"

    # `|| exit_code=$?` captures the status without tripping set -e,
    # replacing the fragile set +e / set -e dance.
    exit_code=0
    aws s3 sync --profile "${AWS_PROFILE}" "$SOURCE_DIR" "$s3_dest" 2>&1 || exit_code=$?

    if [[ $exit_code -ne 0 ]]; then
      echo ""
      echo "The upload failed for bucket: $bucket (exit code $exit_code)"
      echo " Source: $SOURCE_DIR"
      echo " Destination: $s3_dest"
      upload_failed=1
    else
      echo ""
      echo "✓ Upload succeeded for bucket: $bucket"
    fi
  done

  # ─────────────────────────────────────────────
  # POST-UPLOAD SUMMARY
  # ─────────────────────────────────────────────
  echo ""
  if [[ $upload_failed -ne 0 ]]; then
    echo "One or more uploads failed. Please review the output above for details."
    echo " Source folder: $SOURCE_DIR"
    echo " Destination: $DEST_PATH"
    echo " Buckets: $S3_BUCKETS"
    exit 1
  fi

  echo ""
  echo "Done."
}
108+
109+
# Sync missing or outdated files from the stage bucket down into SOURCE_DIR.
#
# Globals: SOURCE_DIR, DEST_PATH, DEFAULT_BUCKET, S3_BUCKETS, AWS_PROFILE (read)
# Returns: 0 on success; exits 1 if DEFAULT_BUCKET is not a known bucket.
download_images() {
  validate_dependencies

  # Sanity check: the configured default bucket must appear in S3_BUCKETS.
  if echo "${S3_BUCKETS}" | grep -q -w "${DEFAULT_BUCKET}"; then
    # Use DEST_PATH (not a hardcoded "/static/") so the download prefix
    # always matches the prefix upload_images writes to.
    aws s3 sync "s3://${DEFAULT_BUCKET}${DEST_PATH}" "${SOURCE_DIR}" --profile "${AWS_PROFILE}"
    echo "All missing or outdated static items synced."
  else
    echo "Bucket name invalid: ${DEFAULT_BUCKET}" >&2
    exit 1
  fi
}
120+
121+
122+
123+
ALL_BUCKETS=false
UPLOAD_COMMAND=false
DOWNLOAD_COMMAND=false

# Parse flags; order-independent, unknown flags abort with usage.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --help)
      usage
      exit 0
      ;;
    --all-buckets)
      ALL_BUCKETS=true
      ;;
    --up-sync)
      UPLOAD_COMMAND=true
      ;;
    --down-sync)
      DOWNLOAD_COMMAND=true
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage
      exit 1
      ;;
  esac
  shift
done

# Dispatch: exactly one of --up-sync / --down-sync must be chosen.
# The functions' exit statuses propagate naturally as the script's status,
# so no redundant `exit $?` is needed.
if [[ "${UPLOAD_COMMAND}" == true && "${DOWNLOAD_COMMAND}" == true ]]; then
  echo "Uploading and downloading must be done separately. Exiting." >&2
  exit 1
elif [[ "${UPLOAD_COMMAND}" == true ]]; then
  upload_images "$ALL_BUCKETS"
elif [[ "${DOWNLOAD_COMMAND}" == true ]]; then
  download_images
else
  # Previously said "Unknown command", which was misleading when the script
  # was simply invoked with no arguments at all.
  echo "No command specified. Use --up-sync or --down-sync." >&2
  usage
  exit 1
fi

0 commit comments

Comments
 (0)