Skip to content

Commit ec12083

Browse files
committed
chore(tests): refactor root_cache with default True; pass bucket name
1 parent f886ea0 commit ec12083

5 files changed

Lines changed: 50 additions & 30 deletions

File tree

tests/integration/lib.py

Lines changed: 15 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -169,7 +169,7 @@ def getMinio(id="disposable"):
169169
print("Reusing existing global minio")
170170
aws_access_key_id = os.getenv("AWS_ACCESS_KEY_ID")
171171
aws_secret_access_key = os.getenv("AWS_SECRET_ACCESS_KEY")
172-
172+
aws_s3_default_bucket = AWS_S3_DEFAULT_BUCKET
173173
s3 = boto3.client(
174174
"s3",
175175
endpoint_url=endpoint_url,
@@ -187,6 +187,7 @@ def getMinio(id="disposable"):
187187
"endpoint_url": endpoint_url,
188188
"aws_access_key_id": aws_access_key_id,
189189
"aws_secret_access_key": aws_secret_access_key,
190+
"aws_s3_default_bucket": aws_s3_default_bucket,
190191
"s3": s3,
191192
}
192193
_minioCache.update({id: result})
@@ -222,6 +223,7 @@ def onstop():
222223

223224
aws_access_key_id = "minioadmin"
224225
aws_secret_access_key = "minioadmin"
226+
aws_s3_default_bucket = AWS_S3_DEFAULT_BUCKET
225227
s3 = boto3.client(
226228
"s3",
227229
endpoint_url=endpoint_url,
@@ -241,6 +243,7 @@ def onstop():
241243
"endpoint_url": endpoint_url,
242244
"aws_access_key_id": aws_access_key_id,
243245
"aws_secret_access_key": aws_secret_access_key,
246+
"aws_s3_default_bucket": aws_s3_default_bucket,
244247
"s3": s3,
245248
}
246249
_minioCache.update({id: result})
@@ -251,7 +254,13 @@ def onstop():
251254

252255

253256
def getDDA(
254-
minio=None, command=None, environment={}, stream_logs=False, wait=True, **kwargs
257+
minio=None,
258+
command=None,
259+
environment={},
260+
stream_logs=False,
261+
wait=True,
262+
root_cache=True,
263+
**kwargs,
255264
):
256265
global _ddaCache
257266
if _ddaCache:
@@ -278,7 +287,7 @@ def getDDA(
278287
"AWS_ACCESS_KEY_ID": minio.aws_access_key_id,
279288
"AWS_SECRET_ACCESS_KEY": minio.aws_secret_access_key,
280289
"AWS_DEFAULT_REGION": "",
281-
"AWS_S3_DEFAULT_BUCKET": AWS_S3_DEFAULT_BUCKET,
290+
"AWS_S3_DEFAULT_BUCKET": minio.aws_s3_default_bucket,
282291
"AWS_S3_ENDPOINT_URL": minio.endpoint_url,
283292
}
284293
)
@@ -287,13 +296,16 @@ def onstop():
287296
global _ddaCache
288297
_ddaCache = None
289298

299+
HOME = os.getenv("HOME")
300+
290301
container, stop = startContainer(
291302
"gadicc/diffusers-api:test",
292303
command,
293304
stream_logs=stream_logs,
294305
ports={8000: port},
295306
device_requests=[docker.types.DeviceRequest(count=-1, capabilities=[["gpu"]])],
296307
environment=environment,
308+
volumes=root_cache and [f"{HOME}/root-cache:/root/.cache"],
297309
onstop=onstop,
298310
**kwargs,
299311
)

tests/integration/test_build_download.py

Lines changed: 32 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -2,33 +2,39 @@
22
from .lib import getMinio, getDDA
33
from test import runTest
44

5+
56
def test_cloudcache_build_download():
67
"""
78
Download a model from cloud-cache at build time (no HuggingFace)
89
"""
910
minio = getMinio()
1011
print(minio)
1112
environment = {
12-
"RUNTIME_DOWNLOADS": 0,
13-
"MODEL_ID": "stabilityai/stable-diffusion-2-1-base",
14-
"MODEL_PRECISION": "fp16",
15-
"MODEL_REVISION": "fp16",
16-
"MODEL_URL": "s3://", # <--
13+
"RUNTIME_DOWNLOADS": 0,
14+
"MODEL_ID": "stabilityai/stable-diffusion-2-1-base",
15+
"MODEL_PRECISION": "fp16",
16+
"MODEL_REVISION": "fp16",
17+
"MODEL_URL": "s3://", # <--
1718
}
18-
conda="conda run --no-capture-output -n xformers"
19+
conda = "conda run --no-capture-output -n xformers"
1920
dda = getDDA(
20-
minio=minio,
21-
stream_logs=True,
22-
environment=environment,
23-
command=["sh", "-c", f"{conda} python3 -u download.py && ls -l && {conda} python3 -u server.py"],
21+
minio=minio,
22+
stream_logs=True,
23+
environment=environment,
24+
root_cache=False,
25+
command=[
26+
"sh",
27+
"-c",
28+
f"{conda} python3 -u download.py && ls -l && {conda} python3 -u server.py",
29+
],
2430
)
2531
print(dda)
2632
assert dda.container.status == "running"
2733

2834
## bucket.objects.all().delete()
2935
result = runTest(
3036
"txt2img",
31-
{ "test_url": dda.url },
37+
{"test_url": dda.url},
3238
{
3339
"MODEL_ID": "stabilityai/stable-diffusion-2-1-base",
3440
},
@@ -48,30 +54,35 @@ def test_huggingface_build_download():
4854
uploads if missing.
4955
"""
5056
environment = {
51-
"RUNTIME_DOWNLOADS": 0,
52-
"MODEL_ID": "stabilityai/stable-diffusion-2-1-base",
53-
"MODEL_PRECISION": "fp16",
54-
"MODEL_REVISION": "fp16",
57+
"RUNTIME_DOWNLOADS": 0,
58+
"MODEL_ID": "stabilityai/stable-diffusion-2-1-base",
59+
"MODEL_PRECISION": "fp16",
60+
"MODEL_REVISION": "fp16",
5561
}
56-
conda="conda run --no-capture-output -n xformers"
62+
conda = "conda run --no-capture-output -n xformers"
5763
dda = getDDA(
58-
stream_logs=True,
59-
environment=environment,
60-
command=["sh", "-c", f"{conda} python3 -u download.py && ls -l && {conda} python3 -u server.py"],
64+
stream_logs=True,
65+
environment=environment,
66+
root_cache=False,
67+
command=[
68+
"sh",
69+
"-c",
70+
f"{conda} python3 -u download.py && ls -l && {conda} python3 -u server.py",
71+
],
6172
)
6273
print(dda)
6374
assert dda.container.status == "running"
6475

6576
## bucket.objects.all().delete()
6677
result = runTest(
6778
"txt2img",
68-
{ "test_url": dda.url },
79+
{"test_url": dda.url},
6980
{
7081
"MODEL_ID": "stabilityai/stable-diffusion-2-1-base",
7182
# "MODEL_ID": "hf-internal-testing/tiny-stable-diffusion-pipe",
7283
"MODEL_PRECISION": "fp16",
7384
"MODEL_REVISION": "fp16",
74-
"MODEL_URL": "", # <-- no model_url, i.e. no cloud cache
85+
"MODEL_URL": "", # <-- no model_url, i.e. no cloud cache
7586
},
7687
{"num_inference_steps": 1},
7788
)

tests/integration/test_cloud_cache.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,20 +2,21 @@
22
from .lib import getMinio, getDDA
33
from test import runTest
44

5+
56
def test_cloud_cache_create_and_upload():
67
"""
78
Check if the model exists in the cloud cache bucket; download it otherwise, save
89
with safetensors, and upload model.tar.zst to bucket
910
"""
1011
minio = getMinio()
1112
print(minio)
12-
dda = getDDA(minio=minio, stream_logs=True)
13+
dda = getDDA(minio=minio, stream_logs=True, root_cache=False)
1314
print(dda)
1415

1516
## bucket.objects.all().delete()
1617
result = runTest(
1718
"txt2img",
18-
{ "test_url": dda.url },
19+
{"test_url": dda.url},
1920
{
2021
"MODEL_ID": "stabilityai/stable-diffusion-2-1-base",
2122
# "MODEL_ID": "hf-internal-testing/tiny-stable-diffusion-pipe",

tests/integration/test_general.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -23,11 +23,9 @@ def setup_class(self):
2323
print("setup_class")
2424
# self.minio = minio = getMinio()
2525

26-
HOME = os.getenv("HOME")
2726
self.dda = dda = getDDA(
2827
# minio=minio
2928
# stream_logs=True,
30-
volumes=[f"{HOME}/root-cache:/root/.cache"],
3129
)
3230
print(dda)
3331

tests/integration/test_memory.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,9 @@ def test_memory():
99
Make sure when switching models we release VRAM afterwards.
1010
"""
1111
minio = getMinio("global")
12-
HOME = os.getenv("HOME")
1312
dda = getDDA(
1413
minio=minio,
1514
stream_logs=True,
16-
volumes=[f"{HOME}/root-cache:/root/.cache"],
1715
)
1816
print(dda)
1917

0 commit comments

Comments
 (0)