Skip to content

Commit b2f1fe8

Browse files
committed
Add cloning examples
1 parent cd70da0 commit b2f1fe8

File tree

4 files changed

+163
-0
lines changed

4 files changed

+163
-0
lines changed

civitai-clone/.beta9ignore

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
# Generated by Beta9 SDK
2+
.beta9ignore
3+
pyproject.toml
4+
.git
5+
.idea
6+
.python-version
7+
.vscode
8+
.venv
9+
venv
10+
__pycache__
11+
.DS_Store
12+
.config
13+
drive/MyDrive
14+
.coverage
15+
.pytest_cache
16+
.ipynb
17+
.ruff_cache
18+
.dockerignore
19+
.ipynb_checkpoints
20+
.env.local
21+
.envrc
22+
**/__pycache__/
23+
**/.pytest_cache/
24+
**/node_modules/
25+
**/.venv/
26+
*.pyc
27+
.next/
28+
.circleci

civitai-clone/app.py

Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
import os
2+
import requests
3+
import urllib.parse
4+
import json
5+
from beta9 import function, Volume, Image
6+
7+
@function(
    app="volume-imports",
    name="civitai-clone-model",
    secrets=["CIVITAI_API_KEY"],
    memory="4gb",
    image=Image(python_packages=["requests"]),
    volumes=[Volume(name="civitai_models", mount_path="/civitai_models")],
)
def handler(*, url: str):
    """Download the latest version of a Civitai model into the mounted volume.

    Args:
        url: A Civitai model page URL of the form
            ``https://civitai.com/models/<model_id>/<slug>``.

    Returns:
        dict with keys ``model_id`` and ``downloaded_path``.

    Raises:
        ValueError: if ``url`` is empty or no model id can be parsed from it.
        Exception: if the Civitai API request or the file download fails
            (original cause is chained via ``from e``).
    """
    if not url:
        raise ValueError("url is required")

    print(f"Downloading model from {url}")
    # Path looks like /models/<model_id>/<slug>: index 0 is "", index 1 is
    # "models", index 2 is the numeric id. Validate before indexing.
    path_parts = urllib.parse.urlparse(url).path.split("/")
    if len(path_parts) < 3 or not path_parts[2]:
        raise ValueError(f"Could not parse a model id from url: {url}")
    model_id = path_parts[2]
    print(f"Model ID: {model_id}")

    # These are GETs with no request body, so only Authorization is needed
    # (a Content-Type header on a body-less GET is meaningless).
    auth_headers = {"Authorization": f"Bearer {os.getenv('CIVITAI_API_KEY')}"}

    try:
        response = requests.get(
            f"https://civitai.com/api/v1/models/{model_id}",
            headers=auth_headers,
            timeout=60,  # avoid hanging forever on a stalled connection
        )
        response.raise_for_status()
        data = response.json()

        # First entry of modelVersions is treated as the newest version.
        latest_version = data["modelVersions"][0]
        download_url = latest_version["downloadUrl"]
        print(f"Download URL: {download_url}")

        save_path = f"/civitai_models/{data['name'].replace(' ', '_')}"
        with requests.get(
            download_url, headers=auth_headers, stream=True, timeout=60
        ) as dl:
            dl.raise_for_status()
            total_size = int(dl.headers.get("content-length", 0))
            print(f"Total file size: {total_size / (1024 * 1024):.2f} MB")

            # Stream to disk in 8 KiB chunks so large checkpoints never have
            # to fit in memory.
            with open(save_path, "wb") as f:
                for chunk in dl.iter_content(chunk_size=8192):
                    if chunk:
                        f.write(chunk)

        print("Model downloaded successfully")
        return {
            "model_id": model_id,
            "downloaded_path": save_path,
        }

    except Exception as e:
        # The try block covers the metadata lookup AND the download, so the
        # message must not claim only the model-id lookup failed. Chain the
        # original exception to preserve the real cause.
        print(f"Failed to clone model: {e}")
        raise Exception(f"Failed to clone model: {e}") from e


if __name__ == "__main__":
    handler(url="https://civitai.com/models/1224788/prefect-illustrious-xl")

huggingface-clone/.beta9ignore

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
# Generated by Beta9 SDK
2+
.beta9ignore
3+
pyproject.toml
4+
.git
5+
.idea
6+
.python-version
7+
.vscode
8+
.venv
9+
venv
10+
__pycache__
11+
.DS_Store
12+
.config
13+
drive/MyDrive
14+
.coverage
15+
.pytest_cache
16+
.ipynb
17+
.ruff_cache
18+
.dockerignore
19+
.ipynb_checkpoints
20+
.env.local
21+
.envrc
22+
**/__pycache__/
23+
**/.pytest_cache/
24+
**/node_modules/
25+
**/.venv/
26+
*.pyc
27+
.next/
28+
.circleci

huggingface-clone/app.py

Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
from beta9 import function, Volume, Image
2+
from transformers import AutoModelForCausalLM, AutoTokenizer
3+
4+
@function(
5+
app="volume-imports",
6+
name="huggingface-clone-model",
7+
secrets=["HUGGINGFACE_TOKEN"],
8+
memory="8gb",
9+
image=Image(
10+
python_packages=["torch","transformers"]
11+
),
12+
volumes=[Volume(name="huggingface_models", mount_path="/huggingface_models")]
13+
)
14+
def handler(*, model_name: str = ""):
15+
if not model_name:
16+
raise ValueError("model_name is required")
17+
18+
print(f"Downloading model: {model_name}")
19+
20+
try:
21+
# Download model and tokenizer
22+
model = AutoModelForCausalLM.from_pretrained(model_name)
23+
tokenizer = AutoTokenizer.from_pretrained(model_name)
24+
25+
# Save to local volume
26+
save_path = f"/huggingface_models/{model_name.replace('/', '_')}"
27+
model.save_pretrained(save_path)
28+
tokenizer.save_pretrained(save_path)
29+
print(f"Model and tokenizer saved to: {save_path}")
30+
31+
return {
32+
"model_name": model_name,
33+
"saved_path": save_path
34+
}
35+
except Exception as e:
36+
print(f"Failed to download model: {str(e)}")
37+
raise Exception(f"Failed to download model: {str(e)}")
38+
39+
if __name__ == "__main__":
40+
handler(model_name="distilbert/distilgpt2")

0 commit comments

Comments
 (0)