import os

from beam import Image, Volume, function
from huggingface_hub import snapshot_download
@function(
    app="volume-imports",
    name="huggingface-clone-model",
    secrets=["HUGGINGFACE_TOKEN", "HF_TOKEN"],
    memory="8gb",
    gpu="T4",
    image=Image(
        # NOTE(review): torch is listed but unused here — presumably needed by
        # downstream consumers of the volume; confirm before removing.
        python_packages=["torch", "huggingface_hub"]
    ),
    volumes=[Volume(name="huggingface_models", mount_path="/huggingface_models")],
)
def handler(*, model_name: str = "") -> dict:
    """Download a Hugging Face model snapshot into the shared Beam volume.

    Args:
        model_name: Hugging Face repo id (e.g. "org/model-name"). Required.

    Returns:
        dict with keys "model_name" (the repo id) and "saved_path"
        (local directory the snapshot was written to).

    Raises:
        ValueError: if model_name is empty.
        Exception: if the snapshot download fails; chained to the
            original error so the root cause stays in the traceback.
    """
    if not model_name:
        raise ValueError("model_name is required")

    print(f"Downloading model: {model_name}")
    # Prefer HUGGINGFACE_TOKEN, fall back to HF_TOKEN — both are injected
    # as environment variables via the Beam `secrets` list above.
    token = os.getenv("HUGGINGFACE_TOKEN") or os.getenv("HF_TOKEN")

    try:
        # snapshot_download creates local_dir (including parents) as needed,
        # so no explicit os.makedirs is required beforehand.
        path = snapshot_download(
            repo_id=model_name,
            local_dir=f"/huggingface_models/{model_name}",
            token=token,
        )
        print(f"Model downloaded to: {path}")
        return {
            "model_name": model_name,
            "saved_path": path,
        }
    except Exception as e:
        print(f"Failed to download model: {str(e)}")
        # Chain the original exception instead of discarding it.
        raise Exception(f"Failed to download model: {str(e)}") from e
3836
if __name__ == "__main__":
    # Local/dev entry point: clone the default model into the shared volume.
    handler(model_name="tencent/Hunyuan3D-2.1")
0 commit comments