2 files changed: +10 additions, −1 deletion

Original file line number Diff line number Diff line change @@ -27,6 +27,11 @@ def get_args():
2727 action = "store_true" ,
2828 help = "Run nvidia test" ,
2929 )
30+ parser .add_argument (
31+ "--qy" ,
32+ action = "store_true" ,
33+ help = "Run qy test" ,
34+ )
3035 parser .add_argument (
3136 "--metax" ,
3237 action = "store_true" ,
@@ -210,6 +215,8 @@ def test(
210215 device_str = "cpu"
211216 elif args .nvidia :
212217 device_str = "cuda"
218+ elif args .qy :
219+ device_str = "cuda"
213220 elif args .metax :
214221 device_str = "cuda"
215222 elif args .moore :
@@ -220,7 +227,7 @@ def test(
220227 device_str = "mlu"
221228 else :
222229 print (
223- "Usage: python examples/jiuge.py [--cpu | --nvidia | --metax | --moore | --iluvatar] --model_path=<path/to/model_dir>\n "
230+ "Usage: python examples/jiuge.py [--cpu | --nvidia | --qy | --metax | --moore | --iluvatar] --model_path=<path/to/model_dir>\n "
224231 "such as, python examples/jiuge.py --nvidia --model_path=~/TinyLlama-1.1B-Chat-v1.0"
225232 )
226233 sys .exit (1 )
Original file line number Diff line number Diff line change @@ -21,6 +21,8 @@ def from_pretrained(model_path):
2121
2222 if config_dict ["model_type" ] == "llama" :
2323 return LlamaConfig (** config_dict )
24+ elif config_dict ["model_type" ] == "fm9g7b" :
25+ return LlamaConfig (** config_dict )
2426 elif config_dict ["model_type" ] == "qwen2" :
2527 return LlamaConfig (** config_dict )
2628
You can’t perform that action at this time.
0 commit comments