Skip to content

Commit ca7b4be

Browse files
committed
add option to provide config file via kwargs
1 parent 23db23a commit ca7b4be

2 files changed

Lines changed: 6 additions & 1 deletion

File tree

src/maxtext/configs/pyconfig.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -81,6 +81,11 @@ def _resolve_or_infer_config(argv: list[str] | None = None, **kwargs) -> tuple[s
8181
"""Resolves or infers config file path from module."""
8282
if argv is None:
8383
argv = [""]
84+
85+
if kwargs.get("base_config"):
86+
logger.info(f"Using config : {kwargs['base_config']}")
87+
return resolve_config_path(kwargs["base_config"]), argv[1:] if len(argv) > 1 else []
88+
8489
if len(argv) >= 2 and argv[1].endswith(".yml"):
8590
return resolve_config_path(argv[1]), argv[2:]
8691
module = _module_from_path(argv[0]) if len(argv) > 0 else None

src/maxtext/utils/model_creation_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -252,7 +252,7 @@ def from_pretrained(config, original_mesh=None, devices=None, model_mode=MODEL_M
252252
"""Creates a NNX model with sharded parameters, possibly loading from a checkpoint."""
253253
mesh = original_mesh
254254
if config.convert_checkpoint_if_possible:
255-
if not not (epath.Path(config.base_output_directory) / "0" / "items").exists():
255+
if not (epath.Path(config.base_output_directory) / "0" / "items").exists():
256256
# Try to convert checkpoint on the fly
257257
if not config.hf_access_token:
258258
raise ValueError("hf_access_token must be provided when not providing a pre-existing checkpoint")

0 commit comments

Comments (0)