Skip to content

Commit c2cb8ec

Browse files
realAsma and claude committed
Address PR review feedback: type annotation, checkpoint warning, comment
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com> Signed-off-by: realAsma <akuriparambi@nvidia.com>
1 parent 31b3bca commit c2cb8ec

2 files changed

Lines changed: 6 additions & 1 deletion

File tree

modelopt/torch/opt/searcher.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -254,6 +254,10 @@ def load_search_checkpoint(self, strict=True) -> bool:
254254
return False
255255
# Backward compat: fall back to the original single-file path
256256
if not os.path.exists(checkpoint):
257+
warn_rank_0(
258+
f"Per-rank checkpoint {checkpoint} not found, falling back to "
259+
f"{self.config['checkpoint']}. Ensure world size matches the original run."
260+
)
257261
checkpoint = self.config["checkpoint"]
258262
if not os.path.exists(checkpoint):
259263
warn_rank_0(f"Checkpoint {checkpoint} does not exist! Initializing from scratch.")

modelopt/torch/quantization/algorithms.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -337,7 +337,7 @@ class _AutoQuantizeBaseSearcher(BaseSearcher, ABC):
337337
candidate_stats: dict[str, dict[str, list[float]]]
338338
best: dict[str, Any]
339339
quantizer_states: dict
340-
method_name: str = None
340+
method_name: str | None = None
341341

342342
quant_grouping_rules = [
343343
r"^(.*?)\.(q_proj|k_proj|v_proj)$", # q_proj, k_proj, v_proj for llama like models
@@ -1356,6 +1356,7 @@ def _resolve_best_recipe(search_state, constraints, verbose=False):
13561356

13571357

13581358
def _match_quantizer_cfg(quant_cfg, quantizer_attr):
1359+
# Last-match-wins to mirror set_quantizer_by_cfg behavior
13591360
matched = None
13601361
for pattern, cfg in quant_cfg.items():
13611362
if fnmatch.fnmatch(quantizer_attr, pattern):

0 commit comments

Comments (0)