Skip to content

Commit 84f35d6

Browse files
NefAI authored and cursoragent committed
fix: resolve emitter getitem, alloc spec, and view_copy spec errors
- _emitter.py: handle single _AbstractValue from delegate in getitem (fixes '_AbstractValue' object is not subscriptable) - memory_planning.py: skip memory.alloc nodes in ensure_graph_node_specs (fixes 'Out-var allocation node already has a spec assigned') - replace_view_copy_with_view_pass.py: derive base_spec from meta["val"] when meta["spec"] is missing (fixes KeyError: 'spec' for xnnpack tests) Fixes test_mobilenet_v3, test_resnet18, test_mobilenet_v3_xnnpack, test_resnet18_xnnpack, test_to_out_variant_multiple_out, test_emit_lowered_backend_module, test_emit_nested_lowered_backend_module. Co-authored-by: Cursor <cursoragent@cursor.com>
1 parent d4fa140 commit 84f35d6

3 files changed

Lines changed: 21 additions & 4 deletions

File tree

exir/emit/_emitter.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1747,9 +1747,12 @@ def call_function(  # pyre-fixme[14]
         # Default to none and let delegates and ops override.
         if target == operator.getitem:
             assert len(args) == 2
-            head = typing.cast(Mapping[int, _EmitterValue], args[0])
+            head = args[0]
             index = typing.cast(int, args[1])
-            return head[index]
+            # Delegate may return a single _AbstractValue (single output) or tuple/list.
+            if index == 0 and isinstance(head, _AbstractValue):
+                return head
+            return typing.cast(Mapping[int, _EmitterValue], head)[index]

         elif target == memory.alloc:
             assert len(args) == 1

exir/memory_planning.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -759,12 +759,15 @@ def ensure_graph_node_specs(graph_module: torch.fx.GraphModule) -> None:
     """
     Set meta["spec"] from meta["val"] for nodes that are missing spec (e.g. output
     or out-var nodes in delegated graphs that were built after SpecPropPass).
+    Skip memory.alloc nodes; their spec is set by MemoryPlanningPass._set_alloc_node_spec.
     """
     for node in graph_module.graph.nodes:
         if "spec" in node.meta:
             continue
         if "val" not in node.meta:
             continue
+        if node.op == "call_function" and node.target is memory.alloc:
+            continue
         val = node.meta["val"]

         def to_spec(x: Any) -> Any:

exir/passes/replace_view_copy_with_view_pass.py

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -282,12 +282,23 @@ def call(self, graph_module: torch.fx.GraphModule) -> PassResult:

                 # Create spec for the node.
                 # _ViewSpec gives a view into its base spec for non-size
-                # related information.
+                # related information. Use base.meta["spec"] if present,
+                # else derive from base.meta["val"] (e.g. after delegation).
+                base_spec = base.meta.get("spec")
+                if base_spec is None and "val" in base.meta:
+                    val = base.meta["val"]
+                    if isinstance(val, torch.Tensor):
+                        base_spec = TensorSpec.from_tensor(val)
+                if base_spec is None:
+                    raise KeyError(
+                        f"replace_view_copy_with_view: base node {base} has no "
+                        "'spec' and no tensor 'val' to derive one"
+                    )

                 # the shape is not the same as node.args[1] because node.args[1]
                 # can have an inferred sizes (-1).
                 shape = node.meta["val"].shape
-                node.meta["spec"] = _ViewSpec(base.meta["spec"], shape)
+                node.meta["spec"] = _ViewSpec(base_spec, shape)

                 n_replaced += 1

0 commit comments

Comments (0)