Commit 0f1bc67

Added unit test.
Simplified code. In the case of no user inputs, lifted constants are inserted before the `None` node, i.e. at the beginning of the graph; we update the graph signature's input specs to place them at the beginning as well. Fixed formatting.
1 parent a563d8a commit 0f1bc67

File tree

3 files changed: +43, -35 lines

exir/memory_planning.py
exir/program/_program.py
exir/tests/test_passes.py

exir/memory_planning.py

Lines changed: 10 additions & 1 deletion
@@ -248,7 +248,16 @@ def verify_graph_input_output(self) -> None:
             has_dynamic_unbound_output |= has_dynamic_unbound_tensor

         # only check if inputs are allocated if there are user inputs:
-        user_inputs_exist = len(list(filter(lambda input: input.kind == InputKind.USER_INPUT, self.graph_signature.input_specs))) > 0
+        user_inputs_exist = (
+            len(
+                list(
+                    filter(
+                        lambda input: input.kind == InputKind.USER_INPUT,
+                        self.graph_signature.input_specs,
+                    )
+                )
+            )
+        ) > 0

         if "placeholder" in check_list and user_inputs_exist:
             assert graph_input_allocated is not None, "graph_input_allocated not set"
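Aside (not part of this commit): the same guard could be written more compactly with any(); a minimal sketch, assuming the same InputKind enum and graph_signature attribute used in the diff above:

    # Equivalent, terser form of the check introduced above (illustrative only):
    user_inputs_exist = any(
        spec.kind == InputKind.USER_INPUT
        for spec in self.graph_signature.input_specs
    )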

exir/program/_program.py

Lines changed: 4 additions & 34 deletions
@@ -268,42 +268,13 @@ def lift_constant_tensor_pass(ep):
     buffers = list(graph_signature.buffers)

     fake_mode = list(ep.graph.nodes)[0].meta["val"].fake_mode
-    insert_before_node = None
+    first_user_input = None
     lifted_constants = []
     for node in ep.graph.nodes:
         if node.op == "placeholder" and node.name in graph_signature.user_inputs:
-            insert_before_node = node  # first user input
+            first_user_input = node
             break

-    if insert_before_node is None:
-        # we have no user inputs, find the node after the last buffer
-        # (that we will insert the lifted constants before).
-        # this is a bit hacky, but I am not certain of what the contract is for
-        # node ordering. is the first non-placeholder node guranteed to be the
-        # first node after input paramters? what if there is no op, and it is
-        # just placeholders? Easier to just find the last buffer, and insert after.
-
-        # also error if we have no buffers and no user inputs... if that is an issue, fix it later?
-        last_buffer = None
-        for node in ep.graph.nodes:
-            node_buffer_fqn = graph_signature.inputs_to_buffers.get(node.name, None)
-            # not sure if both cases are needed, if is it possible to encounter a
-            # buffer that is not a user input?
-            if (
-                node_buffer_fqn is not None
-                and node_buffer_fqn in graph_signature.buffers
-            ):
-                last_buffer = node
-                continue
-            if node.op == "placeholder" and node.name in graph_signature.buffers:
-                last_buffer = node
-                continue
-        # we have our last buffer, grab the node after it, to insert the lifted constants before.
-        insert_before_node = last_buffer.next
-
-    if insert_before_node is None:
-        raise ValueError("No user inputs and no buffers found. Cannot lift constants.")
-
     for node in ep.graph.nodes:
         if node.op == "get_attr":
             constant_tensor = getattr(ep.graph_module, node.target)
@@ -312,7 +283,7 @@ def lift_constant_tensor_pass(ep):

             constant_tensor_fqn = f"_lifted_tensor_constant{len(buffers)}"

-            with ep.graph.inserting_before(insert_before_node):
+            with ep.graph.inserting_before(first_user_input):
                 # Insert the constant node before the first user input
                 const_placeholder_node = ep.graph.placeholder(constant_tensor_fqn)
                 for k, v in node.meta.items():
@@ -345,9 +316,8 @@ def lift_constant_tensor_pass(ep):
             new_input_specs.extend(lifted_constants)
             lifted_constants.clear()
         new_input_specs.append(s)
-    # Add remaining lifted constants if no user inputs exist.
     if len(lifted_constants) > 0:
-        new_input_specs.extend(lifted_constants)
+        new_input_specs = lifted_constants + new_input_specs
     ep.graph_signature.input_specs = new_input_specs
     ep.graph_module.recompile()
     return ep
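For context on why the simplification still covers the no-user-input case: first_user_input stays None, and torch.fx's Graph.inserting_before(None) places new nodes at the beginning of the graph, which is exactly the position the prepended specs (lifted_constants + new_input_specs) describe. A minimal standalone sketch of that torch.fx behavior (illustrative only, not the pass itself):

    import torch
    from torch.fx import symbolic_trace

    class M(torch.nn.Module):
        def forward(self, x):
            return x + 1

    gm = symbolic_trace(M())
    # With no insertion point given, new nodes go to the beginning of the graph.
    with gm.graph.inserting_before(None):
        gm.graph.placeholder("lifted_const")
    print(next(iter(gm.graph.nodes)).name)  # prints "lifted_const"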

exir/tests/test_passes.py

Lines changed: 29 additions & 0 deletions
@@ -1057,6 +1057,35 @@ def forward(self, x: torch.Tensor) -> torch.Tensor:
             new_ep.graph_module.code
         )

+    def test_pass_no_user_inputs(self) -> None:
+        class NoUserInputs(torch.nn.Module):
+            def __init__(self):
+                super().__init__()
+                self.register_buffer("a", torch.ones(1))
+
+            def forward(self) -> torch.Tensor:
+                return 3 + self.a
+
+        mod = NoUserInputs()
+        exported_program = export(mod, (), strict=True)
+        edge = to_edge(
+            exported_program,
+            compile_config=EdgeCompileConfig(_skip_dim_order=False),
+        )
+        ep = edge.exported_program()
+        # because there is no user input, the lifted constant should be the first input.
+        FileCheck().check("_lifted_tensor_constant1").check(
+            "b_a"  # followed by the buffer input.
+        ).run(ep.graph_module.code)
+        # the graph signature should also be the same:
+        assert ep.graph_signature.input_specs[0].arg.name == "_lifted_tensor_constant1"
+        assert ep.graph_signature.input_specs[1].arg.name == "b_a"
+
+        executorch_program = edge.to_executorch()
+        # # the graph signature should also be the same:
+        # executorch_program.graph_signature.input_specs[0].arg.name == "_lifted_tensor_constant1"
+        # executorch_program.graph_signature.input_specs[1].arg.name == "b_a"
+
     def test_constant_prop_pass_for_parameter(self) -> None:
         def count_additions(gm: torch.fx.GraphModule) -> int:
             return sum(
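As a usage note, the ordering the FileCheck pattern verifies can also be read directly off the graph; a small sketch, assuming the test above has run through ep = edge.exported_program():

    # Placeholders appear in graph order; the lifted constant should now come first.
    placeholder_names = [n.name for n in ep.graph.nodes if n.op == "placeholder"]
    assert placeholder_names[:2] == ["_lifted_tensor_constant1", "b_a"]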

0 commit comments
