Skip to content

ET Schema change to add module hierarchy to node metadata #376

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 1 commit into the base branch from the author's branch
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 6 additions & 2 deletions sdk/edir/et_schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,9 @@
# Keywords used in EDIR Metadata
class RESERVED_METADATA_ARG(Enum):
DEBUG_HANDLE = "debug_handle"
MODULE_STACK = "module_stack"
MODULE_STACK = "nn_module_stack"
SOURCE_FN = "source_fn"
MODULE_TYPE = "nn_module_type"
MODULE_TYPE = "module_type"
PROFILE_START_TIME = "profile_start_time"
PROFILE_END_TIME = "profile_end_time"
LOAD_START_TIME = "load_start_time"
Expand Down Expand Up @@ -719,6 +719,10 @@ def _extract_metadata(
ret[RESERVED_METADATA_ARG.STACK_TRACE.value] = metadata[
RESERVED_METADATA_ARG.STACK_TRACE.value
]
if RESERVED_METADATA_ARG.MODULE_STACK.value in metadata:
ret[RESERVED_METADATA_ARG.MODULE_STACK.value] = metadata[
RESERVED_METADATA_ARG.MODULE_STACK.value
]
return ret

# Not yet implemented
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
# @generated by //executorch/sdk/edir/tests:generate_fixtures

FXOperatorGraph(graph_name='base', elements=[FXOperatorGraph(graph_name='inputs', elements=[ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], metadata=None), FXOperatorGraph(graph_name='forward', elements=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), OperatorNode(name='mul_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='mul.Tensor'), OperatorGraphWithStats(graph_name='executorch_call_delegate', elements=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='executorch_call_delegate')], metadata={'module_type': 'executorch_call_delegate'})], metadata=None), FXOperatorGraph(graph_name='outputs', elements=[ValueNode(name='output_', inputs=[OperatorNode(name='mul_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), ValueNode(name='arg0_1_', 
inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='mul.Tensor')], output_shapes=None, metadata={}, dtype=None, val=None)], metadata=None)], metadata=None)
FXOperatorGraph(graph_name='base', elements=[FXOperatorGraph(graph_name='inputs', elements=[ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], metadata=None), FXOperatorGraph(graph_name='forward', elements=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'nn_module_stack': {'L__self___lowered_module': ("L['self'].lowered_module", <class 'executorch.exir.lowered_backend_module.LoweredBackendModule'>)}}, dtype=None, val=None), OperatorNode(name='mul_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'nn_module_stack': {'L__self___lowered_module': ("L['self'].lowered_module", <class 'executorch.exir.lowered_backend_module.LoweredBackendModule'>)}}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={'nn_module_stack': {'L__self___lowered_module': ("L['self'].lowered_module", <class 'executorch.exir.lowered_backend_module.LoweredBackendModule'>)}}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='mul.Tensor'), OperatorGraphWithStats(graph_name='executorch_call_delegate', elements=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'nn_module_stack': {'L__self___lowered_module': ("L['self'].lowered_module", <class 'executorch.exir.lowered_backend_module.LoweredBackendModule'>)}}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, 
output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={'nn_module_stack': {'L__self___lowered_module': ("L['self'].lowered_module", <class 'executorch.exir.lowered_backend_module.LoweredBackendModule'>)}}, op='executorch_call_delegate')], metadata={'module_type': 'executorch_call_delegate'})], metadata=None), FXOperatorGraph(graph_name='outputs', elements=[ValueNode(name='output_', inputs=[OperatorNode(name='mul_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'nn_module_stack': {'L__self___lowered_module': ("L['self'].lowered_module", <class 'executorch.exir.lowered_backend_module.LoweredBackendModule'>)}}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={'nn_module_stack': {'L__self___lowered_module': ("L['self'].lowered_module", <class 'executorch.exir.lowered_backend_module.LoweredBackendModule'>)}}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='mul.Tensor')], output_shapes=None, metadata={}, dtype=None, val=None)], metadata=None)], metadata=None)
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
# @generated by //executorch/sdk/edir/tests:generate_fixtures

FXOperatorGraph(graph_name='base', elements=[FXOperatorGraph(graph_name='inputs', elements=[ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=()), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], metadata=None), FXOperatorGraph(graph_name='forward', elements=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3}, dtype=None, val=None), OperatorNode(name='aten_mul_tensor_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 4}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 6}, op='aten.mul.Tensor'), OperatorGraphWithStats(graph_name='executorch_call_delegate', elements=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 4}, op='executorch_call_delegate')], metadata={'module_type': 'executorch_call_delegate'})], metadata=None), FXOperatorGraph(graph_name='outputs', 
elements=[ValueNode(name='output_', inputs=[OperatorNode(name='aten_mul_tensor_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 4}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 6}, op='aten.mul.Tensor')], output_shapes=None, metadata={'debug_handle': 7}, dtype=None, val=None)], metadata=None)], metadata=None)
FXOperatorGraph(graph_name='base', elements=[FXOperatorGraph(graph_name='inputs', elements=[ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=()), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], metadata=None), FXOperatorGraph(graph_name='forward', elements=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3, 'nn_module_stack': {'L__self___lowered_module': ("L['self'].lowered_module", <class 'executorch.exir.lowered_backend_module.LoweredBackendModule'>)}}, dtype=None, val=None), OperatorNode(name='aten_mul_tensor_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3, 'nn_module_stack': {'L__self___lowered_module': ("L['self'].lowered_module", <class 'executorch.exir.lowered_backend_module.LoweredBackendModule'>)}}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 4, 'nn_module_stack': {'L__self___lowered_module': ("L['self'].lowered_module", <class 'executorch.exir.lowered_backend_module.LoweredBackendModule'>)}}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 6}, op='aten.mul.Tensor'), OperatorGraphWithStats(graph_name='executorch_call_delegate', elements=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3, 
'nn_module_stack': {'L__self___lowered_module': ("L['self'].lowered_module", <class 'executorch.exir.lowered_backend_module.LoweredBackendModule'>)}}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 4, 'nn_module_stack': {'L__self___lowered_module': ("L['self'].lowered_module", <class 'executorch.exir.lowered_backend_module.LoweredBackendModule'>)}}, op='executorch_call_delegate')], metadata={'module_type': 'executorch_call_delegate'})], metadata=None), FXOperatorGraph(graph_name='outputs', elements=[ValueNode(name='output_', inputs=[OperatorNode(name='aten_mul_tensor_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3, 'nn_module_stack': {'L__self___lowered_module': ("L['self'].lowered_module", <class 'executorch.exir.lowered_backend_module.LoweredBackendModule'>)}}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 4, 'nn_module_stack': {'L__self___lowered_module': ("L['self'].lowered_module", <class 'executorch.exir.lowered_backend_module.LoweredBackendModule'>)}}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 6}, op='aten.mul.Tensor')], output_shapes=None, metadata={'debug_handle': 7}, dtype=None, val=None)], metadata=None)], metadata=None)
Loading