
Commit c1382e3

Olivia-liu authored and facebook-github-bot committed
Update Inspector interface based on new design and define underlying data classes (Event, EventBlock, PerfData) (#343)
Summary: Design doc: https://docs.google.com/document/d/1-0z9yq2g2B8JFTpVUQ_7L8JqOe8n5esk8gfaFd1bg4E/edit?usp=sharing. The .txt fixtures are regenerated because of the et_schema change.

Reviewed By: Jack-Khuu

Differential Revision: D49099840
1 parent 7f395fd commit c1382e3

12 files changed: +307, -73 lines
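
The core of the commit is the new Inspector data model built around Event, EventBlock, and PerfData. Their actual definitions live in inspector.py (whose diff is not reproduced in this excerpt) and in the linked design doc; the following is only a minimal sketch of how such classes could be laid out, with every field name and type an assumption rather than something taken from this diff:

# Hypothetical sketch of the data classes named in the summary; the real
# definitions in inspector.py may differ in fields, types, and behavior.
from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class PerfData:
    # Raw timing samples for one profiled entity, e.g. per-run latencies (assumed).
    raw: List[float] = field(default_factory=list)

    @property
    def avg(self) -> float:
        return sum(self.raw) / len(self.raw) if self.raw else 0.0


@dataclass
class Event:
    # A single profiled operator or delegate call (assumed granularity).
    name: str
    perf_data: PerfData
    debug_handles: Optional[List[int]] = None


@dataclass
class EventBlock:
    # A group of Events belonging to one run of the model (assumed grouping).
    name: str
    events: List[Event] = field(default_factory=list)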

sdk/edir/et_schema.py

Lines changed: 2 additions & 2 deletions
@@ -35,7 +35,7 @@ class RESERVED_METADATA_ARG(Enum):
     DEBUG_HANDLE = "debug_handle"
     MODULE_STACK = "module_stack"
     SOURCE_FN = "source_fn"
-    MODULE_TYPE = "module_type"
+    MODULE_TYPE = "nn_module_type"
     PROFILE_START_TIME = "profile_start_time"
     PROFILE_END_TIME = "profile_end_time"
     LOAD_START_TIME = "load_start_time"
@@ -672,7 +672,7 @@ def _compose_op_graph(
     # Generate Module Graphs
     module_graphs: List[OperatorGraph] = []
     for module_key, module_nodes in module_mapping.items():
-        module_element = OperatorGraph(
+        module_element = OperatorGraphWithStats(
             graph_name=module_key[0],
             elements=module_nodes,
             metadata={"module_type": module_key[1]},
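
Only the serialized key changes in the enum rename above: code that resolves the key through RESERVED_METADATA_ARG keeps working unchanged, while anything hard-coding the old "module_type" string for this field would stop matching. A small self-contained sketch (the metadata dict here is a stand-in, not taken from a real graph):

from executorch.sdk.edir.et_schema import RESERVED_METADATA_ARG

# A stand-in node metadata dict written with the post-change key
# (illustrative only, not produced by a real export).
metadata = {RESERVED_METADATA_ARG.MODULE_TYPE.value: "torch.nn.Linear"}

# Lookups that go through the enum are unaffected by the rename ...
assert metadata[RESERVED_METADATA_ARG.MODULE_TYPE.value] == "torch.nn.Linear"

# ... while a lookup hard-coded to the old string no longer finds the entry.
assert metadata.get("module_type") is None
assert RESERVED_METADATA_ARG.MODULE_TYPE.value == "nn_module_type"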
Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
 # @generated by //executorch/sdk/edir/tests:generate_fixtures

-
FXOperatorGraph(graph_name='base', elements=[FXOperatorGraph(graph_name='inputs', elements=[ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], metadata=None), FXOperatorGraph(graph_name='forward', elements=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), OperatorNode(name='mul_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='mul.Tensor'), OperatorGraph(graph_name='executorch_call_delegate', elements=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='executorch_call_delegate')], metadata={'module_type': 'executorch_call_delegate'})], metadata=None), FXOperatorGraph(graph_name='outputs', elements=[ValueNode(name='output_', inputs=[OperatorNode(name='mul_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='mul.Tensor')], output_shapes=None, metadata={}, dtype=None, val=None)], metadata=None)], metadata=None)
+
FXOperatorGraph(graph_name='base', elements=[FXOperatorGraph(graph_name='inputs', elements=[ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], metadata=None), FXOperatorGraph(graph_name='forward', elements=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), OperatorNode(name='mul_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='mul.Tensor'), OperatorGraphWithStats(graph_name='executorch_call_delegate', elements=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='executorch_call_delegate')], metadata={'module_type': 'executorch_call_delegate'})], metadata=None), FXOperatorGraph(graph_name='outputs', elements=[ValueNode(name='output_', inputs=[OperatorNode(name='mul_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=())], output_shapes=None, metadata={}, op='mul.Tensor')], output_shapes=None, metadata={}, dtype=None, val=None)], metadata=None)], metadata=None)
Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
 # @generated by //executorch/sdk/edir/tests:generate_fixtures

-
FXOperatorGraph(graph_name='base', elements=[FXOperatorGraph(graph_name='inputs', elements=[ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=()), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], metadata=None), FXOperatorGraph(graph_name='forward', elements=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3}, dtype=None, val=None), OperatorNode(name='aten_mul_tensor_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 4}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 6}, op='aten.mul.Tensor'), OperatorGraph(graph_name='executorch_call_delegate', elements=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 4}, op='executorch_call_delegate')], metadata={'module_type': 'executorch_call_delegate'})], metadata=None), FXOperatorGraph(graph_name='outputs', elements=[ValueNode(name='output_', inputs=[OperatorNode(name='aten_mul_tensor_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 4}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 6}, op='aten.mul.Tensor')], output_shapes=None, metadata={'debug_handle': 7}, dtype=None, val=None)], metadata=None)], metadata=None)
+
FXOperatorGraph(graph_name='base', elements=[FXOperatorGraph(graph_name='inputs', elements=[ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=()), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], metadata=None), FXOperatorGraph(graph_name='forward', elements=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3}, dtype=None, val=None), OperatorNode(name='aten_mul_tensor_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 4}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 6}, op='aten.mul.Tensor'), OperatorGraphWithStats(graph_name='executorch_call_delegate', elements=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 4}, op='executorch_call_delegate')], metadata={'module_type': 'executorch_call_delegate'})], metadata=None), FXOperatorGraph(graph_name='outputs', elements=[ValueNode(name='output_', inputs=[OperatorNode(name='aten_mul_tensor_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={'debug_handle': 3}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 4}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 6}, op='aten.mul.Tensor')], output_shapes=None, metadata={'debug_handle': 7}, dtype=None, val=None)], metadata=None)], metadata=None)
Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
 # @generated by //executorch/sdk/edir/tests:generate_fixtures

-
FXOperatorGraph(graph_name='base', elements=[FXOperatorGraph(graph_name='inputs', elements=[ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=()), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], metadata=None), FXOperatorGraph(graph_name='forward', elements=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), OperatorNode(name='aten_mul_tensor_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 0}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 1}, op='mul.out'), OperatorGraph(graph_name='executorch_call_delegate', elements=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 0}, op='executorch_call_delegate')], metadata={'module_type': 'executorch_call_delegate'})], metadata=None), FXOperatorGraph(graph_name='outputs', elements=[ValueNode(name='output_', inputs=[OperatorNode(name='aten_mul_tensor_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 0}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 1}, op='mul.out')], output_shapes=None, metadata={'debug_handle': 7}, dtype=None, val=None)], metadata=None)], metadata=None)
+
FXOperatorGraph(graph_name='base', elements=[FXOperatorGraph(graph_name='inputs', elements=[ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=()), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], metadata=None), FXOperatorGraph(graph_name='forward', elements=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), OperatorNode(name='aten_mul_tensor_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 0}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 1}, op='mul.out'), OperatorGraphWithStats(graph_name='executorch_call_delegate', elements=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 0}, op='executorch_call_delegate')], metadata={'module_type': 'executorch_call_delegate'})], metadata=None), FXOperatorGraph(graph_name='outputs', elements=[ValueNode(name='output_', inputs=[OperatorNode(name='aten_mul_tensor_', inputs=[OperatorNode(name='executorch_call_delegate_', inputs=[ValueNode(name='lowered_module_0_', inputs=None, output_shapes=None, metadata={}, dtype=None, val=None), ValueNode(name='arg0_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 0}, dtype=None, val=()), ValueNode(name='arg1_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 1}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 0}, op='executorch_call_delegate'), ValueNode(name='arg2_1_', inputs=None, output_shapes=None, metadata={'debug_handle': 2}, dtype=None, val=())], output_shapes=None, metadata={'debug_handle': 1}, op='mul.out')], output_shapes=None, metadata={'debug_handle': 7}, dtype=None, val=None)], metadata=None)], metadata=None)

sdk/edir/tests/fixtures/two_linear_module_fx_graph_aten_dialect.txt

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

sdk/edir/tests/fixtures/two_linear_module_fx_graph_edge_dialect.txt

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

sdk/edir/tests/fixtures/two_linear_module_fx_graph_et_dialect.txt

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

sdk/etdb/TARGETS

Lines changed: 18 additions & 0 deletions
@@ -32,7 +32,25 @@ python_library(
         "inspector.py",
     ],
     deps = [
+        "fbsource//third-party/pypi/ipython:ipython",
+        "fbsource//third-party/pypi/numpy:numpy",
         "fbsource//third-party/pypi/pandas:pandas",
+        "fbsource//third-party/pypi/tabulate:tabulate",
+        ":inspector_utils",
+        "//caffe2:torch",
+        "//executorch/exir:lib",
+        "//executorch/sdk/edir:et_schema",
+        "//executorch/sdk/etrecord:etrecord",
+    ],
+)
+
+python_library(
+    name = "inspector_utils",
+    srcs = [
+        "_inspector_utils.py",
+    ],
+    deps = [
         "//executorch/sdk/edir:et_schema",
+        "//executorch/sdk/etrecord:etrecord",
     ],
 )

sdk/etdb/_inspector_utils.py

Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+from typing import Mapping
+
+from executorch.sdk.edir.et_schema import FXOperatorGraph, OperatorGraphWithStats
+from executorch.sdk.etrecord import ETRecord
+
+
+# TODO: add a unittest for this function
+def gen_graphs_from_etrecord(
+    etrecord: ETRecord,
+) -> Mapping[str, OperatorGraphWithStats]:
+    if etrecord.graph_map is None:
+        return {}
+    return {
+        name: FXOperatorGraph.gen_operator_graph(exported_program.graph_module)
+        for name, exported_program in etrecord.graph_map.items()
+    }
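
gen_graphs_from_etrecord is a thin wrapper over ETRecord.graph_map that rebuilds one OperatorGraphWithStats per recorded program. A hedged usage sketch follows; the parse_etrecord entry point, the import path executorch.sdk.etdb._inspector_utils, and the file path are assumptions about the surrounding SDK rather than part of this diff:

# Assumed usage; parse_etrecord and the module path are not defined in this commit.
from executorch.sdk.etdb._inspector_utils import gen_graphs_from_etrecord
from executorch.sdk.etrecord import parse_etrecord

# Load a previously generated ETRecord (placeholder path).
etrecord = parse_etrecord("/tmp/my_model.etrecord")

# One OperatorGraphWithStats per entry in the record's graph_map;
# an empty dict comes back when the record carries no graphs.
graphs = gen_graphs_from_etrecord(etrecord)
for name, graph in graphs.items():
    print(name, type(graph).__name__)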
