
Commit 05dd877

lucylq authored and facebook-github-bot committed
export and delegate example fix, typos (#478)
Summary:
- Fix error creating [composite and whole models](https://www.internalfb.com/intern/staticdocs/executorch/docs/tutorials/setting_up_executorch/#step-4-generate-a-program-file-from-an-nnmodule)
- Add test to CI when model is add_mul
- Found some typos as well

Differential Revision: D49602228
1 parent 9b384a4 commit 05dd877

6 files changed (+56, -16 lines)

.ci/scripts/gather_test_models.py

Lines changed: 1 addition & 0 deletions
@@ -64,6 +64,7 @@ def export_models_for_ci() -> None:
                 "quantization": quantization,
                 "xnnpack_delegation": xnnpack_delegation,
                 "runner": RUNNERS.get(name, DEFAULT_RUNNER),
+                "export_and_delegate": name == "add_mul",
             }
         )
     set_output("models", json.dumps(models))
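For context, a rough sketch of what a single entry of the generated test matrix looks like after this change. The field values are illustrative and only the keys visible in the hunk are taken from the script, but it shows that the new flag is set only for the add_mul model:

# Illustrative sketch, not the script's real output: one entry of the "models"
# matrix that export_models_for_ci() serializes via json.dumps(models).
name = "add_mul"
entry = {
    "quantization": False,                     # illustrative value
    "xnnpack_delegation": False,               # illustrative value
    "runner": "linux-default-runner",          # placeholder for RUNNERS.get(name, DEFAULT_RUNNER)
    "export_and_delegate": name == "add_mul",  # True only for the add_mul model
}
print(entry["export_and_delegate"])  # prints: True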

.ci/scripts/test.sh

Lines changed: 34 additions & 0 deletions
@@ -32,6 +32,11 @@ if [[ -z "${XNNPACK_DELEGATION:-}" ]]; then
   XNNPACK_DELEGATION=false
 fi
 
+EXPORT_AND_DELEGATE=$5
+if [[ -z "${EXPORT_AND_DELEGATE:-}" ]]; then
+  EXPORT_AND_DELEGATE=false
+fi
+
 which "${PYTHON_EXECUTABLE}"
 # Just set this variable here, it's cheap even if we use buck2
 CMAKE_OUTPUT_DIR=cmake-out
@@ -106,6 +111,30 @@ test_model_with_xnnpack() {
   fi
 }
 
+test_export_and_delegate() {
+  echo "Testing export and delegate on AddMul"
+  "${PYTHON_EXECUTABLE}" -m examples.export.export_and_delegate --option "composite"
+  "${PYTHON_EXECUTABLE}" -m examples.export.export_and_delegate --option "partition"
+  "${PYTHON_EXECUTABLE}" -m examples.export.export_and_delegate --option "whole"
+
+  # Run test model
+  if [[ "${BUILD_TOOL}" == "buck2" ]]; then
+    buck2 run //examples/executor_runner:executor_runner -- --model_path "./composite_model.pte"
+    buck2 run //examples/executor_runner:executor_runner -- --model_path "./partition_lowered_model.pte"
+    buck2 run //examples/executor_runner:executor_runner -- --model_path "./whole.pte"
+  elif [[ "${BUILD_TOOL}" == "cmake" ]]; then
+    if [[ ! -f ${CMAKE_OUTPUT_DIR}/executor_runner ]]; then
+      build_cmake_executor_runner
+    fi
+    ./${CMAKE_OUTPUT_DIR}/executor_runner --model_path "./composite_model.pte"
+    ./${CMAKE_OUTPUT_DIR}/executor_runner --model_path "./partition_lowered_model.pte"
+    ./${CMAKE_OUTPUT_DIR}/executor_runner --model_path "./whole.pte"
+  else
+    echo "Invalid build tool ${BUILD_TOOL}. Only buck2 and cmake are supported atm"
+    exit 1
+  fi
+}
+
 echo "Testing ${MODEL_NAME} (fp32, quantized, xnnpack) with ${BUILD_TOOL}..."
 # Test the select model without XNNPACK or quantization
 test_model
@@ -128,3 +157,8 @@ fi
 if [[ "${XNNPACK_DELEGATION}" == true ]] && [[ "${QUANTIZATION}" == true ]]; then
   test_model_with_xnnpack true
 fi
+
+# Test export and delegate
+if [[ "${EXPORT_AND_DELEGATE}" == true ]]; then
+  test_export_and_delegate
+fi

.github/workflows/pull.yml

Lines changed: 4 additions & 2 deletions
@@ -59,10 +59,11 @@ jobs:
         BUILD_TOOL=${{ matrix.build-tool }}
         QUANTIZATION=${{ matrix.quantization }}
         XNNPACK_DELEGATION=${{ matrix.xnnpack_delegation }}
+        EXPORT_AND_DELEGATE=${{ matrix.export_and_delegate }}
 
         PYTHON_EXECUTABLE=python bash .ci/scripts/setup-linux.sh "${BUILD_TOOL}"
         # Build and test Executorch
-        PYTHON_EXECUTABLE=python bash .ci/scripts/test.sh "${MODEL_NAME}" "${BUILD_TOOL}" "${QUANTIZATION}" "${XNNPACK_DELEGATION}"
+        PYTHON_EXECUTABLE=python bash .ci/scripts/test.sh "${MODEL_NAME}" "${BUILD_TOOL}" "${QUANTIZATION}" "${XNNPACK_DELEGATION}" "${EXPORT_AND_DELEGATE}"
 
   test-models-macos:
     name: test-models-macos
@@ -84,11 +85,12 @@ jobs:
         BUILD_TOOL=${{ matrix.build-tool }}
        QUANTIZATION=${{ matrix.quantization }}
         XNNPACK_DELEGATION=${{ matrix.xnnpack_delegation }}
+        EXPORT_AND_DELEGATE=${{ matrix.export_and_delegate }}
 
         # Setup MacOS dependencies as there is no Docker support on MacOS atm
         PYTHON_EXECUTABLE=python bash .ci/scripts/setup-macos.sh "${BUILD_TOOL}"
         # Build and test Executorch
-        PYTHON_EXECUTABLE=python bash .ci/scripts/test.sh "${MODEL_NAME}" "${BUILD_TOOL}" "${QUANTIZATION}" "${XNNPACK_DELEGATION}"
+        PYTHON_EXECUTABLE=python bash .ci/scripts/test.sh "${MODEL_NAME}" "${BUILD_TOOL}" "${QUANTIZATION}" "${XNNPACK_DELEGATION}" "${EXPORT_AND_DELEGATE}"
         popd
 
   test-custom-ops-linux:

docs/website/docs/ir_spec/00_exir.md

Lines changed: 3 additions & 3 deletions
@@ -70,7 +70,7 @@ The `ExportedProgram` has the following attributes:
   containing the parameters and buffers.
 * `range constraints (Dict[sympy.Symbol, RangeConstraint])`: For programs that
   are exported with data dependent behavior, the metadata on each node will
-  contain symbolic shapes (hich look like `s0`, `i0`). This attribute maps the
+  contain symbolic shapes (which look like `s0`, `i0`). This attribute maps the
   symbolic shapes to their lower/upper ranges.
 * `equality_constraints (List[Tuple[InputDim, InputDim]])`: A list of nodes in
   the graph and dimensions that have the same shape.
@@ -196,7 +196,7 @@ A `call_function` node represents a call to an operator.
 
 * **Functional:** We say a callable is “functional” if it satisfy all following requirements:
   * Non-aliasing, ie output tensors do not share data storage with each other or with inputs of the operator
-  * Non-mutating, ie the operator does not mutate value of it’s input (for tensors, this includes both metadata and data)
+  * Non-mutating, ie the operator does not mutate value of its input (for tensors, this includes both metadata and data)
   * No side effects, ie the operator does not mutate states that are visible from outside, like changing values of module parameters.
 
 * **Operator:** is a functional callable with a predefined schema. Examples of
@@ -208,7 +208,7 @@ A `call_function` node represents a call to an operator.
 %name = call_function[target = operator](args = (%x, %y, …), kwargs = {})
 ```
 
-#### Differences from vanila FX call_function
+#### Differences from vanilla FX call_function
 
 1. In FX graph, a call_function can refer to any callable, in EXIR, we restrict
 this to only Canonical ATen operators (a select subset of PyTorch ATen operator

docs/website/docs/tutorials/exporting_to_executorch.md

Lines changed: 1 addition & 1 deletion
@@ -55,7 +55,7 @@ dynamic and impose ranges on them. To learn more about constraints, you can look
 at [these docs](../export/constraint_apis.md)
 
 The output of `torch._export.capture_pre_autograd_graph` is a fully flattened graph (meaning the graph does
-not contain any module heirachy, except in the case of control flow operators).
+not contain any module hierachy, except in the case of control flow operators).
 Furthermore, the captured graph is in ATen dialect with ATen opset which is autograd safe, i.e. safe for eager mode training.
 This is important for quantization as noted in https://github.com/pytorch/executorch/issues/290.

examples/export/export_and_delegate.py

Lines changed: 13 additions & 10 deletions
@@ -18,8 +18,9 @@
 from executorch.exir.backend.test.op_partitioner_demo import AddMulPartitionerDemo
 
 from ..models import MODEL_NAME_TO_MODEL
+from ..models.model_factory import EagerModelFactory
 
-from ..utils import export_to_edge
+from .utils import export_to_edge
 
 
 FORMAT = "[%(levelname)s %(asctime)s %(filename)s:%(lineno)s] %(message)s"
@@ -37,7 +38,7 @@
 """
 
 
-def export_compsite_module_with_lower_graph():
+def export_composite_module_with_lower_graph():
     """
 
     AddMulModule:
@@ -57,9 +58,10 @@ def export_compsite_module_with_lower_graph():
     logging.info(
         "Running the example to export a composite module with lowered graph..."
     )
-    m, m_inputs = MODEL_NAME_TO_MODEL.get("add_mul")()
-    m = m.eval()
-    m_inputs = m.get_example_inputs()
+
+    m, m_inputs = EagerModelFactory.create_model(*MODEL_NAME_TO_MODEL["add_mul"])
+    m_compile_spec = m.get_compile_spec()
+
     # pre-autograd export. eventually this will become torch.export
     m = export.capture_pre_autograd_graph(m, m_inputs)
     edge = export_to_edge(m, m_inputs)
@@ -68,7 +70,7 @@ def export_compsite_module_with_lower_graph():
     # Lower AddMulModule to the demo backend
     logging.info("Lowering to the demo backend...")
     lowered_graph = to_backend(
-        BackendWithCompilerDemo.__name__, edge.exported_program, m.get_compile_spec()
+        BackendWithCompilerDemo.__name__, edge.exported_program, m_compile_spec
     )
 
     # Composite the lower graph with other module
@@ -166,8 +168,9 @@ def export_and_lower_the_whole_graph():
     """
     logging.info("Running the example to export and lower the whole graph...")
 
-    m, m_inputs = MODEL_NAME_TO_MODEL.get("add_mul")()
-    m = m.eval()
+    m, m_inputs = EagerModelFactory.create_model(*MODEL_NAME_TO_MODEL["add_mul"])
+    m_compile_spec = m.get_compile_spec()
+
     m_inputs = m.get_example_inputs()
     # pre-autograd export. eventually this will become torch.export
     m = export.capture_pre_autograd_graph(m, m_inputs)
@@ -177,7 +180,7 @@ def export_and_lower_the_whole_graph():
     # Lower AddMulModule to the demo backend
     logging.info("Lowering to the demo backend...")
     lowered_module = to_backend(
-        BackendWithCompilerDemo.__name__, edge, m.get_compile_spec()
+        BackendWithCompilerDemo.__name__, edge.exported_program, m_compile_spec
    )
 
     buffer = lowered_module.buffer()
@@ -190,7 +193,7 @@ def export_and_lower_the_whole_graph():
 
 
 OPTIONS_TO_LOWER = {
-    "composite": export_compsite_module_with_lower_graph,
+    "composite": export_composite_module_with_lower_graph,
     "partition": export_and_lower_partitioned_graph,
     "whole": export_and_lower_the_whole_graph,
 }
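Putting the pieces of this fix together, the sketch below condenses the corrected "whole" path into a standalone script run from the repo root. It is a sketch, not the example file itself: the examples.* absolute imports mirror the `python -m examples.export.export_and_delegate` invocation used in CI above, and the import paths for `to_backend` and `BackendWithCompilerDemo` are assumptions, since they do not appear in the hunks.

# A minimal sketch of the fixed "whole graph" flow, assuming the ExecuTorch
# repo root as the working directory; import paths not shown in the diff
# (to_backend, BackendWithCompilerDemo) are assumptions.
import torch._export

from examples.export.utils import export_to_edge
from examples.models import MODEL_NAME_TO_MODEL
from examples.models.model_factory import EagerModelFactory

from executorch.exir.backend.backend_api import to_backend
from executorch.exir.backend.test.backend_with_compiler_demo import (
    BackendWithCompilerDemo,
)

# The fix: create the eager module through EagerModelFactory instead of calling
# the registry entry directly, and grab the compile spec before export.
m, m_inputs = EagerModelFactory.create_model(*MODEL_NAME_TO_MODEL["add_mul"])
m_compile_spec = m.get_compile_spec()

# Pre-autograd export, convert to Edge, then lower the whole exported program
# (not the raw edge object) to the demo backend.
m = torch._export.capture_pre_autograd_graph(m, m_inputs)
edge = export_to_edge(m, m_inputs)
lowered_module = to_backend(
    BackendWithCompilerDemo.__name__, edge.exported_program, m_compile_spec
)

# Serialize to the program file that executor_runner loads in the CI test.
with open("whole.pte", "wb") as f:
    f.write(lowered_module.buffer())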
