2 files changed (+24, −0)

@@ -86,6 +86,11 @@ __ET_NODISCARD Error XNNExecutor::prepare_args(EValue** args) {
     // Reshape runtime inputs
     if (i < input_ids_.size()) {
       size_t num_dims = tensor->dim();
+      ET_CHECK_OR_RETURN_ERROR(
+          is_contiguous_dim_order(tensor->dim_order().data(), tensor->dim()),
+          Internal,
+          "Expecting default dim_order but got a non-default dim_order tensor for external input %u",
+          i);
       size_t dims[XNN_MAX_TENSOR_DIMS];
       ET_CHECK_OR_RETURN_ERROR(
           num_dims <= XNN_MAX_TENSOR_DIMS,
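For context on the runtime guard added above: a tensor's dim order is "default" (contiguous) exactly when it is the identity permutation over its dimensions. Below is a minimal Python sketch of the predicate; the real `is_contiguous_dim_order` is a C++ helper in the ExecuTorch runtime, so this version is illustrative only:

```python
from typing import Sequence

# Illustrative sketch (not the ExecuTorch implementation): the runtime check
# accepts an external input only if its dim order is the identity permutation
# (0, 1, ..., n-1), i.e. the default contiguous layout.
def is_contiguous_dim_order(dim_order: Sequence[int]) -> bool:
    return tuple(dim_order) == tuple(range(len(dim_order)))

assert is_contiguous_dim_order((0, 1, 2, 3))      # NCHW / contiguous: passes
assert not is_contiguous_dim_order((0, 2, 3, 1))  # NHWC / channels_last: rejected
```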
@@ -78,6 +78,22 @@ def generate_node_to_external_map(
     return node_to_external_map


+def assert_default_dim_order(edge_graph_module: torch.fx.GraphModule) -> None:
+    for node in edge_graph_module.graph.nodes:
+        if node.op != "placeholder":
+            continue
+
+        # We expect the default dim order for all tensor-like inputs, i.e. inputs, buffers, and params
+        t = node.meta.get("val", None)
+        if t is not None and getattr(t, "dim_order", None) is not None:
+            default_dim_order = tuple(range(t.dim()))
+            if t.dim_order() != default_dim_order:
+                raise RuntimeError(
+                    f"XNNPACK backend only supports contiguous memory format for inputs. "
+                    f"Expecting dim_order: {default_dim_order}, but got {t.dim_order()} for placeholder node {node}."
+                )
+
+
 @final
 class XnnpackBackend(BackendDetails):
     @staticmethod
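To see what trips the new `assert_default_dim_order` check, compare the dim order of a contiguous tensor with a channels-last one. This assumes a PyTorch build where the prototype `Tensor.dim_order()` API is available:

```python
import torch

x = torch.randn(1, 3, 8, 8)                  # default contiguous (NCHW) layout
y = x.to(memory_format=torch.channels_last)  # same logical shape, NHWC strides

print(x.dim_order())  # (0, 1, 2, 3): passes assert_default_dim_order
print(y.dim_order())  # (0, 2, 3, 1): would raise the RuntimeError above
```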
@@ -126,6 +142,9 @@ def preprocess(

         node_to_external_map = generate_node_to_external_map(ep, graph_module)

+        # Make sure all inputs use the default dim order (contiguous_format / NCHW)
+        assert_default_dim_order(graph_module)
+
         # TODO: retrace the graph module to lift the new params that may have
         # been added to the graph in passes

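For users whose exported model hits this error, the usual remedy is to convert inputs back to the default layout before export. A hypothetical snippet; the input here is illustrative and not part of this PR:

```python
import torch

# A channels-last input would fail both the AOT check in preprocess()
# and the runtime check in XNNExecutor::prepare_args.
x_nhwc = torch.randn(1, 3, 8, 8).to(memory_format=torch.channels_last)

# .contiguous() restores the default (identity) dim order that the
# XNNPACK backend expects for external inputs.
x_nchw = x_nhwc.contiguous()
assert x_nchw.dim_order() == (0, 1, 2, 3)
```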