
Commit d679ad7

Update XNNPACK to 1ed874e65 (#6538)
* Update XNNPACK to c88c8504fd9889c22391f0f3ece6061a7f855cf3 and fix a bug
* Update test_llama.sh and test_llava.sh to use release mode as default
1 parent 5785fc3 commit d679ad7

6 files changed: +22, -27 lines changed


.ci/scripts/test_llama.sh

Lines changed: 7 additions & 4 deletions
@@ -51,6 +51,9 @@ UPLOAD_DIR="${UPLOAD_DIR:-}"
 # Default PT2E_QUANTIZE to empty string if not set
 PT2E_QUANTIZE="${PT2E_QUANTIZE:-}"

+# Default CMake Build Type to release mode
+CMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE:-Release}
+
 if [[ $# -lt 4 ]]; then # Assuming 4 mandatory args
 echo "Expecting atleast 4 positional arguments"
 echo "Usage: [...]"
@@ -143,7 +146,7 @@ cmake_install_executorch_libraries() {
 rm -rf cmake-out
 retry cmake \
 -DCMAKE_INSTALL_PREFIX=cmake-out \
--DCMAKE_BUILD_TYPE=Debug \
+-DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
 -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
 -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
 -DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \
@@ -157,22 +160,22 @@ cmake_install_executorch_libraries() {
 -DQNN_SDK_ROOT="$QNN_SDK_ROOT" \
 -DPYTHON_EXECUTABLE="$PYTHON_EXECUTABLE" \
 -Bcmake-out .
-cmake --build cmake-out -j9 --target install --config Debug
+cmake --build cmake-out -j9 --target install --config "$CMAKE_BUILD_TYPE"
 }

 cmake_build_llama_runner() {
 echo "Building llama runner"
 dir="examples/models/llama"
 retry cmake \
 -DCMAKE_INSTALL_PREFIX=cmake-out \
--DCMAKE_BUILD_TYPE=Debug \
+-DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
 -DEXECUTORCH_BUILD_KERNELS_CUSTOM="$CUSTOM" \
 -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
 -DEXECUTORCH_BUILD_XNNPACK="$XNNPACK" \
 -DPYTHON_EXECUTABLE="$PYTHON_EXECUTABLE" \
 -Bcmake-out/${dir} \
 ${dir}
-cmake --build cmake-out/${dir} -j9 --config Debug
+cmake --build cmake-out/${dir} -j9 --config "$CMAKE_BUILD_TYPE"

 }
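The new default relies on bash's ${VAR:-default} parameter expansion: CMAKE_BUILD_TYPE falls back to Release only when the variable is unset or empty, so an exported value always wins. A minimal stand-alone sketch of the idiom (hypothetical script, not part of this commit):

    #!/usr/bin/env bash
    set -eu

    # Fall back to Release when CMAKE_BUILD_TYPE is unset or empty,
    # mirroring the defaulting added to test_llama.sh above.
    CMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE:-Release}

    echo "Configuring with CMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}"

    # The same value is threaded through both the configure and build steps, e.g.:
    #   cmake -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" -Bcmake-out .
    #   cmake --build cmake-out --target install --config "$CMAKE_BUILD_TYPE"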

.ci/scripts/test_llava.sh

Lines changed: 8 additions & 8 deletions
@@ -8,11 +8,11 @@
 set -exu
 # shellcheck source=/dev/null

-BUILD_TYPE=${1:-Debug}
 TARGET_OS=${2:-Native}
 BUILD_DIR=${3:-cmake-out}
+CMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE:-Release}

-echo "Building with BUILD_TYPE: $BUILD_TYPE, TARGET_OS: $TARGET_OS, BUILD_DIR: $BUILD_DIR"
+echo "Building with CMAKE_BUILD_TYPE: $CMAKE_BUILD_TYPE, TARGET_OS: $TARGET_OS, BUILD_DIR: $BUILD_DIR"

 if [[ -z "${PYTHON_EXECUTABLE:-}" ]]; then
 PYTHON_EXECUTABLE=python3
@@ -32,7 +32,7 @@ if hash nproc &> /dev/null; then NPROC=$(nproc); fi

 EXECUTORCH_COMMON_CMAKE_ARGS=" \
 -DCMAKE_INSTALL_PREFIX=${BUILD_DIR} \
--DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
+-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
 -DEXECUTORCH_ENABLE_LOGGING=ON \
 -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
 -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
@@ -49,7 +49,7 @@ cmake_install_executorch_libraries() {
 ${EXECUTORCH_COMMON_CMAKE_ARGS} \
 -B${BUILD_DIR} .

-cmake --build ${BUILD_DIR} -j${NPROC} --target install --config ${BUILD_TYPE}
+cmake --build ${BUILD_DIR} -j${NPROC} --target install --config ${CMAKE_BUILD_TYPE}
 }

 cmake_install_executorch_libraries_for_android() {
@@ -59,14 +59,14 @@ cmake_install_executorch_libraries_for_android() {
 ${EXECUTORCH_COMMON_CMAKE_ARGS} \
 -B${BUILD_DIR} .

-cmake --build ${BUILD_DIR} -j${NPROC} --target install --config ${BUILD_TYPE}
+cmake --build ${BUILD_DIR} -j${NPROC} --target install --config ${CMAKE_BUILD_TYPE}
 }


 LLAVA_COMMON_CMAKE_ARGS=" \
 -DPYTHON_EXECUTABLE="$PYTHON_EXECUTABLE" \
 -DCMAKE_INSTALL_PREFIX=${BUILD_DIR} \
--DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
+-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
 -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
 -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
 -DEXECUTORCH_BUILD_XNNPACK=ON"
@@ -81,7 +81,7 @@ cmake_build_llava_runner() {
 -B${BUILD_DIR}/${dir} \
 ${dir}

-cmake --build ${BUILD_DIR}/${dir} -j${NPROC} --config ${BUILD_TYPE}
+cmake --build ${BUILD_DIR}/${dir} -j${NPROC} --config ${CMAKE_BUILD_TYPE}
 }


@@ -98,7 +98,7 @@ cmake_build_llava_runner_for_android() {
 -B${BUILD_DIR}/${dir} \
 ${dir}

-cmake --build ${BUILD_DIR}/${dir} -j${NPROC} --config ${BUILD_TYPE}
+cmake --build ${BUILD_DIR}/${dir} -j${NPROC} --config ${CMAKE_BUILD_TYPE}
 }

 # only export the one without custom op for now since it's
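With the positional BUILD_TYPE argument gone, the build type of test_llava.sh is now controlled only through the environment; the CI entry in trunk.yml below drops the explicit Release argument accordingly. A hedged usage sketch (invocations assumed, not taken from the commit):

    # Default: Release build, per the CMAKE_BUILD_TYPE fallback above.
    bash .ci/scripts/test_llava.sh

    # Restore the previous Debug behaviour by overriding the environment variable.
    CMAKE_BUILD_TYPE=Debug bash .ci/scripts/test_llava.sh

    # test_llama.sh reads the same variable; its four positional arguments are
    # unchanged (placeholders only -- consult the script's usage message):
    # CMAKE_BUILD_TYPE=Debug bash .ci/scripts/test_llama.sh <arg1> <arg2> <arg3> <arg4>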

.github/workflows/trunk.yml

Lines changed: 1 addition & 1 deletion
@@ -290,7 +290,7 @@ jobs:
 # ${CONDA_RUN} python -m unittest examples.models.llava.test.test_llava

 # # run e2e (export, tokenizer and runner)
-# PYTHON_EXECUTABLE=python ${CONDA_RUN} bash .ci/scripts/test_llava.sh Release
+# PYTHON_EXECUTABLE=python ${CONDA_RUN} bash .ci/scripts/test_llava.sh

 test-qnn-model:
 name: test-qnn-model

backends/xnnpack/third-party/XNNPACK

Submodule XNNPACK updated 2740 files

backends/xnnpack/third-party/xnnpack.buck.bzl

Lines changed: 4 additions & 2 deletions
@@ -42,7 +42,7 @@ def define_xnnpack():
 "XNNPACK/src/mutex.c",
 "XNNPACK/src/normalization.c",
 "XNNPACK/src/operator-utils.c",
-"XNNPACK/src/packing.cc",
+"XNNPACK/src/reference/packing.cc",
 ],
 headers = get_xnnpack_headers(),
 header_namespace = "",
@@ -67,7 +67,7 @@ def define_xnnpack():
 # @lint-ignore BUCKLINT: native and fb_native are explicitly forbidden in fbcode.
 native.cxx_library(
 name = "subgraph",
-srcs = SUBGRAPH_SRCS,
+srcs = SUBGRAPH_SRCS + ["XNNPACK/src/datatype.c"],
 compiler_flags = [
 "-Wno-error=missing-braces", # required since the SGX toolchain does not have this by default
 ],
@@ -1076,6 +1076,8 @@ def define_xnnpack():
 "XNNPACK/src/configs/hardware-config.c",
 "XNNPACK/src/microparams-init.c",
 "XNNPACK/src/microkernel-utils.c",
+"XNNPACK/src/reference/binary-elementwise.cc",
+"XNNPACK/src/reference/unary-elementwise.cc",
 ],
 headers = get_xnnpack_headers(),
 exported_headers = {

backends/xnnpack/third-party/xnnpack_src_defs.bzl

Lines changed: 1 addition & 11 deletions
@@ -17,24 +17,14 @@ def prod_srcs_for_arch_wrapper(arch):
 return define_xnnpack_build_src(prod_srcs)

 def get_xnnpack_headers():
-# XNNPACK Headers in the path containing xnnpack/ or configs/
-# do not contain the src/ path. However headers not in xnnpack/ or
-# configs/ are prepend with the src/ path. This function helps us
-# to correctly parse all the header files to the correct name
 src_headers = subdir_glob([
 ("XNNPACK/src", "**/*.h"),
 ])
-fixed_headers = {}
-for k, v in src_headers.items():
-new_key = k
-if not k.startswith("xnnpack") and not k.startswith("configs"):
-new_key = "src/{}".format(k)
-fixed_headers[new_key] = v
 include_headers = subdir_glob([
 ("XNNPACK/include", "*.h"),
 ])

-return fixed_headers | include_headers
+return src_headers | include_headers

 OPERATOR_SRCS = define_xnnpack_build_src(_OPERATOR_SRCS)
 SUBGRAPH_SRCS = define_xnnpack_build_src(_SUBGRAPH_SRCS)
