/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree.
 */

/**
 * @file
 *
 * This is a simple executor_runner that boots up the HiFi4 DSP, configures the
 * debug console, signals the M33 core over the messaging unit (MU) that the
 * DSP has started, and then loads the model defined in model_pte.h. It runs
 * this model using the ops available in the xtensa/ops directory.
 */

#include <fsl_debug_console.h>
#include "fsl_device_registers.h"
#include "fsl_mu.h"

#include "board_hifi4.h"
#include "model_pte.h"
#include "pin_mux.h"

#include <memory>
#include <vector>

#include <executorch/extension/data_loader/buffer_data_loader.h>
#include <executorch/runtime/executor/method.h>
#include <executorch/runtime/executor/program.h>
#include <executorch/runtime/platform/log.h>
#include <executorch/runtime/platform/profiler.h>
#include <executorch/runtime/platform/runtime.h>
#include <executorch/util/util.h>

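// Statically allocated scratch buffer handed to the method allocator below. It
// backs runtime bookkeeping that is not covered by the memory plan; the size
// here is what this small example model needs, not a general requirement.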
static uint8_t method_allocator_pool[18 * 1024U]; // 18 kB

using namespace torch::executor;
#include <xtensa/config/core.h>

#define APP_MU MUB
/* Flag indicating that this core has booted up */
#define BOOT_FLAG 0x01U
/* Channel transmit and receive register */
#define CHN_MU_REG_NUM 0U
/* How many messages are used to test message sending */
#define MSG_LENGTH 32U

using torch::executor::Error;
using torch::executor::Result;

void LED_INIT();
void LED_TOGGLE();

void LED_INIT() {
  // Enable the clock for the HS GPIO block and take it out of reset, then
  // configure the red LED pin as a digital output that starts out off.
  CLOCK_EnableClock(kCLOCK_HsGpio0);
  RESET_PeripheralReset(kHSGPIO0_RST_SHIFT_RSTn);
  gpio_pin_config_t pin_config = {kGPIO_DigitalOutput, LOGIC_LED_OFF};
  GPIO_PinInit(
      BOARD_LED_RED_GPIO,
      BOARD_LED_RED_GPIO_PORT,
      BOARD_LED_RED_GPIO_PIN,
      &pin_config);
}

void LED_TOGGLE() {
  LED_RED_TOGGLE();
}

/*!
 * @brief Busy-wait delay used to pace the LED blink.
 */
void delay(void) {
  volatile uint32_t i = 0;
  for (i = 0; i < 5000000; ++i) {
    __NOP();
  }
}

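// Override of ExecuTorch's weak PAL logging hook: route runtime log messages
// to the board's debug console so ET_LOG output shows up on the serial port.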
void et_pal_emit_log_message(
    et_timestamp_t timestamp,
    et_pal_log_level_t level,
    const char* filename,
    __ET_UNUSED const char* function,
    size_t line,
    const char* message,
    __ET_UNUSED size_t length) {
  PRINTF("\r%s\n", message);
}

int main(int argc, char** argv) {
  /* Init board hardware. */
  BOARD_InitBootPins();

  /* Initialize LED */
  LED_INIT();

  /* MUB init */
  MU_Init(APP_MU);

  /* Send flag to Core 0 to indicate Core 1 has started up */
  MU_SetFlags(APP_MU, BOOT_FLAG);

  BOARD_InitDebugConsole();
  ET_LOG(Info, "Booted up in DSP.");

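  // Initialize the ExecuTorch runtime (PAL, logging, etc.) before any other
  // runtime API is used.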
  torch::executor::runtime_init();

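  // model_pte.h bakes the serialized .pte program into the binary as a byte
  // array named model_pte; wrap it in a BufferDataLoader so the Program can be
  // parsed straight out of memory, with no filesystem involved.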
  auto loader =
      torch::executor::util::BufferDataLoader(model_pte, sizeof(model_pte));

  Result<torch::executor::Program> program =
      torch::executor::Program::load(&loader);
  if (!program.ok()) {
    ET_LOG(
        Error,
        "ET: Program loading failed @ %p: 0x%" PRIx32,
        model_pte,
        program.error());
    return -1;
  }

  ET_LOG(
      Info,
      "ET: Model buffer loaded, has %zu methods",
      program->num_methods());

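  // Run the first method in the program. The example models exported for this
  // runner contain a single method, typically named "forward".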
  const char* method_name = nullptr;
  {
    const auto method_name_result = program->get_method_name(0);
    ET_CHECK_MSG(method_name_result.ok(), "Program has no methods");
    method_name = *method_name_result;
  }
  ET_LOG(Info, "ET: Running method %s", method_name);

  Result<torch::executor::MethodMeta> method_meta =
      program->method_meta(method_name);
  if (!method_meta.ok()) {
    ET_LOG(
        Error,
        "ET: Failed to get method_meta for %s: 0x%x",
        method_name,
        (unsigned int)method_meta.error());
    return -1;
  }

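  // Allocator backing runtime structures that are not part of the memory plan
  // (e.g. bookkeeping created while loading the method). It hands out chunks
  // of the static method_allocator_pool declared above.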
  torch::executor::MemoryAllocator method_allocator{
      torch::executor::MemoryAllocator(
          sizeof(method_allocator_pool), method_allocator_pool)};

  std::vector<std::unique_ptr<uint8_t[]>> planned_buffers; // Owns the memory
  std::vector<torch::executor::Span<uint8_t>>
      planned_spans; // Passed to the allocator
  size_t num_memory_planned_buffers = method_meta->num_memory_planned_buffers();

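  // Each planned buffer is an arena whose size was fixed ahead of time by the
  // memory planner when the model was exported; allocate it on the heap here
  // and record a Span over it for the HierarchicalAllocator below.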
  for (size_t id = 0; id < num_memory_planned_buffers; ++id) {
    size_t buffer_size =
        static_cast<size_t>(method_meta->memory_planned_buffer_size(id).get());
    ET_LOG(
        Info,
        "ET: Setting up planned buffer %zu, size %zu.",
        id,
        buffer_size);

    planned_buffers.push_back(std::make_unique<uint8_t[]>(buffer_size));
    planned_spans.push_back({planned_buffers.back().get(), buffer_size});
  }

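  // The planned spans plus the method allocator together form the
  // MemoryManager that the Program uses to load (and later execute) the
  // method.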
  torch::executor::HierarchicalAllocator planned_memory(
      {planned_spans.data(), planned_spans.size()});

  torch::executor::MemoryManager memory_manager(
      &method_allocator, &planned_memory);

  Result<torch::executor::Method> method =
      program->load_method(method_name, &memory_manager);
  if (!method.ok()) {
    ET_LOG(
        Error,
        "Loading of method %s failed with status 0x%" PRIx32,
        method_name,
        method.error());
    return -1;
  }

  ET_LOG(Info, "Method loaded.");
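  // Allocate and register placeholder input tensors for the method; a real
  // application would fill these with actual input data instead.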
  torch::executor::util::PrepareInputTensors(*method);
  ET_LOG(Info, "Starting the model execution...");

  Error status = method->execute();
  ET_LOG(Info, "Executed model");
  if (status != Error::Ok) {
    ET_LOG(
        Error,
        "Execution of method %s failed with status 0x%" PRIx32,
        method_name,
        status);
  } else {
    ET_LOG(Info, "Model executed successfully.");
  }

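  // Blink the board LED forever so that completion is visible even without a
  // serial console attached.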
  while (1) {
    delay();
    LED_TOGGLE();
  }

  return 0;
}