[executorch] Program::Load -> Program::load for all of //executorch #385

Closed · wants to merge 1 commit
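
This PR renames the static Program::Load factory to Program::load across //executorch; the arguments and behavior are unchanged in every call site touched below. For orientation, a minimal sketch of a typical call site after the rename, pieced together from the diffs in this PR (includes and namespace setup are elided, and model_path is an illustrative placeholder):

    // Open the model file with a data loader (unchanged by this PR).
    Result<FileDataLoader> loader = FileDataLoader::From(model_path);
    ET_CHECK_MSG(
        loader.ok(), "FileDataLoader::From() failed: 0x%" PRIx32, loader.error());

    // Formerly Program::Load(&loader.get()); only the method name changes.
    Result<Program> program = Program::load(&loader.get());
    ET_CHECK_MSG(
        program.ok(), "Program::load() failed: 0x%" PRIx32, program.error());

    // Call sites that request extra verification pass it the same way as before:
    //   Program::load(&loader.get(), Program::Verification::InternalConsistency);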
@@ -107,7 +107,7 @@ int main(int argc, char** argv) {

// Parse the program file. This is immutable, and can also be reused
// between multiple execution invocations across multiple threads.
- Result<Program> program = Program::Load(&buffer_data_loader);
+ Result<Program> program = Program::load(&buffer_data_loader);
if (!program.ok()) {
ET_LOG(Error, "Failed to parse model file %s", model_path);
return 1;
2 changes: 1 addition & 1 deletion examples/executor_runner/executor_runner.cpp
@@ -67,7 +67,7 @@ int main(int argc, char** argv) {

// Parse the program file. This is immutable, and can also be reused between
// multiple execution invocations across multiple threads.
- Result<Program> program = Program::Load(&loader.get());
+ Result<Program> program = Program::load(&loader.get());
if (!program.ok()) {
ET_LOG(Error, "Failed to parse model file %s", model_path);
return 1;
2 changes: 1 addition & 1 deletion exir/backend/test/demos/rpc/ExecutorBackend.cpp
@@ -56,7 +56,7 @@ class ExecutorBackend final : public PyTorchBackendInterface {
// Can't free `processed` because the program will point into that memory.

// Try loading the program.
- Result<Program> program_result = Program::Load(loader);
+ Result<Program> program_result = Program::load(loader);
if (!program_result.ok()) {
return program_result.error();
}
2 changes: 1 addition & 1 deletion extension/pybindings/pybindings.cpp
@@ -68,7 +68,7 @@ class Module final {
explicit Module(std::unique_ptr<DataLoader> loader)
: loader_(std::move(loader)) {
runtime_init();
- Result<Program> program = Program::Load(
+ Result<Program> program = Program::load(
loader_.get(), Program::Verification::InternalConsistency);
THROW_IF_ERROR(
program.error(),
2 changes: 1 addition & 1 deletion runtime/executor/test/allocation_failure_stress_test.cpp
@@ -46,7 +46,7 @@ class AllocationFailureStressTest : public ::testing::Test {
loader_ = std::make_unique<FileDataLoader>(std::move(loader.get()));

// Use it to load the program.
- Result<Program> program = Program::Load(
+ Result<Program> program = Program::load(
loader_.get(), Program::Verification::InternalConsistency);
ASSERT_EQ(program.error(), Error::Ok);
program_ = std::make_unique<Program>(std::move(program.get()));
8 changes: 4 additions & 4 deletions runtime/executor/test/backend_integration_test.cpp
@@ -295,7 +295,7 @@ TEST_P(BackendIntegrationTest, BasicInitSucceeds) {
Result<FileDataLoader> loader = FileDataLoader::From(program_path());
ASSERT_EQ(loader.error(), Error::Ok);

- Result<Program> program = Program::Load(&loader.get());
+ Result<Program> program = Program::load(&loader.get());
ASSERT_EQ(program.error(), Error::Ok);

ManagedMemoryManager mmm(kDefaultNonConstMemBytes, kDefaultRuntimeMemBytes);
@@ -326,7 +326,7 @@ TEST_P(BackendIntegrationTest, FreeingProcessedBufferSucceeds) {
DataLoaderSpy spy_loader(&loader.get());

// Load the program.
- Result<Program> program = Program::Load(&spy_loader);
+ Result<Program> program = Program::load(&spy_loader);
ASSERT_EQ(program.error(), Error::Ok);
ManagedMemoryManager mmm(kDefaultNonConstMemBytes, kDefaultRuntimeMemBytes);
Result<Method> method_res = program->load_method("forward", &mmm.get());
@@ -390,7 +390,7 @@ TEST_P(BackendIntegrationTest, EndToEndTestWithProcessedAsHandle) {
DataLoaderSpy spy_loader(&loader.get());

// Load the program.
- Result<Program> program = Program::Load(&spy_loader);
+ Result<Program> program = Program::load(&spy_loader);
ASSERT_EQ(program.error(), Error::Ok);

// Hold onto the address of the processed buffer so we can compare against
@@ -538,7 +538,7 @@ TEST_P(DelegateDataAlignmentTest, ExpectedDataAlignment) {
DataLoaderSpy spy_loader(&loader.get());

// Load the program.
- Result<Program> program = Program::Load(&spy_loader);
+ Result<Program> program = Program::load(&spy_loader);
ASSERT_EQ(program.error(), Error::Ok);
ManagedMemoryManager mmm(kDefaultNonConstMemBytes, kDefaultRuntimeMemBytes);
Result<Method> method = program->load_method("forward", &mmm.get());
2 changes: 1 addition & 1 deletion runtime/executor/test/execution_plan_test.cpp
@@ -42,7 +42,7 @@ class ExecutionPlanTest : public ::testing::Test {
loader_ = std::make_unique<FileDataLoader>(std::move(loader.get()));

// Use it to load the program.
- Result<Program> program = Program::Load(
+ Result<Program> program = Program::load(
loader_.get(), Program::Verification::InternalConsistency);
ASSERT_EQ(program.error(), Error::Ok);
program_ = std::make_unique<Program>(std::move(program.get()));
2 changes: 1 addition & 1 deletion runtime/executor/test/kernel_integration_test.cpp
@@ -143,7 +143,7 @@ class KernelIntegrationTest : public ::testing::Test {
loader_ = std::make_unique<FileDataLoader>(std::move(loader.get()));

// Use it to load the program.
- Result<Program> program = Program::Load(
+ Result<Program> program = Program::load(
loader_.get(), Program::Verification::InternalConsistency);
ASSERT_EQ(program.error(), Error::Ok);
program_ = std::make_unique<Program>(std::move(program.get()));
2 changes: 1 addition & 1 deletion runtime/executor/test/kernel_resolution_test.cpp
@@ -57,7 +57,7 @@ class KernelResolutionTest : public ::testing::Test {
loader_ = std::make_unique<FileDataLoader>(std::move(loader.get()));

// Use it to load the program.
- Result<Program> program = Program::Load(
+ Result<Program> program = Program::load(
loader_.get(), Program::Verification::InternalConsistency);
ASSERT_EQ(program.error(), Error::Ok);
program_ = std::make_unique<Program>(std::move(program.get()));
2 changes: 1 addition & 1 deletion runtime/executor/test/method_meta_test.cpp
@@ -34,7 +34,7 @@ class MethodMetaTest : public ::testing::Test {
loader_ = std::make_unique<FileDataLoader>(std::move(loader.get()));

// Use it to load the program.
- Result<Program> program = Program::Load(
+ Result<Program> program = Program::load(
loader_.get(), Program::Verification::InternalConsistency);
ASSERT_EQ(program.error(), Error::Ok);
program_ = std::make_unique<Program>(std::move(program.get()));
2 changes: 1 addition & 1 deletion runtime/executor/test/method_test.cpp
@@ -42,7 +42,7 @@ class MethodTest : public ::testing::Test {
std::make_unique<FileDataLoader>(std::move(loader.get()))});

// Use it to load the program.
- Result<Program> program = Program::Load(
+ Result<Program> program = Program::load(
loaders_[module_name].get(),
Program::Verification::InternalConsistency);
ASSERT_EQ(program.error(), Error::Ok);
2 changes: 1 addition & 1 deletion sdk/runners/executor_runner.cpp
@@ -218,7 +218,7 @@ int main(int argc, char** argv) {
// multiple execution invocations across multiple threads.
uint32_t prof_tok = EXECUTORCH_BEGIN_PROF("de-serialize model");
Result<Program> program =
- torch::executor::Program::Load(program_data.program_loader());
+ torch::executor::Program::load(program_data.program_loader());
EXECUTORCH_END_PROF(prof_tok);
if (!program.ok()) {
ET_LOG(Error, "Failed to parse model file %s", FLAGS_model_path.c_str());
2 changes: 1 addition & 1 deletion test/multi_runner.cpp
@@ -115,7 +115,7 @@ class PreparedModel final {

private:
static Program load_program_or_die(DataLoader& loader) {
- Result<Program> program = Program::Load(&loader);
+ Result<Program> program = Program::load(&loader);
ET_CHECK(program.ok());
return std::move(program.get());
}
2 changes: 1 addition & 1 deletion test/relocatable_runner.cpp
@@ -67,7 +67,7 @@ Program* load_program(
new (loader) util::BufferDataLoader(file_data, file_data_len);

// Load the program.
- Result<Program> program_result = Program::Load(loader);
+ Result<Program> program_result = Program::load(loader);
ET_CHECK(program_result.ok());

// Move the Program into worker memory.
4 changes: 2 additions & 2 deletions test/size_test.cpp
@@ -57,10 +57,10 @@ int main(int argc, char** argv) {
loader.ok(), "FileDataLoader::From() failed: 0x%" PRIx32, loader.error());

uint32_t prof_tok = EXECUTORCH_BEGIN_PROF("de-serialize model");
- const auto program = Program::Load(&loader.get());
+ const auto program = Program::load(&loader.get());
EXECUTORCH_END_PROF(prof_tok);
ET_CHECK_MSG(
- program.ok(), "Program::Load() failed: 0x%" PRIx32, program.error());
+ program.ok(), "Program::load() failed: 0x%" PRIx32, program.error());
ET_LOG(Info, "Program file %s loaded.", argv[1]);

// Use the first method in the program.