
Commit 8155d42

shoumikhin authored and facebook-github-bot committed
Refactor namespace usage in module tests. (#5172)
Summary: Pull Request resolved: #5172. Differential Revision: D62366679
1 parent 99fbca3 commit 8155d42
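
This commit drops the `namespace torch::executor { ... }` wrapper around the tests and instead pulls in the public `::executorch::extension` and `::executorch::runtime` namespaces, spelling ATen-compatible types such as `exec_aten::Tensor` explicitly. For orientation, here is a condensed sketch of the resulting style, abridged from the diff below; the test name, the model path, and the module.h include path are illustrative assumptions, not part of this commit.

#include <array>

#include <gtest/gtest.h>

#include <executorch/extension/module/module.h> // assumed header path for Module

// After this commit the test file has no enclosing namespace; the extension
// and runtime namespaces are imported instead.
using namespace ::executorch::extension;
using namespace ::executorch::runtime;

TEST(ModuleNamespaceSketch, Execute) {
  // Previously the types below were unqualified (TensorImpl, ScalarType,
  // Tensor) because the whole file lived inside torch::executor.
  Module module("/path/to/model.pte"); // placeholder model path

  std::array<float, 1> input{1};
  std::array<int32_t, 1> sizes{1};
  exec_aten::TensorImpl tensor(
      exec_aten::ScalarType::Float, sizes.size(), sizes.data(), input.data());

  const auto result = module.execute(
      "forward", {exec_aten::Tensor(&tensor), exec_aten::Tensor(&tensor)});
  EXPECT_TRUE(result.ok());
}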

File tree

1 file changed: +56 -52 lines


extension/module/test/module_test.cpp

Lines changed: 56 additions & 52 deletions
@@ -15,9 +15,8 @@
 
 #include <executorch/extension/data_loader/file_data_loader.h>
 
-using namespace ::testing;
-
-namespace torch::executor {
+using namespace ::executorch::extension;
+using namespace ::executorch::runtime;
 
 class ModuleTest : public ::testing::Test {
  protected:
@@ -102,13 +101,13 @@ TEST_F(ModuleTest, TestMethodMeta) {
 
   const auto input_meta = meta->input_tensor_meta(0);
   EXPECT_TRUE(input_meta.ok());
-  EXPECT_EQ(input_meta->scalar_type(), ScalarType::Float);
+  EXPECT_EQ(input_meta->scalar_type(), exec_aten::ScalarType::Float);
   EXPECT_EQ(input_meta->sizes().size(), 1);
   EXPECT_EQ(input_meta->sizes()[0], 1);
 
   const auto output_meta = meta->output_tensor_meta(0);
   EXPECT_TRUE(output_meta.ok());
-  EXPECT_EQ(output_meta->scalar_type(), ScalarType::Float);
+  EXPECT_EQ(output_meta->scalar_type(), exec_aten::ScalarType::Float);
   EXPECT_EQ(output_meta->sizes().size(), 1);
   EXPECT_EQ(output_meta->sizes()[0], 1);
 }
@@ -125,11 +124,11 @@ TEST_F(ModuleTest, TestExecute) {
 
   std::array<float, 1> input{1};
   std::array<int32_t, 1> sizes{1};
-  TensorImpl tensor(
-      ScalarType::Float, sizes.size(), sizes.data(), input.data());
+  exec_aten::TensorImpl tensor(
+      exec_aten::ScalarType::Float, sizes.size(), sizes.data(), input.data());
 
-  const auto result =
-      module.execute("forward", {Tensor(&tensor), Tensor(&tensor)});
+  const auto result = module.execute(
+      "forward", {exec_aten::Tensor(&tensor), exec_aten::Tensor(&tensor)});
   EXPECT_TRUE(result.ok());
 
   EXPECT_TRUE(result.ok());
@@ -149,11 +148,11 @@ TEST_F(ModuleTest, TestExecutePreload) {
 
   std::array<float, 1> input{1};
   std::array<int32_t, 1> sizes{1};
-  TensorImpl tensor(
-      ScalarType::Float, sizes.size(), sizes.data(), input.data());
+  exec_aten::TensorImpl tensor(
+      exec_aten::ScalarType::Float, sizes.size(), sizes.data(), input.data());
 
-  const auto result =
-      module.execute("forward", {Tensor(&tensor), Tensor(&tensor)});
+  const auto result = module.execute(
+      "forward", {exec_aten::Tensor(&tensor), exec_aten::Tensor(&tensor)});
   EXPECT_TRUE(result.ok());
 
   const auto data = result->at(0).toTensor().const_data_ptr<float>();
@@ -169,11 +168,11 @@ TEST_F(ModuleTest, TestExecutePreload_method) {
 
   std::array<float, 1> input{1};
   std::array<int32_t, 1> sizes{1};
-  TensorImpl tensor(
-      ScalarType::Float, sizes.size(), sizes.data(), input.data());
+  exec_aten::TensorImpl tensor(
+      exec_aten::ScalarType::Float, sizes.size(), sizes.data(), input.data());
 
-  const auto result =
-      module.execute("forward", {Tensor(&tensor), Tensor(&tensor)});
+  const auto result = module.execute(
+      "forward", {exec_aten::Tensor(&tensor), exec_aten::Tensor(&tensor)});
   EXPECT_TRUE(result.ok());
 
   const auto data = result->at(0).toTensor().const_data_ptr<float>();
@@ -192,11 +191,11 @@ TEST_F(ModuleTest, TestExecutePreloadProgramAndMethod) {
 
   std::array<float, 1> input{1};
   std::array<int32_t, 1> sizes{1};
-  TensorImpl tensor(
-      ScalarType::Float, sizes.size(), sizes.data(), input.data());
+  exec_aten::TensorImpl tensor(
+      exec_aten::ScalarType::Float, sizes.size(), sizes.data(), input.data());
 
-  const auto result =
-      module.execute("forward", {Tensor(&tensor), Tensor(&tensor)});
+  const auto result = module.execute(
+      "forward", {exec_aten::Tensor(&tensor), exec_aten::Tensor(&tensor)});
   EXPECT_TRUE(result.ok());
 
   const auto data = result->at(0).toTensor().const_data_ptr<float>();
@@ -225,10 +224,11 @@ TEST_F(ModuleTest, TestGet) {
 
   std::array<float, 1> input{1};
   std::array<int32_t, 1> sizes{1};
-  TensorImpl tensor(
-      ScalarType::Float, sizes.size(), sizes.data(), input.data());
+  exec_aten::TensorImpl tensor(
+      exec_aten::ScalarType::Float, sizes.size(), sizes.data(), input.data());
 
-  const auto result = module.get("forward", {Tensor(&tensor), Tensor(&tensor)});
+  const auto result = module.get(
+      "forward", {exec_aten::Tensor(&tensor), exec_aten::Tensor(&tensor)});
 
   EXPECT_TRUE(result.ok());
   const auto data = result->toTensor().const_data_ptr<float>();
@@ -240,20 +240,22 @@ TEST_F(ModuleTest, TestForward) {
 
   std::array<float, 1> input{1};
   std::array<int32_t, 1> sizes{1};
-  TensorImpl tensor(
-      ScalarType::Float, sizes.size(), sizes.data(), input.data());
+  exec_aten::TensorImpl tensor(
+      exec_aten::ScalarType::Float, sizes.size(), sizes.data(), input.data());
 
-  const auto result = module->forward({Tensor(&tensor), Tensor(&tensor)});
+  const auto result =
+      module->forward({exec_aten::Tensor(&tensor), exec_aten::Tensor(&tensor)});
   EXPECT_TRUE(result.ok());
 
   const auto data = result->at(0).toTensor().const_data_ptr<float>();
 
   EXPECT_NEAR(data[0], 2, 1e-5);
 
   std::array<float, 2> input2{2, 3};
-  TensorImpl tensor2(
-      ScalarType::Float, sizes.size(), sizes.data(), input2.data());
-  const auto result2 = module->forward({Tensor(&tensor2), Tensor(&tensor2)});
+  exec_aten::TensorImpl tensor2(
+      exec_aten::ScalarType::Float, sizes.size(), sizes.data(), input2.data());
+  const auto result2 = module->forward(
+      {exec_aten::Tensor(&tensor2), exec_aten::Tensor(&tensor2)});
   EXPECT_TRUE(result2.ok());
 
   const auto data2 = result->at(0).toTensor().const_data_ptr<float>();
@@ -298,10 +300,9 @@ TEST_F(ModuleTest, TestProgramSharingBetweenModules) {
 }
 
 TEST_F(ModuleTest, TestProgramSharingAndDataLoaderManagement) {
-  auto loader = util::FileDataLoader::from(model_path_.c_str());
+  auto loader = FileDataLoader::from(model_path_.c_str());
   EXPECT_TRUE(loader.ok());
-  auto data_loader =
-      std::make_unique<util::FileDataLoader>(std::move(loader.get()));
+  auto data_loader = std::make_unique<FileDataLoader>(std::move(loader.get()));
 
   auto module1 = std::make_unique<Module>(std::move(data_loader));
 
@@ -311,35 +312,35 @@ TEST_F(ModuleTest, TestProgramSharingAndDataLoaderManagement) {
 
   std::array<float, 1> input{1};
   std::array<int32_t, 1> sizes{1};
-  TensorImpl tensor(
-      ScalarType::Float, sizes.size(), sizes.data(), input.data());
+  exec_aten::TensorImpl tensor(
+      exec_aten::ScalarType::Float, sizes.size(), sizes.data(), input.data());
 
-  auto result1 =
-      module1->execute("forward", {Tensor(&tensor), Tensor(&tensor)});
+  auto result1 = module1->execute(
+      "forward", {exec_aten::Tensor(&tensor), exec_aten::Tensor(&tensor)});
   EXPECT_TRUE(result1.ok());
 
   auto module2 = std::make_unique<Module>(module1->program());
 
-  auto result2 =
-      module2->execute("forward", {Tensor(&tensor), Tensor(&tensor)});
+  auto result2 = module2->execute(
+      "forward", {exec_aten::Tensor(&tensor), exec_aten::Tensor(&tensor)});
   EXPECT_TRUE(result2.ok());
 
   module1 = std::make_unique<Module>("/path/to/nonexistent/file.pte");
   EXPECT_FALSE(module1->is_loaded());
 
-  auto result3 =
-      module2->execute("forward", {Tensor(&tensor), Tensor(&tensor)});
+  auto result3 = module2->execute(
+      "forward", {exec_aten::Tensor(&tensor), exec_aten::Tensor(&tensor)});
   EXPECT_TRUE(result3.ok());
 }
 
 TEST_F(ModuleTest, TestProgramPersistenceAndReuseAfterModuleDestruction) {
   std::shared_ptr<Program> shared_program;
 
   {
-    auto loader = util::FileDataLoader::from(model_path_.c_str());
+    auto loader = FileDataLoader::from(model_path_.c_str());
     EXPECT_TRUE(loader.ok());
     auto data_loader =
-        std::make_unique<util::FileDataLoader>(std::move(loader.get()));
+        std::make_unique<FileDataLoader>(std::move(loader.get()));
     auto* data_loader_ptr = data_loader.get();
 
     Module module(std::move(data_loader));
@@ -362,10 +363,11 @@ TEST_F(ModuleTest, TestProgramPersistenceAndReuseAfterModuleDestruction) {
 
   std::array<float, 1> input{1};
   std::array<int32_t, 1> sizes{1};
-  TensorImpl tensor(
-      ScalarType::Float, sizes.size(), sizes.data(), input.data());
+  exec_aten::TensorImpl tensor(
+      exec_aten::ScalarType::Float, sizes.size(), sizes.data(), input.data());
 
-  auto result = module.execute("forward", {Tensor(&tensor), Tensor(&tensor)});
+  auto result = module.execute(
+      "forward", {exec_aten::Tensor(&tensor), exec_aten::Tensor(&tensor)});
   EXPECT_TRUE(result.ok());
 
   auto data = result->at(0).toTensor().const_data_ptr<float>();
@@ -391,10 +393,14 @@ TEST_F(ModuleTest, TestConcurrentExecutionWithSharedProgram) {
                      const std::array<float, 1>& input) {
     Module module(program);
     std::array<int32_t, 1> sizes{1};
-    TensorImpl tensor(
-        ScalarType::Float, sizes.size(), sizes.data(), (void*)input.data());
-
-    const auto result = module.forward({Tensor(&tensor), Tensor(&tensor)});
+    exec_aten::TensorImpl tensor(
+        exec_aten::ScalarType::Float,
+        sizes.size(),
+        sizes.data(),
+        (void*)input.data());
+
+    const auto result = module.forward(
+        {exec_aten::Tensor(&tensor), exec_aten::Tensor(&tensor)});
     EXPECT_TRUE(result.ok());
 
     const auto data = result->at(0).toTensor().const_data_ptr<float>();
@@ -413,5 +419,3 @@ TEST_F(ModuleTest, TestConcurrentExecutionWithSharedProgram) {
   t4.join();
   t5.join();
 }
-
-} // namespace torch::executor
