Skip to content

Commit 187bd05

Browse files
authored
Remove underscore prefixes from compiler.h macros
Differential Revision: D61346624 Pull Request resolved: #4737
1 parent d3da92d commit 187bd05

File tree

91 files changed

+334
-317
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

91 files changed

+334
-317
lines changed

backends/apple/coreml/runtime/test/CoreMLBackendDelegateTests.mm

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@
3232
{}
3333

3434
Result<FreeableBuffer> load(
35-
size_t offset, size_t size, __ET_UNUSED const DataLoader::SegmentInfo& segment_info) const override {
35+
size_t offset, size_t size, ET_UNUSED const DataLoader::SegmentInfo& segment_info) const override {
3636
NSData *subdata = [data_ subdataWithRange:NSMakeRange(offset, size)];
3737
return FreeableBuffer(subdata.bytes, size, nullptr);
3838
}

backends/apple/mps/runtime/MPSBackend.mm

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ bool is_available() const override {
5555

5656
// Function that actually executes the model in the backend.
5757
Error execute(
58-
__ET_UNUSED BackendExecutionContext& context,
58+
ET_UNUSED BackendExecutionContext& context,
5959
DelegateHandle* handle,
6060
EValue** args) const override {
6161
auto executor = static_cast<mps::delegate::MPSExecutor*>(handle);

backends/apple/mps/runtime/MPSCompiler.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ class MPSCompiler {
2424
// Takes Flatbuffer Serialized MPS Model and rebuilds the MPSGraphExecutable
2525
// returns an executor object that holds the MPS runtime object which we
2626
// can then use to set inputs and run inference using the MPSGraphExecutable.
27-
__ET_NODISCARD static Error compileModel(
27+
ET_NODISCARD static Error compileModel(
2828
const void* buffer_pointer,
2929
size_t num_bytes,
3030
MPSExecutor* executor,

backends/apple/mps/runtime/MPSCompiler.mm

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@
3232
Builds the mps runtime object using the buffer pointer. The buffer pointer
3333
must be a valid pointer to the serialized mps object.
3434
*/
35-
__ET_NODISCARD Error MPSCompiler::compileModel(
35+
ET_NODISCARD Error MPSCompiler::compileModel(
3636
const void* buffer_pointer,
3737
size_t num_bytes,
3838
MPSExecutor* executor,

backends/apple/mps/runtime/MPSExecutor.h

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -73,9 +73,9 @@ class MPSExecutor {
7373
return _executable;
7474
}
7575

76-
__ET_NODISCARD Error forward(std::vector<const Tensor*>& outputs);
76+
ET_NODISCARD Error forward(std::vector<const Tensor*>& outputs);
7777

78-
__ET_NODISCARD Error
78+
ET_NODISCARD Error
7979
set_inputs_outputs(std::vector<const Tensor*>& inputs, std::vector<const Tensor*>& outputs);
8080

8181
Error initDataBuffers();

backends/apple/mps/runtime/MPSExecutor.mm

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ @interface MPSGraphExecutable()
3737
_outputsArray = [[NSMutableArray<MPSGraphTensorData *> alloc] initWithCapacity:getNumOutputs()];
3838
}
3939

40-
__ET_NODISCARD Error
40+
ET_NODISCARD Error
4141
MPSExecutor::set_inputs_outputs(std::vector<const Tensor*>& inputs, std::vector<const Tensor*>& outputs) {
4242
ET_CHECK_OR_RETURN_ERROR(inputs.size() == getNumInputs(), Internal, "Inputs mismatch");
4343
ET_CHECK_OR_RETURN_ERROR(outputs.size() == getNumOutputs(), Internal, "Outputs mismatch");
@@ -61,7 +61,7 @@ @interface MPSGraphExecutable()
6161
return Error::Ok;
6262
}
6363

64-
__ET_NODISCARD Error MPSExecutor::forward(std::vector<const Tensor*>& outputs) {
64+
ET_NODISCARD Error MPSExecutor::forward(std::vector<const Tensor*>& outputs) {
6565
Error err = Error::Ok;
6666
MPSStream* mpsStream = getDefaultMPSStream();
6767
if (mpsStream->commitAndContinueEnabled() || mpsStream->hasLiveCommandBuffer()) {

backends/apple/mps/runtime/MPSStream.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ class MPSStream {
6363
MPSCommandBuffer* commandBuffer();
6464
id<MTLComputeCommandEncoder> commandEncoder();
6565
void endKernelCoalescing();
66-
__ET_NODISCARD Error synchronize(SyncType syncType);
66+
ET_NODISCARD Error synchronize(SyncType syncType);
6767
bool commitAndContinueEnabled();
6868
void copy(
6969
id<MTLBuffer> srcBuffer,

backends/apple/mps/runtime/MPSStream.mm

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ @interface MPSGraphExecutionDescriptor ()
5555
return _commandEncoder;
5656
}
5757

58-
__ET_NODISCARD
58+
ET_NODISCARD
5959
Error MPSStream::synchronize(SyncType syncType) {
6060
endKernelCoalescing();
6161
switch(syncType) {
@@ -157,7 +157,7 @@ @interface MPSGraphExecutionDescriptor ()
157157
endKernelCoalescing();
158158
if (@available(iOS 13.0, *)) {
159159
id<MTLBlitCommandEncoder> blitEncoder = [commandBuffer() blitCommandEncoder];
160-
160+
161161
[blitEncoder copyFromBuffer:srcBuffer
162162
sourceOffset:(NSUInteger)srcOffset
163163
toBuffer:dstBuffer

backends/cadence/executor_runner.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -83,10 +83,10 @@ void et_pal_emit_log_message(
8383
et_timestamp_t timestamp,
8484
et_pal_log_level_t level,
8585
const char* filename,
86-
__ET_UNUSED const char* function,
86+
ET_UNUSED const char* function,
8787
size_t line,
8888
const char* message,
89-
__ET_UNUSED size_t length) {
89+
ET_UNUSED size_t length) {
9090
PRINTF("\r%s\n", message);
9191
}
9292

backends/mediatek/runtime/NeuronBackend.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ Result<DelegateHandle*> NeuronBackend::init(
6767
}
6868

6969
Error NeuronBackend::execute(
70-
__ET_UNUSED BackendExecutionContext& context,
70+
ET_UNUSED BackendExecutionContext& context,
7171
DelegateHandle* handle,
7272
EValue** args) const {
7373
NeuronExecuTorchDelegate* delegate =

backends/mediatek/runtime/include/NeuronBackend.h

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ class NeuronBackend final : public PyTorchBackendInterface {
3434
ArrayRef<CompileSpec> compile_specs) const override;
3535

3636
Error execute(
37-
__ET_UNUSED BackendExecutionContext& context,
37+
ET_UNUSED BackendExecutionContext& context,
3838
DelegateHandle* handle,
3939
EValue** args) const override;
4040

@@ -111,7 +111,7 @@ class NeuronExecuTorchDelegate {
111111
return NEURON_NO_ERROR;
112112
}
113113

114-
Error execute(__ET_UNUSED BackendExecutionContext& context, EValue** args)
114+
Error execute(ET_UNUSED BackendExecutionContext& context, EValue** args)
115115
const;
116116

117117
private:

backends/qualcomm/runtime/QnnExecuTorchBackend.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ class QnnExecuTorchBackend final : public PyTorchBackendInterface {
2424
ArrayRef<CompileSpec> compile_specs) const override;
2525

2626
Error execute(
27-
__ET_UNUSED BackendExecutionContext& context,
27+
ET_UNUSED BackendExecutionContext& context,
2828
DelegateHandle* handle,
2929
EValue** args) const override;
3030

backends/vulkan/runtime/VulkanBackend.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -421,7 +421,7 @@ class VulkanBackend final : public PyTorchBackendInterface {
421421
return true;
422422
}
423423

424-
__ET_NODISCARD Error
424+
ET_NODISCARD Error
425425
compileModel(const void* buffer_pointer, ComputeGraph* compute_graph) const {
426426
Result<VulkanDelegateHeader> header =
427427
VulkanDelegateHeader::parse(buffer_pointer);
@@ -485,7 +485,7 @@ class VulkanBackend final : public PyTorchBackendInterface {
485485
}
486486

487487
Error execute(
488-
__ET_UNUSED BackendExecutionContext& context,
488+
ET_UNUSED BackendExecutionContext& context,
489489
DelegateHandle* handle,
490490
EValue** args) const override {
491491
EXECUTORCH_SCOPE_PROF("VulkanBackend::execute");

backends/xnnpack/runtime/XNNCompiler.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1608,7 +1608,7 @@ Builds the xnnpack runtime object using the buffer pointer. The buffer pointer
16081608
must be a valid pointer to the serialized xnnpack object. It also fills the
16091609
XNNExecutor object with the built xnn_runtime and the input/output ids.
16101610
*/
1611-
__ET_NODISCARD Error XNNCompiler::compileModel(
1611+
ET_NODISCARD Error XNNCompiler::compileModel(
16121612
const void* buffer_pointer,
16131613
size_t num_bytes,
16141614
XNNExecutor* executor,

backends/xnnpack/runtime/XNNCompiler.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ class XNNCompiler {
2525
// Takes Flatbuffer Serialized XNNPACK Model and rebuilds the xnn-subgraph
2626
// returns an executor object that holds the xnn runtime object which we
2727
// can then use to set inputs and run inference using the xnn graph.
28-
__ET_NODISCARD static Error compileModel(
28+
ET_NODISCARD static Error compileModel(
2929
const void* buffer_pointer,
3030
size_t num_bytes,
3131
XNNExecutor* executor,

backends/xnnpack/runtime/XNNExecutor.cpp

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ using SizesType = exec_aten::SizesType;
2222
* inputs/outputs externals_ is resized to the total number of inputs and
2323
* outputs
2424
*/
25-
__ET_NODISCARD Error XNNExecutor::initialize(
25+
ET_NODISCARD Error XNNExecutor::initialize(
2626
xnn_runtime_t runtime,
2727
std::vector<uint32_t>&& input_ids,
2828
std::vector<uint32_t>&& output_ids) {
@@ -62,7 +62,7 @@ __ET_NODISCARD Error XNNExecutor::initialize(
6262
* runtime correspond to their index in the list of arg passed into
6363
* delegate->execute()
6464
*/
65-
__ET_NODISCARD Error XNNExecutor::prepare_args(EValue** args) {
65+
ET_NODISCARD Error XNNExecutor::prepare_args(EValue** args) {
6666
// Create xnn_externals_value from evalue args
6767
xnn_status status;
6868
for (uint32_t i = 0; i < externals_.size(); ++i) {
@@ -128,7 +128,7 @@ __ET_NODISCARD Error XNNExecutor::prepare_args(EValue** args) {
128128
* We first setup the runtime by feeding the externals_ to runtime setup.
129129
* After which we then execute the runtime through invoke_runtime.
130130
*/
131-
__ET_NODISCARD Error XNNExecutor::forward(BackendExecutionContext& context) {
131+
ET_NODISCARD Error XNNExecutor::forward(BackendExecutionContext& context) {
132132
ET_CHECK_OR_RETURN_ERROR(
133133
runtime_ != nullptr,
134134
Internal,
@@ -180,7 +180,7 @@ __ET_NODISCARD Error XNNExecutor::forward(BackendExecutionContext& context) {
180180
* XNNPACK gives the index tensor to us as int32, we need to convert it
181181
* back to int64 for ExecuTorch.
182182
*/
183-
__ET_NODISCARD Error XNNExecutor::resize_outputs(EValue** args) const {
183+
ET_NODISCARD Error XNNExecutor::resize_outputs(EValue** args) const {
184184
size_t output_idx_start = input_ids_.size();
185185
for (size_t i = output_idx_start; i < externals_.size(); ++i) {
186186
uint32_t ext_id = externals_[i].id;

backends/xnnpack/runtime/XNNExecutor.h

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ class XNNExecutor {
5151
* The input/output ids are expected to be sorted in order of their
5252
* flatbuffer id_outs
5353
*/
54-
__ET_NODISCARD Error initialize(
54+
ET_NODISCARD Error initialize(
5555
xnn_runtime_t runtime,
5656
std::vector<uint32_t>&& input_ids,
5757
std::vector<uint32_t>&& output_ids);
@@ -62,19 +62,19 @@ class XNNExecutor {
6262
* input shapes will be propagated through the runtime, and perform
6363
* any additional memory planning as needed
6464
*/
65-
__ET_NODISCARD Error prepare_args(EValue** args);
65+
ET_NODISCARD Error prepare_args(EValue** args);
6666

6767
/**
6868
* Executes the graph using the args prepared at prepare_args().
6969
*/
70-
__ET_NODISCARD Error forward(BackendExecutionContext& context);
70+
ET_NODISCARD Error forward(BackendExecutionContext& context);
7171

7272
/**
7373
* Prepares the outputs to be returned by the delegate
7474
*
7575
* Performs any post processing of outputs like tensor resizing
7676
*/
77-
__ET_NODISCARD Error resize_outputs(EValue** args) const;
77+
ET_NODISCARD Error resize_outputs(EValue** args) const;
7878

7979
friend class XNNCompiler;
8080
};

backends/xnnpack/threadpool/threadpool.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -117,7 +117,7 @@ ThreadPool* get_threadpool() {
117117
// @lint-ignore CLANGTIDY facebook-hte-std::call_once
118118
std::call_once(
119119
flag, []() { pthread_atfork(nullptr, nullptr, child_atfork); });
120-
if __ET_UNLIKELY (leak_corrupted_threadpool) {
120+
if ET_UNLIKELY (leak_corrupted_threadpool) {
121121
leak_corrupted_threadpool = false;
122122
if (auto leaked = threadpool.release()) {
123123
auto t = leaked->get_thread_count();

docs/Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
#
33

44
# You can set these variables from the command line.
5-
# SPHINXOPTS = -WT --keep-going TODO(T165752164) fix sphinx warnings around preprocess macros in cpp like __ET_DEPRECATED
5+
# SPHINXOPTS = -WT --keep-going TODO(T165752164) fix sphinx warnings around preprocess macros in cpp like ET_DEPRECATED
66
SPHINXBUILD = sphinx-build
77
SPHINXPROJ = ExecuTorch
88
SOURCEDIR = source

docs/source/api-life-cycle.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -113,11 +113,11 @@ usage</a>
113113
</td>
114114
<td>
115115

116-
Use <code>__ET_DEPRECATED</code> macros. See <a href="https://github.com/pytorch/executorch/blob/8e0f856ee269b319ac4195509cf31e3f548aa0e8/runtime/executor/program.h#L81">example usage</a>
116+
Use <code>ET_DEPRECATED</code> macros. See <a href="https://github.com/pytorch/executorch/blob/8e0f856ee269b319ac4195509cf31e3f548aa0e8/runtime/executor/program.h#L81">example usage</a>
117117

118118
<p>
119119
<p>
120-
Use <code>__ET_EXPERIMENTAL</code> macros (TODO not yet implemented)
120+
Use <code>ET_EXPERIMENTAL</code> macros (TODO not yet implemented)
121121
</ul>
122122
</td>
123123
<td>

docs/source/compiler-delegate-and-partitioner.md

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -87,22 +87,22 @@ function which will be called when the program is out of its lifespan.
8787

8888
```cpp
8989
// Runtime check
90-
__ET_NODISCARD bool is_available();
90+
ET_NODISCARD bool is_available();
9191

9292
// Runtime initialization
93-
__ET_NODISCARD virtual Result<DelegateHandle*> init(
93+
ET_NODISCARD virtual Result<DelegateHandle*> init(
9494
BackendInitContext& context,
9595
FreeableBuffer* processed,
9696
ArrayRef<CompileSpec> compile_specs);
9797

9898
// Runtime execution
99-
__ET_NODISCARD virtual Error execute(
99+
ET_NODISCARD virtual Error execute(
100100
BackendExecutionContext& context,
101101
DelegateHandle* handle,
102102
EValue** args);
103103

104104
// [optional] Runtime destroy. Destroy the resource held by the backend
105-
virtual void destroy(__ET_UNUSED DelegateHandle* handle);
105+
virtual void destroy(ET_UNUSED DelegateHandle* handle);
106106
```
107107
108108
The diagram looks like following
@@ -114,7 +114,7 @@ The diagram looks like following
114114
115115
In order to make backend available to ExecuTorch runtime, it must be registered via the `register_backend` API:
116116
```cpp
117-
__ET_NODISCARD Error register_backend(const Backend& backend);
117+
ET_NODISCARD Error register_backend(const Backend& backend);
118118
```
119119

120120
Static registration, i.e., at library init or load time, of a backend can be achieved as follows:

docs/website/docs/tutorials/bundled_program.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,7 @@ To execute the program on the bundled input, we need to load the bundled input i
8888
* @returns Return Error::Ok if load successfully, or the error happens during
8989
* execution.
9090
*/
91-
__ET_NODISCARD Error LoadBundledInput(
91+
ET_NODISCARD Error LoadBundledInput(
9292
Method& method,
9393
serialized_bundled_program* bundled_program_ptr,
9494
size_t testset_idx);
@@ -111,7 +111,7 @@ We call `torch::executor::bundled_program::VerifyResultWithBundledExpectedOutput
111111
* @returns Return Error::Ok if two outputs match, or the error happens during
112112
* execution.
113113
*/
114-
__ET_NODISCARD Error VerifyResultWithBundledExpectedOutput(
114+
ET_NODISCARD Error VerifyResultWithBundledExpectedOutput(
115115
Method& method,
116116
serialized_bundled_program* bundled_program_ptr,
117117
size_t testset_idx,

examples/apple/coreml/executor_runner/main.mm

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -160,7 +160,7 @@ Args parse_command_line_args(NSArray<NSString *> *args) {
160160
:data_(read_data(filePath))
161161
{}
162162

163-
Result<FreeableBuffer> load(size_t offset, size_t size, __ET_UNUSED const DataLoader::SegmentInfo& segment_info) const override {
163+
Result<FreeableBuffer> load(size_t offset, size_t size, ET_UNUSED const DataLoader::SegmentInfo& segment_info) const override {
164164
NSData *subdata = [data_ subdataWithRange:NSMakeRange(offset, size)];
165165
return FreeableBuffer(subdata.bytes, size, nullptr);
166166
}

examples/arm/executor_runner/arm_executor_runner.cpp

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ unsigned char __attribute__((
5252

5353
void et_pal_init(void) {}
5454

55-
__ET_NORETURN void et_pal_abort(void) {
55+
ET_NORETURN void et_pal_abort(void) {
5656
#ifndef SEMIHOSTING
5757
__builtin_trap();
5858
#else
@@ -64,13 +64,13 @@ __ET_NORETURN void et_pal_abort(void) {
6464
* Emit a log message via platform output (serial port, console, etc).
6565
*/
6666
void et_pal_emit_log_message(
67-
__ET_UNUSED et_timestamp_t timestamp,
67+
ET_UNUSED et_timestamp_t timestamp,
6868
et_pal_log_level_t level,
6969
const char* filename,
70-
__ET_UNUSED const char* function,
70+
ET_UNUSED const char* function,
7171
size_t line,
7272
const char* message,
73-
__ET_UNUSED size_t length) {
73+
ET_UNUSED size_t length) {
7474
fprintf(stderr, "%c executorch:%s:%zu] %s\n", level, filename, line, message);
7575
}
7676

exir/backend/test/demos/rpc/ExecutorBackend.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ class ExecutorBackend final : public PyTorchBackendInterface {
4747
Result<DelegateHandle*> init(
4848
BackendInitContext& context,
4949
FreeableBuffer* processed,
50-
__ET_UNUSED ArrayRef<CompileSpec> compile_specs) const override {
50+
ET_UNUSED ArrayRef<CompileSpec> compile_specs) const override {
5151
// `processed` contains an executorch program. Wrap it in a DataLoader that
5252
// will return the data directly without copying it.
5353
MemoryAllocator* runtime_allocator = context.get_runtime_allocator();
@@ -129,7 +129,7 @@ class ExecutorBackend final : public PyTorchBackendInterface {
129129
}
130130

131131
Error execute(
132-
__ET_UNUSED BackendExecutionContext& context,
132+
ET_UNUSED BackendExecutionContext& context,
133133
DelegateHandle* handle,
134134
EValue** args) const override {
135135
Method* client_method = static_cast<Method*>(handle);

extension/apple/ExecuTorch/Exported/ExecuTorchLog.mm

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -116,10 +116,10 @@ - (void)logWithLevel:(ExecuTorchLogLevel)level
116116
void et_pal_emit_log_message(et_timestamp_t timestamp,
117117
et_pal_log_level_t level,
118118
const char *__nonnull filename,
119-
__ET_UNUSED const char *function,
119+
ET_UNUSED const char *function,
120120
size_t line,
121121
const char *__nonnull message,
122-
__ET_UNUSED size_t length) {
122+
ET_UNUSED size_t length) {
123123
#if ET_LOG_ENABLED
124124
NSTimeInterval timeInterval = timestamp / 1000000000.0;
125125
NSUInteger totalSeconds = (NSUInteger)timeInterval;

0 commit comments

Comments
 (0)