Skip to content

Commit 038c54a

Browse files
committed
fix: Update logging scheme for converters
- Add debug statements for converter registrations in both FX and Dynamo
1 parent 73cab19 commit 038c54a

File tree

3 files changed: +20 −5 lines changed

py/torch_tensorrt/dynamo/backend/lowering/_partition.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
 import logging
-from typing import Callable, Dict, List, Optional, Sequence, Set
+from typing import Dict, List, Optional, Sequence, Set
 
 import torch
 
@@ -55,10 +55,6 @@ def __init__(
         )
 
         self.min_block_size = min_block_size
-        logger.debug(
-            "Initialized Capability-Based Partitioner with available Converters:\n"
-            + f"{CONVERTERS.display_all_available_converters()}"
-        )
 
     def propose_partitions(self) -> List[Partition]:
         # Propose partitions using the default, then refine the results

py/torch_tensorrt/dynamo/converter_registry.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
+import logging
 from dataclasses import dataclass, field
 from typing import Any, Callable, Dict, Optional, Sequence, Union
 from enum import Enum, auto
@@ -6,6 +7,9 @@
 from torch_tensorrt.fx.converter_registry import CONVERTERS
 
 
+logger = logging.getLogger(__name__)
+
+
 class ConverterPriority(Enum):
     """Enum to set a converter's priority in the registry"""
 

@@ -85,6 +89,10 @@ def register_converter(converter):
         else:
             DYNAMO_ATEN_CONVERTERS[key] = [converter_support]
 
+        logger.debug(
+            f"Converter for {key} added to Dynamo ATen Converter Registry with priority: {priority}"
+        )
+
         return converter
 
     def disable_converter(converter):

py/torch_tensorrt/fx/converter_registry.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
+import logging
 from typing import Any, Callable, Dict
 
 from torch.fx.node import Target
@@ -7,6 +8,9 @@
 NO_EXPLICIT_BATCH_DIM_SUPPORT = {}
 
 
+logger = logging.getLogger(__name__)
+
+
 def tensorrt_converter(
     key: Target,
     no_implicit_batch_dim: bool = False,
@@ -19,6 +23,13 @@ def register_converter(converter):
             NO_IMPLICIT_BATCH_DIM_SUPPORT[key] = converter
         if no_explicit_batch_dim:
             NO_EXPLICIT_BATCH_DIM_SUPPORT[key] = converter
+
+        logger.debug(
+            f"Converter for {key} added to FX Converter Registry "
+            + f"{'without' if no_explicit_batch_dim else 'with'} Explicit Batch Dim Support + "
+            + f"{'without' if no_implicit_batch_dim else 'with'} Implicit Batch Dim Support"
+        )
+
         return converter
 
 def disable_converter(converter):

0 commit comments

Comments (0)