
Commit 5d673a6

Olivia-liu authored and facebook-github-bot committed
Inspector APIs documentation - inline docstring part (#626)
Summary: As titled
Differential Revision: D49893649
1 parent 86449bf commit 5d673a6


sdk/inspector/inspector.py

Lines changed: 59 additions & 18 deletions
@@ -129,20 +129,28 @@ def median(self) -> float:
 @dataclass
 class Event:
     """
-    Corresponds to an op instance
+    An Event corresponds to an operator instance with perf data retrieved from the runtime and other metadata from `ETRecord`.
+
+    Args:
+        name: Name of the profiling/debugging `Event`.
+        perf_data: Performance data associated with the event retrieved from the runtime (available attributes: p50, p90, avg, min, max and median).
+        op_types: List of op types corresponding to the event.
+        instruction_id: Instruction id of the profiling event.
+        delegate_debug_identifier: Supplemental identifier used in combination with instruction id.
+        debug_handles: Debug handles in the model graph to which this event is correlated.
+        stack_traces: A dictionary mapping the name of each associated op to its stack trace.
+        module_hierarchy: A dictionary mapping the name of each associated op to its module hierarchy.
+        is_delegated_op: Whether or not the event was delegated.
+        delegate_backend_name: Name of the backend this event was delegated to.
+        debug_data: Intermediate data collected during runtime.
     """
 
     name: str
     perf_data: PerfData
     op_types: List[str] = dataclasses.field(default_factory=list)
 
-    # Instruction Id of the original profiling event
     instruction_id: Optional[int] = None
-
-    # Supplemental Identifier used in combination with instruction_identifier
     delegate_debug_identifier: Optional[Union[int, str]] = None
-
-    # Debug Handles in the model graph to which this event is correlated
     debug_handles: Optional[Union[int, Sequence[int]]] = None
 
     stack_traces: Dict[str, str] = dataclasses.field(default_factory=dict)
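
As a quick illustration of the fields documented above, here is a minimal sketch of reading `Event` data out of a constructed `Inspector`. The variable name `inspector` is hypothetical; construction is shown further down in this diff.

# Sketch only: walk every profiling Event exposed by an Inspector instance.
# `inspector` is assumed to have been built from an ETDump as documented below.
for block in inspector.event_blocks:
    for event in block.events:
        # name, op_types, perf_data, is_delegated_op and delegate_backend_name
        # are all documented in the Event docstring above.
        print(event.name, event.op_types, event.perf_data.avg, event.perf_data.p90)
        if event.is_delegated_op:
            print("  delegated to:", event.delegate_backend_name)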
@@ -213,11 +221,14 @@ def _associate_with_op_graph_nodes(
 
 @dataclass
 class EventBlock:
-    """
-    EventBlock contains a collection of events associated with a particular profiling/debugging block retrieved from the runtime.
-    Attributes:
-        name (str): Name of the profiling/debugging block
-        events (List[Event]): List of events associated with the profiling/debugging block
+    r"""
+    An `EventBlock` contains a collection of events associated with a particular profiling/debugging block retrieved from the runtime.
+    Each `EventBlock` represents a pattern of execution. For example, model initiation and loading lives in a single `EventBlock`.
+    If there's a control flow, each branch will be represented by a separate `EventBlock`.
+
+    Args:
+        name: Name of the profiling/debugging block.
+        events: List of `Event`\ s associated with the profiling/debugging block.
     """
 
     name: str
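
To make the block structure concrete, a hedged sketch of listing the `EventBlock`s an `Inspector` exposes (again assuming an already constructed `inspector`); each load/init phase or control-flow branch should show up as its own block.

# Sketch only: one line per EventBlock, using the documented `name` and `events` fields.
for block in inspector.event_blocks:
    print(f"{block.name}: {len(block.events)} events")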
@@ -226,7 +237,14 @@ class EventBlock:
     def to_dataframe(self) -> pd.DataFrame:
         """
         Converts the EventBlock into a DataFrame with each row being an event instance
+
+        Args:
+            None
+
+        Returns:
+            A Pandas DataFrame containing the data of each Event instance in this EventBlock.
         """
+
         # TODO: push row generation down to Event
         data = {
             "event_block_name": [self.name] * len(self.events),
@@ -376,14 +394,18 @@ def __init__(
         etrecord_path: Optional[str] = None,
         etdump_scale: int = 1000,
     ) -> None:
-        """
-        Create an inspector instance from the provided ETDump/ETRecord
+        r"""
+        Initialize an `Inspector` instance with the underlying `EventBlock`\ s populated with data from the provided ETDump path
+        and optional ETRecord path.
 
         Args:
             etdump_path: Path to the ETDump file.
-            etrecord_path: Path to the ETRecord file.
+            etrecord_path: Optional path to the ETRecord file.
             etdump_scale: Inverse Scale Factor used to cast the timestamps in ETDump
                 defaults to milli (1000ms = 1s).
+
+        Returns:
+            None
         """
 
         self._etrecord = (
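
A hedged construction example based on the signature above. The import path is a guess from the file location (sdk/inspector/inspector.py) and the file paths are placeholders; `etdump_scale` is left at its documented default.

# Assumed import path; adjust to however this module is exposed in your build.
from executorch.sdk.inspector.inspector import Inspector

inspector = Inspector(
    etdump_path="path/to/model.etdump",      # placeholder ETDump path
    etrecord_path="path/to/model.etrecord",  # optional ETRecord path
    etdump_scale=1000,                       # documented default (milli)
)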
@@ -422,7 +444,13 @@ def __init__(
 
     def print_data_tabular(self) -> None:
         """
-        Prints the underlying EventBlocks (essentially all the performance data)
+        Displays the underlying EventBlocks in a structured tabular format, with each row representing an Event.
+
+        Args:
+            None
+
+        Returns:
+            None
         """
 
         def style_text_size(val, size=12):
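
With an `inspector` built as in the sketch above, dumping everything in tabular form is a single call.

inspector.print_data_tabular()  # one row per Event, grouped by EventBlock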
@@ -447,7 +475,17 @@ def style_text_size(val, size=12):
         print(tabulate(filtered_df, headers="keys", tablefmt="fancy_grid"))
 
     # TODO: write unit test
-    def find_total_for_module(self, module_name: str):
+    def find_total_for_module(self, module_name: str) -> float:
+        """
+        Returns the total average compute time of all operators within the specified module.
+
+        Args:
+            module_name: Name of the module to be aggregated against.
+
+        Returns:
+            Sum of the average compute time (in seconds) of all operators within the module with "module_name".
+        """
+
         total = 0.0
         for block in self.event_blocks:
             for event in block.events:
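
A usage sketch for the aggregation helper; the module name passed here is made up for illustration and would need to match a name in the events' module hierarchy.

# Sketch only: "decoder_block_0" is a hypothetical module name.
total_avg = inspector.find_total_for_module("decoder_block_0")
print(f"Total average compute time: {total_avg:.6f} s")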
@@ -481,10 +519,13 @@ def get_exported_program(
         self, graph: Optional[str] = None
     ) -> Optional[ExportedProgram]:
         """
-        Access helper for ETRecord, defaults to returning Edge Dialect Program
+        Access helper for ETRecord, defaults to returning the Edge Dialect program.
 
         Args:
-            graph: Name of the graph to access. If None, returns the Edge Dialect Program.
+            graph: Optional name of the graph to access. If None, returns the Edge Dialect program.
+
+        Returns:
+            The ExportedProgram object of "graph".
         """
         if self._etrecord is None:
             log.warning(