Improve STL operator so the enclosing app obj can run repetitively #337

Merged
6 commits, merged Sep 14, 2022

53 changes: 52 additions & 1 deletion examples/apps/ai_unetr_seg_app/__main__.py
@@ -1,4 +1,55 @@
import logging
import shutil
import traceback
from pathlib import Path
from typing import List

from app import AIUnetrSegApp

if __name__ == "__main__":
    AIUnetrSegApp(do_run=True)
    logging.basicConfig(level=logging.DEBUG)
    # This main function is an example to show how a batch of input can be processed.
    # It assumes that in the app input folder there are a number of subfolders, each
    # containing a discrete input to be processed. Each discrete payload can have
    # multiple DICOM instance files, optionally organized in its own folder structure.
    # The application object is first created, and on its init the model network is
    # loaded as well as pre and post processing transforms. This app object is then
    # run multiple times, each time with a single discrete payload.

    app = AIUnetrSegApp(do_run=False)

    # Preserve the application top level input and output folder path, as the path
    # in the context may change on each run if the I/O arguments are passed in.
    app_input_path = Path(app.context.input_path)
    app_output_path = Path(app.context.output_path)

    # Get subfolders in the input path, assume each one contains a discrete payload
    input_dirs = [path for path in app_input_path.iterdir() if path.is_dir()]

    # Set the output path for each run under the app's output path, and do run
    work_dirs: List[str] = []  # strings representing folder paths
    for idx, dir in enumerate(input_dirs):
        try:
            output_path = app_output_path / f"{dir.name}_output"
            # Note: the work_dir should be mapped to the host drive when used in
            # a container for better performance.
            work_dir = f".unetr_app_workdir{idx}"
            work_dirs.append(work_dir)

            logging.info(f"Start processing input in: {dir} with results in: {output_path}")

            # Run app with specific input and output path.
            # Passing in the input and output does have the side effect of changing the
            # app context. This side effect will likely be eliminated in later releases.
            app.run(input=dir, output=output_path, workdir=work_dir)

            logging.info(f"Completed processing input in: {dir} with results in: {output_path}")
        except Exception as ex:
            logging.error(f"Failed processing input in {dir}, due to: {ex}\n")
            traceback.print_exc()
        finally:
            # Remove the workdir; alternatively do this later, if storage space is not a concern.
            shutil.rmtree(work_dir, ignore_errors=True)

    # Alternative. Explicitly remove the working dirs at the end of main.
    # [shutil.rmtree(work_dir, ignore_errors=True) for work_dir in work_dirs]
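
For context, a minimal sketch of the layout this batch driver assumes (the folder names below are hypothetical, not part of the change): each subfolder under the app input path holds one discrete payload, and each run writes its results to a <name>_output folder under the app output path.

from pathlib import Path

# Hypothetical staging of two payload folders under the app input path ("input" by default).
input_root = Path("input")
for name in ("study_001", "study_002"):
    (input_root / name).mkdir(parents=True, exist_ok=True)
    # ... copy that payload's DICOM instance files into input/<name>/ ...

# After the batch driver runs, results would be expected under the app output path:
#   output/study_001_output/
#   output/study_002_output/
# with per-run transient files in .unetr_app_workdir0 and .unetr_app_workdir1, removed after each run.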
52 changes: 50 additions & 2 deletions examples/apps/ai_unetr_seg_app/app.py
@@ -10,6 +10,7 @@
# limitations under the License.

import logging
from typing import List

# Required for setting SegmentDescription attributes. Direct import as this is not part of App SDK package.
from pydicom.sr.codedict import codes
@@ -131,6 +132,53 @@ def compose(self):
    # e.g.
    #     python3 app.py -i input -m model/model.ts
    #
    import shutil
    import traceback
    from pathlib import Path

    logging.basicConfig(level=logging.DEBUG)
    app_instance = AIUnetrSegApp()  # Optional params' defaults are fine.
    app_instance.run()
    # This main function is an example to show how a batch of input can be processed.
    # It assumes that in the app input folder there are a number of subfolders, each
    # containing a discrete input to be processed. Each discrete payload can have
    # multiple DICOM instance files, optionally organized in its own folder structure.
    # The application object is first created, and on its init the model network is
    # loaded as well as pre and post processing transforms. This app object is then
    # run multiple times, each time with a single discrete payload.

    app = AIUnetrSegApp(do_run=False)

    # Preserve the application top level input and output folder path, as the path
    # in the context may change on each run if the I/O arguments are passed in.
    app_input_path = Path(app.context.input_path)
    app_output_path = Path(app.context.output_path)

    # Get subfolders in the input path, assume each one contains a discrete payload
    input_dirs = [path for path in app_input_path.iterdir() if path.is_dir()]

    # Set the output path for each run under the app's output path, and do run
    work_dirs: List[str] = []  # strings representing folder paths
    for idx, dir in enumerate(input_dirs):
        try:
            output_path = app_output_path / f"{dir.name}_output"
            # Note: the work_dir should be mapped to the host drive when used in
            # a container for better performance.
            work_dir = f".unetr_app_workdir{idx}"
            work_dirs.append(work_dir)

            logging.info(f"Start processing input in: {dir} with results in: {output_path}")

            # Run app with specific input and output path.
            # Passing in the input and output does have the side effect of changing the
            # app context. This side effect will likely be eliminated in later releases.
            app.run(input=dir, output=output_path, workdir=work_dir)

            logging.info(f"Completed processing input in: {dir} with results in: {output_path}")
        except Exception as ex:
            logging.error(f"Failed processing input in {dir}, due to: {ex}\n")
            traceback.print_exc()
        finally:
            # Remove the workdir; alternatively do this later, if storage space is not a concern.
            shutil.rmtree(work_dir, ignore_errors=True)

    # Alternative. Explicitly remove the working dirs at the end of main.
    # [shutil.rmtree(work_dir, ignore_errors=True) for work_dir in work_dirs]
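
The two entry styles interleaved above differ only in when the app executes: constructing with do_run=True (or calling run() right after construction) runs once, while do_run=False defers execution so the same object, with the model network and transforms already loaded, can be run repeatedly; that repeated use is what this change to the STL operator is meant to support. A minimal sketch with hypothetical paths:

app = AIUnetrSegApp(do_run=False)  # load the model network and transforms once
app.run(input="input/study_001", output="output/study_001_output", workdir=".work_001")
app.run(input="input/study_002", output="output/study_002_output", workdir=".work_002")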
51 changes: 27 additions & 24 deletions monai/deploy/operators/stl_conversion_operator.py
@@ -13,6 +13,7 @@
import os
import shutil
import tempfile
from ast import Bytes
from pathlib import Path
from typing import Dict, Optional

@@ -29,19 +30,23 @@
trimesh, _ = optional_import("trimesh")

import monai.deploy.core as md
from monai.deploy.core import DataPath, ExecutionContext, Image, InputContext, IOType, Operator, OutputContext
from monai.deploy.core import ExecutionContext, Image, InputContext, IOType, Operator, OutputContext

__all__ = ["STLConversionOperator", "STLConverter"]


@md.input("image", Image, IOType.IN_MEMORY)
@md.output("stl_output", DataPath, IOType.DISK)
@md.output("stl_output", Bytes, IOType.IN_MEMORY) # Only available when run as non-leaf operator
# nibabel is required by the dependent class STLConverter.
@md.env(
pip_packages=["numpy>=1.21", "nibabel >= 3.2.1", "numpy-stl>=2.12.0", "scikit-image>=0.17.2", "trimesh>=3.8.11"]
)
class STLConversionOperator(Operator):
"""Converts volumetric image to surface mesh in STL format, file output only."""
"""Converts volumetric image to surface mesh in STL format, file output only.

Only when used as a non-leaf operator is the output of STL binary stored in memory idenfied by the output label.
If a file path is provided, the STL binary will be saved in the the application's output folder of the current run.
"""

    def __init__(
        self, output_file=None, class_id=None, is_smooth=True, keep_largest_connected_component=True, *args, **kwargs
@@ -59,16 +64,17 @@ def __init__(
        self._class_id = class_id
        self._is_smooth = is_smooth
        self._keep_largest_connected_component = keep_largest_connected_component
        self._output_file = output_file if output_file and len(output_file) > 0 else None
        self._output_file = output_file if output_file and len(str(output_file)) > 0 else None

        self._converter = STLConverter(*args, **kwargs)

    def compute(self, op_input: InputContext, op_output: OutputContext, context: ExecutionContext):
        """Gets the input (image), processes it and sets results in the output.

        When used in a leaf operator, this function cannot set its output as in-memory object due to
        current limitation, and only file output, for DataPath IOType_DISK, will be saved in the
        op_output path, which is mapped to the application's output path by the execution engine.
        current limitation.
        If a file path is provided, the STL binary will be saved in the application's output
        folder of the current run.

        Args:
            op_input (InputContext): An input context for the operator.
@@ -80,20 +86,21 @@ def compute(self, op_input: InputContext, op_output: OutputContext, context: Exe
        if not input_image:
            raise ValueError("Input is None.")

        op_output_config = op_output.get()
        if self._output_file and len(self._output_file) > 0:
            # The file output folder is either the op_output or app's output depending on output types.
            output_folder = (
                op_output_config.path if isinstance(op_output_config, DataPath) else context.output.get().path
            )
            self._output_file = output_folder / self._output_file
            self._output_file.parent.mkdir(exist_ok=True)
            self._logger.info(f"Output will be saved in file {self._output_file}.")
        # Use the app's current run output folder as parent to the STL output path.
        if self._output_file and len(str(self._output_file)) > 0:
            _output_file = context.output.get().path / self._output_file
            _output_file.parent.mkdir(parents=True, exist_ok=True)
            self._logger.info(f"Output will be saved in file {_output_file}.")

        stl_bytes = self._convert(input_image, self._output_file)
        stl_bytes = self._convert(input_image, _output_file)

        if not isinstance(op_output_config, DataPath):
            op_output.set(stl_bytes)
        try:
            # TODO: Need a way to find if the operator is run as leaf node in order to
            # avoid setting in_memory object.
            if self.op_info.get_storage_type("output", "stl_output") == IOType.IN_MEMORY:
                op_output.set(stl_bytes)
        except Exception as ex:
            self._logger.warn(f"In_memory output cannot be used when run as a leaf operator. {ex}")

    def _convert(self, image: Image, output_file: Optional[Path] = None):
        """
@@ -152,12 +159,8 @@ def convert(
        if not image or not isinstance(image, Image):
            raise ValueError("image is not a Image object.")

        if not isinstance(output_file, Path):
            raise ValueError("output_file is not a Path")

        # Ensure output file's folder exists
        if output_file.parent:
            output_file.parent.mkdir(exist_ok=True)
        if isinstance(output_file, Path):
            output_file.parent.mkdir(parents=True, exist_ok=True)

        s_image = self.SpatialImage(image)
        nda = s_image.image_array
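
For illustration, a minimal sketch of wiring this operator into an app's compose(), under stated assumptions: the stand-in upstream operator, its "seg_image" output label, and the toy mask are placeholders for whatever a real app provides (a real segmentation Image would carry proper spatial metadata); only the "image" input label and the output_file behavior come from the operator above.

import numpy as np

import monai.deploy.core as md
from monai.deploy.core import Application, ExecutionContext, Image, InputContext, IOType, Operator, OutputContext
from monai.deploy.operators.stl_conversion_operator import STLConversionOperator


@md.output("seg_image", Image, IOType.IN_MEMORY)
class FakeSegOperator(Operator):
    """Stand-in upstream operator; a real app would emit a segmentation Image with spatial metadata."""

    def compute(self, op_input: InputContext, op_output: OutputContext, context: ExecutionContext):
        mask = np.zeros((32, 32, 32), dtype=np.uint8)
        mask[8:24, 8:24, 8:24] = 1  # a toy cube so the generated mesh is not empty
        op_output.set(Image(mask), "seg_image")


class StlDemoApp(Application):
    def compose(self):
        seg_op = FakeSegOperator()
        # STL bytes are set in memory only when the STL operator is non-leaf; here it is a leaf,
        # so only the file under the app's output folder (stl/mesh.stl) is produced.
        stl_op = STLConversionOperator(output_file="stl/mesh.stl")
        self.add_flow(seg_op, stl_op, {"seg_image": "image"})


if __name__ == "__main__":
    StlDemoApp(do_run=True)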
3 changes: 2 additions & 1 deletion setup.cfg
@@ -42,7 +42,8 @@ max_line_length = 120
ignore =
    E203,E305,E402,E501,E721,E741,F821,F841,F999,W503,W504,C408,E302,W291,E303,
    # N812 lowercase 'torch.nn.functional' imported as non lowercase 'F'
    N812
    N812,
    B024 #abstract base class, but it has no abstract methods
per_file_ignores =
    __init__.py: F401
    # Allow using camel case for variable/argument names for the sake of readability.
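
For reference, B024 comes from flake8-bugbear and flags an abstract base class that declares no abstract methods; the SDK evidently keeps such classes by design, hence the new ignore. A minimal example of code that would trigger the warning:

from abc import ABC


class NoAbstractMethodBase(ABC):  # B024: abstract base class without abstract methods
    def describe(self) -> str:
        return type(self).__name__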