
Commit e73257c

KumoLiu, borisfom, pre-commit-ci[bot], and ericspod authored
Add PythonicWorkflow (#8151)
Fixes # .

### Description

Add PythonicWorkflow

### Types of changes

- [x] Non-breaking change (fix or new feature that would not break existing functionality).
- [ ] Breaking change (fix or new feature that would cause existing functionality to change).
- [ ] New tests added to cover the changes.
- [ ] Integration tests passed locally by running `./runtests.sh -f -u --net --coverage`.
- [ ] Quick tests passed locally by running `./runtests.sh --quick --unittests --disttests`.
- [ ] In-line docstrings updated.
- [ ] Documentation updated, tested `make html` command in the `docs/` folder.

---------

Signed-off-by: Boris Fomitchev <[email protected]>
Signed-off-by: YunLiu <[email protected]>
Co-authored-by: Boris Fomitchev <[email protected]>
Co-authored-by: Boris Fomitchev <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Eric Kerfoot <[email protected]>
1 parent 649c7c8 commit e73257c

11 files changed (+409 -96 lines)

monai/bundle/__init__.py (+1 -1)

```diff
@@ -43,4 +43,4 @@
     MACRO_KEY,
     load_bundle_config,
 )
-from .workflows import BundleWorkflow, ConfigWorkflow
+from .workflows import BundleWorkflow, ConfigWorkflow, PythonicWorkflow
```

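With the re-export above, the new class becomes importable from the package root alongside the existing workflow classes. A minimal sanity check, assuming a MONAI build that includes this commit:

```python
# Quick check of the new public export; PythonicWorkflow shares the BundleWorkflow base
# used by ConfigWorkflow (see monai/bundle/workflows.py below).
from monai.bundle import BundleWorkflow, ConfigWorkflow, PythonicWorkflow

assert issubclass(PythonicWorkflow, BundleWorkflow)
assert issubclass(ConfigWorkflow, BundleWorkflow)
```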
monai/bundle/workflows.py (+170 -20)

```diff
@@ -44,12 +44,18 @@ class BundleWorkflow(ABC):
         workflow_type: specifies the workflow type: "train" or "training" for a training workflow,
             or "infer", "inference", "eval", "evaluation" for a inference workflow,
             other unsupported string will raise a ValueError.
-            default to `train` for train workflow.
+            default to `None` for only using meta properties.
         workflow: specifies the workflow type: "train" or "training" for a training workflow,
             or "infer", "inference", "eval", "evaluation" for a inference workflow,
             other unsupported string will raise a ValueError.
             default to `None` for common workflow.
-        properties_path: the path to the JSON file of properties.
+        properties_path: the path to the JSON file of properties. If `workflow_type` is specified, properties will be
+            loaded from the file based on the provided `workflow_type` and meta. If no `workflow_type` is specified,
+            properties will default to loading from "meta". If `properties_path` is None, default properties
+            will be sourced from "monai/bundle/properties.py" based on the workflow_type:
+            For a training workflow, properties load from `TrainProperties` and `MetaProperties`.
+            For a inference workflow, properties load from `InferProperties` and `MetaProperties`.
+            For workflow_type = None : only `MetaProperties` will be loaded.
         meta_file: filepath of the metadata file, if this is a list of file paths, their contents will be merged in order.
         logging_file: config file for `logging` module in the program. for more details:
             https://docs.python.org/3/library/logging.config.html#logging.config.fileConfig.
@@ -97,29 +103,50 @@ def __init__(
             meta_file = None
 
         workflow_type = workflow if workflow is not None else workflow_type
-        if workflow_type is None and properties_path is None:
-            self.properties = copy(MetaProperties)
-            self.workflow_type = None
-            self.meta_file = meta_file
-            return
+        if workflow_type is not None:
+            if workflow_type.lower() in self.supported_train_type:
+                workflow_type = "train"
+            elif workflow_type.lower() in self.supported_infer_type:
+                workflow_type = "infer"
+            else:
+                raise ValueError(f"Unsupported workflow type: '{workflow_type}'.")
+
         if properties_path is not None:
             properties_path = Path(properties_path)
             if not properties_path.is_file():
                 raise ValueError(f"Property file {properties_path} does not exist.")
             with open(properties_path) as json_file:
-                self.properties = json.load(json_file)
-            self.workflow_type = None
-            self.meta_file = meta_file
-            return
-        if workflow_type.lower() in self.supported_train_type:  # type: ignore[union-attr]
-            self.properties = {**TrainProperties, **MetaProperties}
-            self.workflow_type = "train"
-        elif workflow_type.lower() in self.supported_infer_type:  # type: ignore[union-attr]
-            self.properties = {**InferProperties, **MetaProperties}
-            self.workflow_type = "infer"
+                try:
+                    properties = json.load(json_file)
+                    self.properties: dict = {}
+                    if workflow_type is not None and workflow_type in properties:
+                        self.properties = properties[workflow_type]
+                        if "meta" in properties:
+                            self.properties.update(properties["meta"])
+                    elif workflow_type is None:
+                        if "meta" in properties:
+                            self.properties = properties["meta"]
+                            logger.info(
+                                "No workflow type specified, default to load meta properties from property file."
+                            )
+                        else:
+                            logger.warning("No 'meta' key found in properties while workflow_type is None.")
+                except KeyError as e:
+                    raise ValueError(f"{workflow_type} not found in property file {properties_path}") from e
+                except json.JSONDecodeError as e:
+                    raise ValueError(f"Error decoding JSON from property file {properties_path}") from e
         else:
-            raise ValueError(f"Unsupported workflow type: '{workflow_type}'.")
+            if workflow_type == "train":
+                self.properties = {**TrainProperties, **MetaProperties}
+            elif workflow_type == "infer":
+                self.properties = {**InferProperties, **MetaProperties}
+            elif workflow_type is None:
+                self.properties = copy(MetaProperties)
+                logger.info("No workflow type and property file specified, default to 'meta' properties.")
+            else:
+                raise ValueError(f"Unsupported workflow type: '{workflow_type}'.")
 
+        self.workflow_type = workflow_type
         self.meta_file = meta_file
 
     @abstractmethod
@@ -226,6 +253,124 @@ def check_properties(self) -> list[str] | None:
         return [n for n, p in self.properties.items() if p.get(BundleProperty.REQUIRED, False) and not hasattr(self, n)]
 
 
+class PythonicWorkflow(BundleWorkflow):
+    """
+    Base class for the pythonic workflow specification in bundle, it can be a training, evaluation or inference workflow.
+    It defines the basic interfaces for the bundle workflow behavior: `initialize`, `finalize`, etc.
+    This also provides the interface to get / set public properties to interact with a bundle workflow through
+    defined `get_<property>` accessor methods or directly defining members of the object.
+    For how to set the properties, users can define the `_set_<property>` methods or directly set the members of the object.
+    The `initialize` method is called to set up the workflow before running. This method sets up internal state
+    and prepares properties. If properties are modified after the workflow has been initialized, `self._is_initialized`
+    is set to `False`. Before running the workflow again, `initialize` should be called to ensure that the workflow is
+    properly set up with the new property values.
+
+    Args:
+        workflow_type: specifies the workflow type: "train" or "training" for a training workflow,
+            or "infer", "inference", "eval", "evaluation" for a inference workflow,
+            other unsupported string will raise a ValueError.
+            default to `None` for only using meta properties.
+        workflow: specifies the workflow type: "train" or "training" for a training workflow,
+            or "infer", "inference", "eval", "evaluation" for a inference workflow,
+            other unsupported string will raise a ValueError.
+            default to `None` for common workflow.
+        properties_path: the path to the JSON file of properties. If `workflow_type` is specified, properties will be
+            loaded from the file based on the provided `workflow_type` and meta. If no `workflow_type` is specified,
+            properties will default to loading from "meta". If `properties_path` is None, default properties
+            will be sourced from "monai/bundle/properties.py" based on the workflow_type:
+            For a training workflow, properties load from `TrainProperties` and `MetaProperties`.
+            For a inference workflow, properties load from `InferProperties` and `MetaProperties`.
+            For workflow_type = None : only `MetaProperties` will be loaded.
+        config_file: path to the config file, typically used to store hyperparameters.
+        meta_file: filepath of the metadata file, if this is a list of file paths, their contents will be merged in order.
+        logging_file: config file for `logging` module in the program. for more details:
+            https://docs.python.org/3/library/logging.config.html#logging.config.fileConfig.
+
+    """
+
+    supported_train_type: tuple = ("train", "training")
+    supported_infer_type: tuple = ("infer", "inference", "eval", "evaluation")
+
+    def __init__(
+        self,
+        workflow_type: str | None = None,
+        properties_path: PathLike | None = None,
+        config_file: str | Sequence[str] | None = None,
+        meta_file: str | Sequence[str] | None = None,
+        logging_file: str | None = None,
+        **override: Any,
+    ):
+        meta_file = str(Path(os.getcwd()) / "metadata.json") if meta_file is None else meta_file
+        super().__init__(
+            workflow_type=workflow_type, properties_path=properties_path, meta_file=meta_file, logging_file=logging_file
+        )
+        self._props_vals: dict = {}
+        self._set_props_vals: dict = {}
+        self.parser = ConfigParser()
+        if config_file is not None:
+            self.parser.read_config(f=config_file)
+        if self.meta_file is not None:
+            self.parser.read_meta(f=self.meta_file)
+
+        # the rest key-values in the _args are to override config content
+        self.parser.update(pairs=override)
+        self._is_initialized: bool = False
+
+    def initialize(self, *args: Any, **kwargs: Any) -> Any:
+        """
+        Initialize the bundle workflow before running.
+        """
+        self._props_vals = {}
+        self._is_initialized = True
+
+    def _get_property(self, name: str, property: dict) -> Any:
+        """
+        With specified property name and information, get the expected property value.
+        If the property is already generated, return from the bucket directly.
+        If user explicitly set the property, return it directly.
+        Otherwise, generate the expected property as a class private property with prefix "_".
+
+        Args:
+            name: the name of target property.
+            property: other information for the target property, defined in `TrainProperties` or `InferProperties`.
+        """
+        if not self._is_initialized:
+            raise RuntimeError("Please execute 'initialize' before getting any properties.")
+        value = None
+        if name in self._set_props_vals:
+            value = self._set_props_vals[name]
+        elif name in self._props_vals:
+            value = self._props_vals[name]
+        elif name in self.parser.config[self.parser.meta_key]:  # type: ignore[index]
+            id = self.properties.get(name, None).get(BundlePropertyConfig.ID, None)
+            value = self.parser[id]
+        else:
+            try:
+                value = getattr(self, f"get_{name}")()
+            except AttributeError as e:
+                if property[BundleProperty.REQUIRED]:
+                    raise ValueError(
+                        f"unsupported property '{name}' is required in the bundle properties,"
+                        f"need to implement a method 'get_{name}' to provide the property."
+                    ) from e
+        self._props_vals[name] = value
+        return value
+
+    def _set_property(self, name: str, property: dict, value: Any) -> Any:
+        """
+        With specified property name and information, set value for the expected property.
+        Stores user-reset initialized objects that should not be re-initialized and marks the workflow as not initialized.
+
+        Args:
+            name: the name of target property.
+            property: other information for the target property, defined in `TrainProperties` or `InferProperties`.
+            value: value to set for the property.
+
+        """
+        self._set_props_vals[name] = value
+        self._is_initialized = False
+
+
 class ConfigWorkflow(BundleWorkflow):
     """
     Specification for the config-based bundle workflow.
@@ -262,7 +407,13 @@ class ConfigWorkflow(BundleWorkflow):
             or "infer", "inference", "eval", "evaluation" for a inference workflow,
             other unsupported string will raise a ValueError.
             default to `None` for common workflow.
-        properties_path: the path to the JSON file of properties.
+        properties_path: the path to the JSON file of properties. If `workflow_type` is specified, properties will be
+            loaded from the file based on the provided `workflow_type` and meta. If no `workflow_type` is specified,
+            properties will default to loading from "train". If `properties_path` is None, default properties
+            will be sourced from "monai/bundle/properties.py" based on the workflow_type:
+            For a training workflow, properties load from `TrainProperties` and `MetaProperties`.
+            For a inference workflow, properties load from `InferProperties` and `MetaProperties`.
+            For workflow_type = None : only `MetaProperties` will be loaded.
         override: id-value pairs to override or add the corresponding config content.
             e.g. ``--net#input_chns 42``, ``--net %/data/other.json#net_arg``
 
@@ -324,7 +475,6 @@ def __init__(
         self.parser.read_config(f=config_file)
         if self.meta_file is not None:
             self.parser.read_meta(f=self.meta_file)
-
         # the rest key-values in the _args are to override config content
         self.parser.update(pairs=override)
         self.init_id = init_id
```

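The resolution order in `_get_property` above (explicitly set values first, then cached values, then entries backed by the parsed metadata, then a `get_<property>` accessor) is the core of the new class. A minimal usage sketch under assumptions not shown in the diff: the working directory contains a `metadata.json`, and the subclass and accessor names below are illustrative, not prescribed by the commit:

```python
import torch

from monai.bundle import PythonicWorkflow


class MyInferWorkflow(PythonicWorkflow):
    """Hypothetical inference workflow defined purely in Python (no config files needed)."""

    def run(self):
        # a real workflow would build a network/inferer here and run prediction
        pass

    def finalize(self):
        pass

    def get_bundle_root(self):
        # called lazily by _get_property the first time `self.bundle_root` is accessed
        return "."

    def get_device(self):
        return torch.device("cuda" if torch.cuda.is_available() else "cpu")


workflow = MyInferWorkflow(workflow_type="infer", meta_file="./metadata.json")
workflow.initialize()                        # required before reading any property
print(workflow.bundle_root, workflow.device)
workflow.device = torch.device("cpu")        # routed to _set_property; marks the workflow uninitialized
workflow.initialize()                        # re-initialize before running with the new value
```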
monai/utils/module.py (+3 -3)

```diff
@@ -649,7 +649,7 @@ def compute_capabilities_after(major: int, minor: int = 0, current_ver_string: s
         current_ver_string: if None, the current system GPU CUDA compute capability will be used.
 
     Returns:
-        True if the current system GPU CUDA compute capability is greater than the specified version.
+        True if the current system GPU CUDA compute capability is greater than or equal to the specified version.
     """
     if current_ver_string is None:
         cuda_available = torch.cuda.is_available()
@@ -667,11 +667,11 @@ def compute_capabilities_after(major: int, minor: int = 0, current_ver_string: s
 
     ver, has_ver = optional_import("packaging.version", name="parse")
     if has_ver:
-        return ver(".".join((f"{major}", f"{minor}"))) < ver(f"{current_ver_string}")  # type: ignore
+        return ver(".".join((f"{major}", f"{minor}"))) <= ver(f"{current_ver_string}")  # type: ignore
     parts = f"{current_ver_string}".split("+", 1)[0].split(".", 2)
     while len(parts) < 2:
         parts += ["0"]
     c_major, c_minor = parts[:2]
     c_mn = int(c_major), int(c_minor)
     mn = int(major), int(minor)
-    return c_mn >= mn
+    return c_mn > mn
```

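The docstring and the `packaging` branch above now treat an exact compute-capability match as passing the check. A small illustration of the new boundary behaviour, assuming a build that includes this commit and that `packaging` is importable (the branch exercised here):

```python
# Boundary behaviour of the version check after this change (requires `packaging`
# so the first return branch is taken).
from monai.utils.module import compute_capabilities_after

print(compute_capabilities_after(7, 5, current_ver_string="7.5"))  # True: exact match now passes
print(compute_capabilities_after(7, 5, current_ver_string="8.0"))  # True: newer than requested
print(compute_capabilities_after(7, 5, current_ver_string="7.0"))  # False: below the requested capability
```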
tests/nonconfig_workflow.py (+62 -2)

```diff
@@ -13,7 +13,7 @@
 
 import torch
 
-from monai.bundle import BundleWorkflow
+from monai.bundle import BundleWorkflow, PythonicWorkflow
 from monai.data import DataLoader, Dataset
 from monai.engines import SupervisedEvaluator
 from monai.inferers import SlidingWindowInferer
@@ -26,8 +26,9 @@
     LoadImaged,
     SaveImaged,
     ScaleIntensityd,
+    ScaleIntensityRanged,
 )
-from monai.utils import BundleProperty, set_determinism
+from monai.utils import BundleProperty, CommonKeys, set_determinism
 
 
 class NonConfigWorkflow(BundleWorkflow):
@@ -176,3 +177,62 @@ def _set_property(self, name, property, value):
             self._numpy_version = value
         elif property[BundleProperty.REQUIRED]:
             raise ValueError(f"unsupported property '{name}' is required in the bundle properties.")
+
+
+class PythonicWorkflowImpl(PythonicWorkflow):
+    """
+    Test class simulates the bundle workflow defined by Python script directly.
+    """
+
+    def __init__(
+        self,
+        workflow_type: str = "inference",
+        config_file: str | None = None,
+        properties_path: str | None = None,
+        meta_file: str | None = None,
+    ):
+        super().__init__(
+            workflow_type=workflow_type, properties_path=properties_path, config_file=config_file, meta_file=meta_file
+        )
+        self.dataflow: dict = {}
+
+    def initialize(self):
+        self._props_vals = {}
+        self._is_initialized = True
+        self.net = UNet(
+            spatial_dims=3,
+            in_channels=1,
+            out_channels=2,
+            channels=(16, 32, 64, 128),
+            strides=(2, 2, 2),
+            num_res_units=2,
+        ).to(self.device)
+        preprocessing = Compose(
+            [
+                EnsureChannelFirstd(keys=["image"]),
+                ScaleIntensityd(keys="image"),
+                ScaleIntensityRanged(keys="image", a_min=-57, a_max=164, b_min=0.0, b_max=1.0, clip=True),
+            ]
+        )
+        self.dataset = Dataset(data=[self.dataflow], transform=preprocessing)
+        self.postprocessing = Compose([Activationsd(keys="pred", softmax=True), AsDiscreted(keys="pred", argmax=True)])
+
+    def run(self):
+        data = self.dataset[0]
+        inputs = data[CommonKeys.IMAGE].unsqueeze(0).to(self.device)
+        self.net.eval()
+        with torch.no_grad():
+            data[CommonKeys.PRED] = self.inferer(inputs, self.net)
+        self.dataflow.update({CommonKeys.PRED: self.postprocessing(data)[CommonKeys.PRED]})
+
+    def finalize(self):
+        pass
+
+    def get_bundle_root(self):
+        return "."
+
+    def get_device(self):
+        return torch.device("cuda" if torch.cuda.is_available() else "cpu")
+
+    def get_inferer(self):
+        return SlidingWindowInferer(roi_size=self.parser.roi_size, sw_batch_size=1, overlap=0)
```

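The test class above wires a UNet, a preprocessing chain, and a sliding-window inferer together, exchanging data through its `dataflow` dict. A sketch of how such a workflow might be driven; the file paths and the assumption that the config defines `roi_size` are illustrative, not taken from the commit:

```python
from monai.transforms import LoadImaged
from tests.nonconfig_workflow import PythonicWorkflowImpl

# Hypothetical paths: the config is expected to define `roi_size` (used by get_inferer)
# and metadata.json must exist because PythonicWorkflow reads it at construction time.
workflow = PythonicWorkflowImpl(
    workflow_type="inference", config_file="./config.json", meta_file="./metadata.json"
)
workflow.initialize()

# feed an image into the dataflow, run inference, then read the post-processed prediction back
workflow.dataflow.update(LoadImaged(keys="image")({"image": "./image.nii.gz"}))
workflow.run()
prediction = workflow.dataflow["pred"]  # CommonKeys.PRED
workflow.finalize()
```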
tests/test_bundle_trt_export.py (+1 -1)

```diff
@@ -53,7 +53,7 @@
 @skip_if_windows
 @skip_if_no_cuda
 @skip_if_quick
-@SkipIfBeforeComputeCapabilityVersion((7, 0))
+@SkipIfBeforeComputeCapabilityVersion((7, 5))
 class TestTRTExport(unittest.TestCase):
 
     def setUp(self):
```
