Skip to content

Commit df177fd

Browse files
wyli and ericspod authored
add adn unet (#4185)
Signed-off-by: Wenqi Li <[email protected]> Co-authored-by: Eric Kerfoot <[email protected]>
1 parent be3d22f commit df177fd

File tree

3 files changed

+11
-2
lines changed

3 files changed

+11
-2
lines changed

monai/networks/blocks/acti_norm.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,8 @@
1818

1919
class ADN(nn.Sequential):
2020
"""
21-
Constructs a sequential module of optional activation, dropout, and normalization layers
22-
(with an arbitrary order)::
21+
Constructs a sequential module of optional activation (A), dropout (D), and normalization (N) layers
22+
with an arbitrary order::
2323
2424
-- (Norm) -- (Dropout) -- (Acti) --
2525

monai/networks/nets/unet.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -68,6 +68,8 @@ class UNet(nn.Module):
6868
bias: whether to have a bias term in convolution blocks. Defaults to True.
6969
According to `Performance Tuning Guide <https://pytorch.org/tutorials/recipes/recipes/tuning_guide.html>`_,
7070
if a conv layer is directly followed by a batch norm layer, bias should be False.
71+
adn_ordering: a string representing the ordering of activation (A), normalization (N), and dropout (D).
72+
Defaults to "NDA". See also: :py:class:`monai.networks.blocks.ADN`.
7173
7274
Examples::
7375
@@ -122,6 +124,7 @@ def __init__(
122124
norm: Union[Tuple, str] = Norm.INSTANCE,
123125
dropout: float = 0.0,
124126
bias: bool = True,
127+
adn_ordering: str = "NDA",
125128
dimensions: Optional[int] = None,
126129
) -> None:
127130

@@ -155,6 +158,7 @@ def __init__(
155158
self.norm = norm
156159
self.dropout = dropout
157160
self.bias = bias
161+
self.adn_ordering = adn_ordering
158162

159163
def _create_block(
160164
inc: int, outc: int, channels: Sequence[int], strides: Sequence[int], is_top: bool
@@ -229,6 +233,7 @@ def _get_down_layer(self, in_channels: int, out_channels: int, strides: int, is_
229233
norm=self.norm,
230234
dropout=self.dropout,
231235
bias=self.bias,
236+
adn_ordering=self.adn_ordering,
232237
)
233238
return mod
234239
mod = Convolution(
@@ -241,6 +246,7 @@ def _get_down_layer(self, in_channels: int, out_channels: int, strides: int, is_
241246
norm=self.norm,
242247
dropout=self.dropout,
243248
bias=self.bias,
249+
adn_ordering=self.adn_ordering,
244250
)
245251
return mod
246252

@@ -279,6 +285,7 @@ def _get_up_layer(self, in_channels: int, out_channels: int, strides: int, is_to
279285
bias=self.bias,
280286
conv_only=is_top and self.num_res_units == 0,
281287
is_transposed=True,
288+
adn_ordering=self.adn_ordering,
282289
)
283290

284291
if self.num_res_units > 0:
@@ -294,6 +301,7 @@ def _get_up_layer(self, in_channels: int, out_channels: int, strides: int, is_to
294301
dropout=self.dropout,
295302
bias=self.bias,
296303
last_conv_only=is_top,
304+
adn_ordering=self.adn_ordering,
297305
)
298306
conv = nn.Sequential(conv, ru)
299307

tests/test_unet.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -96,6 +96,7 @@
9696
"strides": (2, 2),
9797
"num_res_units": 1,
9898
"act": (Act.LEAKYRELU, {"negative_slope": 0.2}),
99+
"adn_ordering": "NA",
99100
},
100101
(16, 4, 32, 64, 48),
101102
(16, 3, 32, 64, 48),

0 commit comments

Comments (0)