File tree Expand file tree Collapse file tree 3 files changed +21
-0
lines changed Expand file tree Collapse file tree 3 files changed +21
-0
lines changed Original file line number Diff line number Diff line change 17
17
18
18
19
19
def pick_multiple_gpus (nb ):
20
+ '''
21
+ Raises:
22
+ MisconfigurationException:
23
+ If ``gpus`` is set to 0, when ``auto_select_gpus=True``.
24
+ '''
20
25
if nb == 0 :
21
26
raise MisconfigurationException (
22
27
r"auto_select_gpus=True, gpus=0 is not a valid configuration.\
@@ -33,6 +38,11 @@ def pick_multiple_gpus(nb):
33
38
34
39
35
40
def pick_single_gpu (exclude_gpus : list ):
41
+ '''
42
+ Raises:
43
+ RuntimeError:
44
+ If you try to allocate a GPU, when no GPUs are available.
45
+ '''
36
46
for i in range (torch .cuda .device_count ()):
37
47
if i in exclude_gpus :
38
48
continue
Original file line number Diff line number Diff line change @@ -70,6 +70,13 @@ def scale_batch_size(
70
70
71
71
**fit_kwargs: remaining arguments to be passed to .fit(), e.g., dataloader
72
72
or datamodule.
73
+
74
+ Raises:
75
+ MisconfigurationException:
76
+ If field ``batch_arg_name`` is not found in ``model`` and ``model.hparams``, or
77
+ if batch scaling feature is used with dataloaders passed directly to ``.fit()``.
78
+ ValueError:
79
+ If mode in method ``scale_batch_size`` is neither ``power`` nor ``binsearch``.
73
80
"""
74
81
if trainer .fast_dev_run :
75
82
rank_zero_warn ('Skipping batch size scaler since fast_dev_run is enabled.' , UserWarning )
Original file line number Diff line number Diff line change @@ -106,6 +106,10 @@ def lr_find(
106
106
107
107
update_attr: Whether to update the learning rate attribute or not.
108
108
109
+ Raises:
110
+ MisconfigurationException:
111
+ If learning rate/lr in ``model`` or ``model.hparams`` isn't overridden when ``auto_lr_find=True``, or
112
+ if you are using more than one optimizer with the learning rate finder.
109
113
110
114
Example::
111
115
You can’t perform that action at this time.
0 commit comments