Fix Exception misuse in ignite.contrib.handlers.param_scheduler.py #1206

Merged

Changes from all commits (29 commits)
5332772  ValueError -> TypeError (WrRan, Jul 3, 2020)
0be5ba0  NotImplementedError -> NotImplemented (WrRan, Jul 4, 2020)
2c2aa65  Merge branch 'patch-2' of github.com:WrRan/ignite (WrRan, Jul 4, 2020)
c1ea09e  Merge pull request #1 from pytorch/master (WrRan, Jul 6, 2020)
cf21749  Merge branch 'master' of github.com:WrRan/ignite (WrRan, Jul 7, 2020)
8ffc20d  rollback ignite/engine/events [raise NotImplementedError] (WrRan, Jul 7, 2020)
0a88579  Merge pull request #2 from pytorch/master (WrRan, Jul 7, 2020)
9f232a9  Merge branch 'master' of github.com:WrRan/ignite (WrRan, Jul 7, 2020)
cf0c57f  fix misuses of exceptions in ignite/contrib/handlers/custom_events.py (WrRan, Jul 8, 2020)
f77b541  remove period in exceptions (WrRan, Jul 8, 2020)
c24f027  refactor corresponding unit tests (WrRan, Jul 8, 2020)
00c7970  Merge pull request #3 from pytorch/master (WrRan, Jul 17, 2020)
68ca5f0  Merge branch 'master' of github.com:WrRan/ignite (WrRan, Jul 17, 2020)
bcc52e3  fix misuses of exceptions in ignite/contrib/handlers/param_scheduler.… (WrRan, Jul 17, 2020)
952b186  fix misuses of exceptions in ignite/contrib/handlers/param_scheduler.… (WrRan, Jul 17, 2020)
304c04d  autopep8 fix (Jul 17, 2020)
0543fa8  Merge branch 'master' into fix-ex-misuse-in-contrib-handers-param-sch… (sdesrozis, Jul 24, 2020)
97bc2ef  Merge branch 'master' into fix-ex-misuse-in-contrib-handers-param-sch… (vfdev-5, Aug 14, 2020)
85725dd  remove extra spaces (WrRan, Aug 16, 2020)
8f98277  autopep8 fix (Aug 16, 2020)
60e71b1  add matches to pytest.raises (WrRan, Aug 16, 2020)
e040e64  add match to pytest.raises (WrRan, Aug 16, 2020)
26c7ef1  autopep8 fix (Aug 16, 2020)
bfe7385  Merge branch 'fix-ex-misuse-in-contrib-handers-param-scheduler' of gi… (WrRan, Aug 16, 2020)
41cf960  add missing tests (WrRan, Aug 16, 2020)
b98d542  autopep8 fix (Aug 16, 2020)
5e5bea7  Update param_scheduler.py (vfdev-5, Aug 16, 2020)
5a1ee69  revert previous modification (vfdev-5, Aug 16, 2020)
a60e419  Merge branch 'master' into fix-ex-misuse-in-contrib-handers-param-sch… (vfdev-5, Aug 27, 2020)
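
Note: the convention applied throughout these commits is the usual Python one — raise TypeError when an argument is of the wrong type, and ValueError when the type is acceptable but the value is not. (The early NotImplementedError -> NotImplemented change was rolled back in 8ffc20d; NotImplemented is a sentinel value returned from binary special methods, while NotImplementedError is the exception to raise from unimplemented APIs.) A minimal sketch of the convention, using a hypothetical set_param helper that is not part of this PR:

def set_param(optimizer_param_groups, value):
    # Wrong kind of argument entirely -> TypeError.
    if not isinstance(value, (list, tuple)):
        raise TypeError("Argument value should be a list/tuple, but given {}".format(type(value)))
    # Right kind of argument, unacceptable content -> ValueError.
    if len(value) != len(optimizer_param_groups):
        raise ValueError(
            "size of value is different than optimizer_param_groups {} != {}".format(
                len(value), len(optimizer_param_groups)
            )
        )
    for param_group, v in zip(optimizer_param_groups, value):
        param_group["lr"] = v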
ignite/contrib/handlers/param_scheduler.py (64 changes: 45 additions & 19 deletions)
@@ -56,7 +56,7 @@ def __call__(self, engine, name=None):
 
         if isinstance(value, list):
             if len(value) != len(self.optimizer_param_groups):
-                raise RuntimeError(
+                raise ValueError(
                     "size of value is different than optimizer_param_groups {} != {}".format(
                         len(value), len(self.optimizer_param_groups)
                     )
@@ -266,7 +266,7 @@ def __init__(
 
         if self.cycle_size < 2:
             raise ValueError(
-                "Argument cycle_size should be positive and larger than 1, " "but given {}".format(cycle_size)
+                "Argument cycle_size should be positive and larger than 1, but given {}".format(cycle_size)
             )
 
         self._state_attrs += [
@@ -437,14 +437,20 @@ class ConcatScheduler(ParamScheduler):
 
     def __init__(self, schedulers, durations, save_history=False):
 
-        if not isinstance(schedulers, Sequence) or len(schedulers) < 2:
+        if not isinstance(schedulers, Sequence):
+            raise TypeError("Argument schedulers should be a sequence, but given {}".format(schedulers))
+
+        if len(schedulers) < 2:
             raise ValueError(
-                "Argument schedulers should be a sequence of more than one parameter schedulers, "
+                "Argument schedulers should be of more than one parameter schedulers, "
                 "but given {}".format(schedulers)
             )
 
-        if not isinstance(durations, Sequence) or not all([isinstance(t, numbers.Integral) for t in durations]):
-            raise ValueError("Argument durations should be list/tuple of integers, " "but given {}".format(durations))
+        if not isinstance(durations, (list, tuple)):
+            raise TypeError("Argument durations should be list/tuple, but given {}".format(durations))
+
+        if not all([isinstance(t, numbers.Integral) for t in durations]):
+            raise ValueError("Argument durations should be list/tuple of integers, but given {}".format(durations))
 
         if len(schedulers) != len(durations) + 1:
             raise ValueError(
@@ -567,8 +573,13 @@ def simulate_values(cls, num_events, schedulers, durations, param_names=None, **
             list of [event_index, value_0, value_1, ...], where values correspond to `param_names`.
 
         """
-        if param_names is not None and not isinstance(param_names, (list, tuple)):
-            raise ValueError("Argument param_names should be list or tuple of strings")
+        if param_names is not None:
+            if not isinstance(param_names, (list, tuple)):
+                raise TypeError("Argument param_names should be list or tuple, but given {}".format(type(param_names)))
+            if not all(isinstance(item, str) for item in param_names):
+                raise ValueError(
+                    "Argument param_names should be list or tuple of strings, but given {}".format(param_names)
+                )
 
         # This scheduler uses `ParamScheduler` which
         # should be replicated in order to simulate LR values and
@@ -762,7 +773,10 @@ def create_lr_scheduler_with_warmup(
             "ParamScheduler, but given {}".format(type(lr_scheduler))
         )
 
-    if not (isinstance(warmup_duration, numbers.Integral) and warmup_duration > 1):
+    if not isinstance(warmup_duration, numbers.Integral):
+        raise TypeError("Argument warmup_duration should be integer, but given {}".format(warmup_duration))
+
+    if not (warmup_duration > 1):
         raise ValueError("Argument warmup_duration should be at least 2 events, but given {}".format(warmup_duration))
 
     warmup_schedulers = []
@@ -857,10 +871,14 @@ class PiecewiseLinear(ParamScheduler):
     def __init__(self, optimizer, param_name, milestones_values, save_history=False, param_group_index=None):
         super(PiecewiseLinear, self).__init__(optimizer, param_name, save_history, param_group_index=param_group_index)
 
-        if not isinstance(milestones_values, Sequence) or len(milestones_values) < 1:
+        if not isinstance(milestones_values, Sequence):
+            raise TypeError(
+                "Argument milestones_values should be a list or tuple, but given {}".format(type(milestones_values))
+            )
+        if len(milestones_values) < 1:
             raise ValueError(
-                "Argument milestones_values should be a list or tuple with at least one value, "
-                "but given {}".format(type(milestones_values))
+                "Argument milestones_values should be with at least one value, "
+                "but given {}".format(milestones_values)
             )
 
         values = []
@@ -869,7 +887,7 @@ def __init__(self, optimizer, param_name, milestones_values, save_history=False,
             if not isinstance(pair, Sequence) or len(pair) != 2:
                 raise ValueError("Argument milestones_values should be a list of pairs (milestone, param_value)")
             if not isinstance(pair[0], numbers.Integral):
-                raise ValueError("Value of a milestone should be integer, but given {}".format(type(pair[0])))
+                raise TypeError("Value of a milestone should be integer, but given {}".format(type(pair[0])))
             if len(milestones) > 0 and pair[0] < milestones[-1]:
                 raise ValueError(
                     "Milestones should be increasing integers, but given {} is smaller "
@@ -938,16 +956,24 @@ class ParamGroupScheduler(ParamScheduler):
     """
 
    def __init__(self, schedulers: List[ParamScheduler], names: Optional[List[str]] = None, save_history=False):
-        if not (
-            isinstance(schedulers, Sequence) and all(isinstance(scheduler, ParamScheduler) for scheduler in schedulers)
-        ):
-            raise ValueError("Argument schedulers should be a list/tuple of parameter schedulers")
+        if not isinstance(schedulers, Sequence):
+            raise TypeError("Argument schedulers should be a list/tuple, but given {}".format(schedulers))
+
+        if not all(isinstance(scheduler, ParamScheduler) for scheduler in schedulers):
+            raise ValueError(
+                "Argument schedulers should be a list/tuple of parameter schedulers, but given {}".format(schedulers)
+            )
 
         if names is None:
             names = [s.param_name for s in schedulers]
 
-        if not (isinstance(names, (list, tuple)) and all(isinstance(n, str) for n in names)):
-            raise ValueError("Argument names should be a list/tuple of parameter scheduler's names")
+        if not isinstance(names, (list, tuple)):
+            raise TypeError("Argument names should be a list/tuple, but given {}".format(names))
+
+        if not all(isinstance(n, str) for n in names):
+            raise ValueError(
+                "Argument names should be a list/tuple of parameter scheduler's names, but given {}".format(names)
+            )
 
         if len(names) != len(schedulers):
             raise ValueError("{} should be equal {}".format(len(schedulers), len(names)))
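
Splitting each combined isinstance-and-value check into a TypeError branch followed by a ValueError branch lets callers (and the tests below) assert the exact failure mode instead of a catch-all ValueError. A runnable sketch of the resulting behavior, assuming a hypothetical _check_durations helper modeled on the ConcatScheduler checks above:

import numbers

def _check_durations(durations):
    # Hypothetical helper mirroring ConcatScheduler's reworked validation.
    if not isinstance(durations, (list, tuple)):
        raise TypeError("Argument durations should be list/tuple, but given {}".format(durations))
    if not all([isinstance(t, numbers.Integral) for t in durations]):
        raise ValueError("Argument durations should be list/tuple of integers, but given {}".format(durations))

try:
    _check_durations("abc")  # a str is the wrong kind of argument
except TypeError as e:
    print("TypeError:", e)

try:
    _check_durations([15, 12.0])  # right kind, but 12.0 is not integral
except ValueError as e:
    print("ValueError:", e)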
tests/ignite/contrib/handlers/test_param_scheduler.py (61 changes: 41 additions & 20 deletions)
@@ -42,7 +42,7 @@ def test_param_scheduler_asserts():
 
     lr_scheduler = FakeParamScheduler(optimizer, "lr")
 
-    with pytest.raises(RuntimeError, match=r"size of value is different than optimizer_param_groups"):
+    with pytest.raises(ValueError, match=r"size of value is different than optimizer_param_groups"):
         lr_scheduler(None)
 
     with pytest.raises(TypeError, match=r"Argument state_dict should be a dictionary, but given"):
@@ -302,29 +302,37 @@ def test_concat_scheduler_asserts():
     scheduler_1 = LinearCyclicalScheduler(optimizer, "lr", start_value=1.0, end_value=0.0, cycle_size=10)
     scheduler_2 = CosineAnnealingScheduler(optimizer, "lr", start_value=0.0, end_value=1.0, cycle_size=10)
 
-    with pytest.raises(ValueError):
+    with pytest.raises(TypeError, match=r"Argument schedulers should be a sequence"):
         ConcatScheduler(schedulers=None, durations=[])
 
+    with pytest.raises(ValueError, match=r"Argument schedulers should be of more than one parameter schedulers"):
+        ConcatScheduler(schedulers=[], durations=[])
+
-    with pytest.raises(ValueError):
+    with pytest.raises(ValueError, match=r"Argument schedulers should be of more than one parameter schedulers"):
         ConcatScheduler(schedulers=[scheduler_1], durations=[10])
 
-    with pytest.raises(TypeError):
+    with pytest.raises(TypeError, match=r"Value at index 1 of schedulers should be a parameter scheduler"):
         ConcatScheduler(schedulers=[scheduler_1, 12], durations=[10])
 
-    with pytest.raises(ValueError):
+    with pytest.raises(ValueError, match=r"Incorrect number schedulers or duration values"):
         ConcatScheduler(schedulers=[scheduler_1, scheduler_2], durations=[10, 5])
 
-    with pytest.raises(ValueError):
+    with pytest.raises(ValueError, match=r"Argument durations should be list/tuple of integers"):
         ConcatScheduler(schedulers=[scheduler_1, scheduler_2, scheduler_2], durations=[15, 12.0])
 
-    with pytest.raises(ValueError):
+    with pytest.raises(TypeError, match=r"Argument durations should be list/tuple"):
         ConcatScheduler(schedulers=[scheduler_1, scheduler_2], durations="abc")
 
-    with pytest.raises(ValueError):
+    with pytest.raises(TypeError, match=r"Argument param_names should be list or tuple"):
         ConcatScheduler.simulate_values(
             num_events=123, schedulers=[scheduler_1, scheduler_2], durations=[15], param_names="abc"
         )
 
+    with pytest.raises(ValueError, match=r"Argument param_names should be list or tuple of strings"):
+        ConcatScheduler.simulate_values(
+            num_events=123, schedulers=[scheduler_1, scheduler_2], durations=[15], param_names=[1]
+        )
+
     optimizer_2 = torch.optim.SGD([tensor], lr=0)
     scheduler_3 = CosineAnnealingScheduler(optimizer_2, "lr", start_value=0.0, end_value=1.0, cycle_size=10)
 
@@ -609,10 +617,14 @@ def save_lr(engine):
 
 def test_lr_scheduler_asserts():
 
-    with pytest.raises(TypeError):
+    with pytest.raises(
+        TypeError, match=r"Argument lr_scheduler should be a subclass of torch.optim.lr_scheduler._LRScheduler"
+    ):
         LRScheduler(123)
 
-    with pytest.raises(TypeError):
+    with pytest.raises(
+        TypeError, match=r"Argument lr_scheduler should be a subclass of torch.optim.lr_scheduler._LRScheduler"
+    ):
         LRScheduler.simulate_values(1, None)
 
 
@@ -686,19 +698,22 @@ def test_piecewiselinear_asserts():
     tensor = torch.zeros([1], requires_grad=True)
     optimizer = torch.optim.SGD([tensor], lr=0)
 
-    with pytest.raises(ValueError):
+    with pytest.raises(TypeError, match=r"Argument milestones_values should be a list or tuple"):
         PiecewiseLinear(optimizer, "lr", milestones_values=None)
 
+    with pytest.raises(ValueError, match=r"Argument milestones_values should be with at least one value"):
+        PiecewiseLinear(optimizer, "lr", milestones_values=[])
+
-    with pytest.raises(ValueError):
+    with pytest.raises(ValueError, match=r"Argument milestones_values should be a list of pairs"):
         PiecewiseLinear(optimizer, "lr", milestones_values=[(0.5,)])
 
-    with pytest.raises(ValueError):
+    with pytest.raises(ValueError, match=r"Argument milestones_values should be a list of pairs"):
         PiecewiseLinear(optimizer, "lr", milestones_values=[(10, 0.5), (0.6,)])
 
-    with pytest.raises(ValueError):
+    with pytest.raises(ValueError, match=r"Milestones should be increasing integers"):
         PiecewiseLinear(optimizer, "lr", milestones_values=[(10, 0.5), (5, 0.6)])
 
-    with pytest.raises(ValueError):
+    with pytest.raises(TypeError, match=r"Value of a milestone should be integer"):
         PiecewiseLinear(optimizer, "lr", milestones_values=[(0.5, 1)])
 
 
@@ -895,7 +910,7 @@ def test_create_lr_scheduler_with_warmup():
             torch_lr_scheduler, warmup_start_value=0.0, warmup_end_value=0.1, warmup_duration=1
         )
 
-    with pytest.raises(ValueError, match=r"Argument warmup_duration should be at least 2 events"):
+    with pytest.raises(TypeError, match=r"Argument warmup_duration should be integer"):
         create_lr_scheduler_with_warmup(
             torch_lr_scheduler, warmup_start_value=0.0, warmup_end_value=0.1, warmup_duration="abc"
         )
@@ -1109,16 +1124,22 @@ def test_param_group_scheduler_asserts():
         optimizer, "lr", param_group_index=1, start_value=1.0, end_value=0.0, cycle_size=10
     )
 
-    with pytest.raises(ValueError):
+    with pytest.raises(TypeError, match=r"Argument schedulers should be a list/tuple"):
+        ParamGroupScheduler(schedulers=None, names=["a", "b", "c"])
+
+    with pytest.raises(ValueError, match=r"Argument schedulers should be a list/tuple of parameter schedulers"):
         ParamGroupScheduler(schedulers=[0, 1, 2], names=["a", "b", "c"])
 
-    with pytest.raises(ValueError):
+    with pytest.raises(ValueError, match=r"Argument schedulers should be a list/tuple of parameter schedulers"):
         ParamGroupScheduler(schedulers=[lr_scheduler1, "2"], names=["a", "b"])
 
-    with pytest.raises(ValueError):
+    with pytest.raises(TypeError, match=r"Argument names should be a list/tuple"):
         ParamGroupScheduler(schedulers=[lr_scheduler1, lr_scheduler2], names="ab")
 
-    with pytest.raises(ValueError):
+    with pytest.raises(ValueError, match=r"Argument names should be a list/tuple of parameter scheduler's names"):
         ParamGroupScheduler(schedulers=[lr_scheduler1, lr_scheduler2], names=[1, 2])
 
+    with pytest.raises(ValueError, match=r"\d should be equal \d"):
+        ParamGroupScheduler(schedulers=[lr_scheduler1, lr_scheduler2], names=["a"])
+
     scheduler = ParamGroupScheduler(schedulers=[lr_scheduler1, lr_scheduler2], names=["a", "b"])
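
Each rewritten assertion pins the error message via the match argument, which pytest.raises applies to the string form of the exception using re.search, so both plain substrings and regular expressions such as \d should be equal \d work. A self-contained illustration, assuming a hypothetical check_names validator in the style of ParamGroupScheduler.__init__ (not taken from the test suite):

import pytest

def check_names(schedulers, names):
    # Hypothetical validator mirroring the length check in ParamGroupScheduler.
    if len(names) != len(schedulers):
        raise ValueError("{} should be equal {}".format(len(schedulers), len(names)))

def test_check_names_length_mismatch():
    # match is a regex searched against str(exception), not a full match.
    with pytest.raises(ValueError, match=r"\d should be equal \d"):
        check_names(schedulers=["s1", "s2"], names=["a"])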