Skip to content

Commit 586dfaa

Browse files
committed
change annotations from `**kwargs: Mapping` to `**kwargs: Any`
1 parent c858501 commit 586dfaa

File tree

11 files changed

+47
-47
lines changed

11 files changed

+47
-47
lines changed

ignite/contrib/engines/common.py

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ def setup_common_training_handlers(
4747
stop_on_nan: Optional[bool] = True,
4848
clear_cuda_cache: Optional[bool] = True,
4949
save_handler: Optional[Union[Callable, BaseSaveHandler]] = None,
50-
**kwargs: Mapping,
50+
**kwargs: Any,
5151
):
5252
"""Helper method to setup trainer with common handlers (it also supports distributed configuration):
5353
- :class:`~ignite.handlers.TerminateOnNan`
@@ -137,7 +137,7 @@ def _setup_common_training_handlers(
137137
stop_on_nan: Optional[bool] = True,
138138
clear_cuda_cache: Optional[bool] = True,
139139
save_handler: Optional[Union[Callable, BaseSaveHandler]] = None,
140-
**kwargs: Mapping,
140+
**kwargs: Any,
141141
):
142142
if output_path is not None and save_handler is not None:
143143
raise ValueError(
@@ -219,7 +219,7 @@ def _setup_common_distrib_training_handlers(
219219
stop_on_nan: Optional[bool] = True,
220220
clear_cuda_cache: Optional[bool] = True,
221221
save_handler: Optional[Union[Callable, BaseSaveHandler]] = None,
222-
**kwargs: Mapping,
222+
**kwargs: Any,
223223
):
224224

225225
_setup_common_training_handlers(
@@ -313,7 +313,7 @@ def setup_tb_logging(
313313
optimizers: Optional[Union[Optimizer, Dict[str, Optimizer]]] = None,
314314
evaluators: Optional[Union[Engine, Dict[str, Engine]]] = None,
315315
log_every_iters: Optional[int] = 100,
316-
**kwargs: Mapping,
316+
**kwargs: Any,
317317
):
318318
"""Method to setup TensorBoard logging on trainer and a list of evaluators. Logged metrics are:
319319
- Training metrics, e.g. running average loss values
@@ -344,7 +344,7 @@ def setup_visdom_logging(
344344
optimizers: Optional[Union[Optimizer, Dict[str, Optimizer]]] = None,
345345
evaluators: Optional[Union[Engine, Dict[str, Engine]]] = None,
346346
log_every_iters: Optional[int] = 100,
347-
**kwargs: Mapping,
347+
**kwargs: Any,
348348
):
349349
"""Method to setup Visdom logging on trainer and a list of evaluators. Logged metrics are:
350350
- Training metrics, e.g. running average loss values
@@ -374,7 +374,7 @@ def setup_mlflow_logging(
374374
optimizers: Optional[Union[Optimizer, Dict[str, Optimizer]]] = None,
375375
evaluators: Optional[Union[Engine, Dict[str, Engine]]] = None,
376376
log_every_iters: Optional[int] = 100,
377-
**kwargs: Mapping,
377+
**kwargs: Any,
378378
):
379379
"""Method to setup MLflow logging on trainer and a list of evaluators. Logged metrics are:
380380
- Training metrics, e.g. running average loss values
@@ -404,7 +404,7 @@ def setup_neptune_logging(
404404
optimizers: Optional[Union[Optimizer, Dict[str, Optimizer]]] = None,
405405
evaluators: Optional[Union[Engine, Dict[str, Engine]]] = None,
406406
log_every_iters: Optional[int] = 100,
407-
**kwargs: Mapping,
407+
**kwargs: Any,
408408
):
409409
"""Method to setup Neptune logging on trainer and a list of evaluators. Logged metrics are:
410410
- Training metrics, e.g. running average loss values
@@ -434,7 +434,7 @@ def setup_wandb_logging(
434434
optimizers: Optional[Union[Optimizer, Dict[str, Optimizer]]] = None,
435435
evaluators: Optional[Union[Engine, Dict[str, Engine]]] = None,
436436
log_every_iters: Optional[int] = 100,
437-
**kwargs: Mapping,
437+
**kwargs: Any,
438438
):
439439
"""Method to setup WandB logging on trainer and a list of evaluators. Logged metrics are:
440440
- Training metrics, e.g. running average loss values
@@ -464,7 +464,7 @@ def setup_plx_logging(
464464
optimizers: Optional[Union[Optimizer, Dict[str, Optimizer]]] = None,
465465
evaluators: Optional[Union[Engine, Dict[str, Engine]]] = None,
466466
log_every_iters: Optional[int] = 100,
467-
**kwargs: Mapping,
467+
**kwargs: Any,
468468
):
469469
"""Method to setup Polyaxon logging on trainer and a list of evaluators. Logged metrics are:
470470
- Training metrics, e.g. running average loss values
@@ -494,7 +494,7 @@ def setup_trains_logging(
494494
optimizers: Optional[Union[Optimizer, Dict[str, Optimizer]]] = None,
495495
evaluators: Optional[Union[Engine, Dict[str, Engine]]] = None,
496496
log_every_iters: Optional[int] = 100,
497-
**kwargs: Mapping,
497+
**kwargs: Any,
498498
):
499499
"""Method to setup Trains logging on trainer and a list of evaluators. Logged metrics are:
500500
- Training metrics, e.g. running average loss values
@@ -535,7 +535,7 @@ def gen_save_best_models_by_val_score(
535535
n_saved: Optional[int] = 3,
536536
trainer: Optional[Engine] = None,
537537
tag: Optional[str] = "val",
538-
**kwargs: Mapping,
538+
**kwargs: Any,
539539
):
540540
"""Method adds a handler to ``evaluator`` to save ``n_saved`` of best models based on the metric
541541
(named by ``metric_name``) provided by ``evaluator`` (i.e. ``evaluator.state.metrics[metric_name]``).
@@ -593,7 +593,7 @@ def save_best_model_by_val_score(
593593
n_saved: Optional[int] = 3,
594594
trainer: Optional[Engine] = None,
595595
tag: Optional[str] = "val",
596-
**kwargs: Mapping,
596+
**kwargs: Any,
597597
):
598598
"""Method adds a handler to ``evaluator`` to save on a disk ``n_saved`` of best models based on the metric
599599
(named by ``metric_name``) provided by ``evaluator`` (i.e. ``evaluator.state.metrics[metric_name]``).

ignite/contrib/handlers/base_logger.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -166,7 +166,7 @@ def attach(self, engine: Engine, log_handler: Callable, event_name: Any):
166166

167167
return engine.add_event_handler(event_name, log_handler, self, name)
168168

169-
def attach_output_handler(self, engine: Engine, event_name: Any, *args: Any, **kwargs: Mapping):
169+
def attach_output_handler(self, engine: Engine, event_name: Any, *args: Any, **kwargs: Any):
170170
"""Shortcut method to attach `OutputHandler` to the logger.
171171
172172
Args:
@@ -182,7 +182,7 @@ def attach_output_handler(self, engine: Engine, event_name: Any, *args: Any, **k
182182
"""
183183
return self.attach(engine, self._create_output_handler(*args, **kwargs), event_name=event_name)
184184

185-
def attach_opt_params_handler(self, engine: Engine, event_name: Any, *args: Any, **kwargs: Mapping):
185+
def attach_opt_params_handler(self, engine: Engine, event_name: Any, *args: Any, **kwargs: Any):
186186
"""Shortcut method to attach `OptimizerParamsHandler` to the logger.
187187
188188
Args:
@@ -199,11 +199,11 @@ def attach_opt_params_handler(self, engine: Engine, event_name: Any, *args: Any,
199199
self.attach(engine, self._create_opt_params_handler(*args, **kwargs), event_name=event_name)
200200

201201
@abstractmethod
202-
def _create_output_handler(self, engine, *args: Any, **kwargs: Mapping):
202+
def _create_output_handler(self, engine, *args: Any, **kwargs: Any):
203203
pass
204204

205205
@abstractmethod
206-
def _create_opt_params_handler(self, *args: Any, **kwargs: Mapping):
206+
def _create_opt_params_handler(self, *args: Any, **kwargs: Any):
207207
pass
208208

209209
def __enter__(self):

ignite/contrib/handlers/mlflow_logger.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -113,10 +113,10 @@ def close(self):
113113

114114
mlflow.end_run()
115115

116-
def _create_output_handler(self, *args: Any, **kwargs: Mapping):
116+
def _create_output_handler(self, *args: Any, **kwargs: Any):
117117
return OutputHandler(*args, **kwargs)
118118

119-
def _create_opt_params_handler(self, *args: Any, **kwargs: Mapping):
119+
def _create_opt_params_handler(self, *args: Any, **kwargs: Any):
120120
return OptimizerParamsHandler(*args, **kwargs)
121121

122122

ignite/contrib/handlers/neptune_logger.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -182,7 +182,7 @@ def __getattr__(self, attr: Any):
182182

183183
return getattr(neptune, attr)
184184

185-
def __init__(self, *args: Any, **kwargs: Mapping):
185+
def __init__(self, *args: Any, **kwargs: Any):
186186
try:
187187
import neptune
188188
except ImportError:
@@ -208,10 +208,10 @@ def __init__(self, *args: Any, **kwargs: Mapping):
208208
def close(self):
209209
self.stop()
210210

211-
def _create_output_handler(self, *args: Any, **kwargs: Mapping):
211+
def _create_output_handler(self, *args: Any, **kwargs: Any):
212212
return OutputHandler(*args, **kwargs)
213213

214-
def _create_opt_params_handler(self, *args: Any, **kwargs: Mapping):
214+
def _create_opt_params_handler(self, *args: Any, **kwargs: Any):
215215
return OptimizerParamsHandler(*args, **kwargs)
216216

217217

ignite/contrib/handlers/param_scheduler.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -583,7 +583,7 @@ def simulate_values(
583583
schedulers: List[ParamScheduler],
584584
durations: List[int],
585585
param_names: Union[List[str], Tuple[str]] = None,
586-
**kwargs: Mapping,
586+
**kwargs: Any,
587587
):
588588
"""Method to simulate scheduled values during num_events events.
589589
@@ -706,7 +706,7 @@ def get_param(self) -> Union[float, List[float]]:
706706
return lr_list
707707

708708
@classmethod
709-
def simulate_values(cls, num_events: int, lr_scheduler: _LRScheduler, **kwargs: Mapping):
709+
def simulate_values(cls, num_events: int, lr_scheduler: _LRScheduler, **kwargs: Any):
710710
"""Method to simulate scheduled values during num_events events.
711711
712712
Args:
@@ -1081,7 +1081,7 @@ def load_state_dict(self, state_dict: Dict):
10811081
s.load_state_dict(sd)
10821082

10831083
@classmethod
1084-
def simulate_values(cls, num_events: int, schedulers: _LRScheduler, **kwargs: Mapping):
1084+
def simulate_values(cls, num_events: int, schedulers: _LRScheduler, **kwargs: Any):
10851085
"""Method to simulate scheduled values during num_events events.
10861086
10871087
Args:
@@ -1119,7 +1119,7 @@ def simulate_values(cls, num_events: int, schedulers: _LRScheduler, **kwargs: Ma
11191119
return values
11201120

11211121

1122-
def _get_fake_optimizer(optimizer_cls: Optional[Optimizer] = None, **kwargs: Mapping):
1122+
def _get_fake_optimizer(optimizer_cls: Optional[Optimizer] = None, **kwargs: Any):
11231123
t = torch.zeros([1], requires_grad=True)
11241124
if optimizer_cls is None:
11251125
optimizer_cls = torch.optim.SGD

ignite/contrib/handlers/polyaxon_logger.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,7 @@ class PolyaxonLogger(BaseLogger):
9191
9292
"""
9393

94-
def __init__(self, *args: Any, **kwargs: Mapping):
94+
def __init__(self, *args: Any, **kwargs: Any):
9595
try:
9696
from polyaxon_client.tracking import Experiment
9797
except ImportError:
@@ -105,10 +105,10 @@ def __init__(self, *args: Any, **kwargs: Mapping):
105105
def __getattr__(self, attr: Any):
106106
return getattr(self.experiment, attr)
107107

108-
def _create_output_handler(self, *args: Any, **kwargs: Mapping):
108+
def _create_output_handler(self, *args: Any, **kwargs: Any):
109109
return OutputHandler(*args, **kwargs)
110110

111-
def _create_opt_params_handler(self, *args: Any, **kwargs: Mapping):
111+
def _create_opt_params_handler(self, *args: Any, **kwargs: Any):
112112
return OptimizerParamsHandler(*args, **kwargs)
113113

114114

ignite/contrib/handlers/tensorboard_logger.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -149,7 +149,7 @@ class TensorboardLogger(BaseLogger):
149149
150150
"""
151151

152-
def __init__(self, *args: Any, **kwargs: Mapping):
152+
def __init__(self, *args: Any, **kwargs: Any):
153153
try:
154154
from tensorboardX import SummaryWriter
155155
except ImportError:
@@ -167,10 +167,10 @@ def __init__(self, *args: Any, **kwargs: Mapping):
167167
def close(self):
168168
self.writer.close()
169169

170-
def _create_output_handler(self, *args: Any, **kwargs: Mapping):
170+
def _create_output_handler(self, *args: Any, **kwargs: Any):
171171
return OutputHandler(*args, **kwargs)
172172

173-
def _create_opt_params_handler(self, *args: Any, **kwargs: Mapping):
173+
def _create_opt_params_handler(self, *args: Any, **kwargs: Any):
174174
return OptimizerParamsHandler(*args, **kwargs)
175175

176176

ignite/contrib/handlers/tqdm_logger.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -200,14 +200,14 @@ def attach(
200200
super(ProgressBar, self).attach(engine, log_handler, event_name)
201201
engine.add_event_handler(closing_event_name, self._close)
202202

203-
def attach_opt_params_handler(self, engine: Engine, event_name: Any, *args: Any, **kwargs: Mapping):
203+
def attach_opt_params_handler(self, engine: Engine, event_name: Any, *args: Any, **kwargs: Any):
204204
"""Intentionally empty"""
205205
pass
206206

207-
def _create_output_handler(self, *args: Any, **kwargs: Mapping):
207+
def _create_output_handler(self, *args: Any, **kwargs: Any):
208208
return _OutputHandler(*args, **kwargs)
209209

210-
def _create_opt_params_handler(self, *args: Any, **kwargs: Mapping):
210+
def _create_opt_params_handler(self, *args: Any, **kwargs: Any):
211211
"""Intentionally empty"""
212212
pass
213213

ignite/contrib/handlers/trains_logger.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -119,7 +119,7 @@ class TrainsLogger(BaseLogger):
119119
120120
"""
121121

122-
def __init__(self, *_, **kwargs: Mapping):
122+
def __init__(self, *_, **kwargs: Any):
123123
try:
124124
from trains import Task
125125
from trains.binding.frameworks.tensorflow_bind import WeightsGradientHistHelper
@@ -184,10 +184,10 @@ def bypass_mode(cls) -> bool:
184184
def close(self):
185185
self.trains_logger.flush()
186186

187-
def _create_output_handler(self, *args: Any, **kwargs: Mapping):
187+
def _create_output_handler(self, *args: Any, **kwargs: Any):
188188
return OutputHandler(*args, **kwargs)
189189

190-
def _create_opt_params_handler(self, *args: Any, **kwargs: Mapping):
190+
def _create_opt_params_handler(self, *args: Any, **kwargs: Any):
191191
return OptimizerParamsHandler(*args, **kwargs)
192192

193193

@@ -632,7 +632,7 @@ class TrainsSaver(DiskSaver):
632632
"""
633633

634634
def __init__(
635-
self, logger: TrainsLogger = None, output_uri: str = None, dirname: str = None, *args: Any, **kwargs: Mapping
635+
self, logger: TrainsLogger = None, output_uri: str = None, dirname: str = None, *args: Any, **kwargs: Any
636636
):
637637

638638
self._setup_check_trains(logger, output_uri)

ignite/contrib/handlers/visdom_logger.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -138,7 +138,7 @@ class VisdomLogger(BaseLogger):
138138
139139
"""
140140

141-
def __init__(self, server=None, port=None, num_workers=1, raise_exceptions=True, **kwargs: Mapping):
141+
def __init__(self, server=None, port=None, num_workers=1, raise_exceptions=True, **kwargs: Any):
142142
try:
143143
import visdom
144144
except ImportError:
@@ -195,10 +195,10 @@ def close(self):
195195
self.executor.shutdown()
196196
self.vis = None
197197

198-
def _create_output_handler(self, *args: Any, **kwargs: Mapping):
198+
def _create_output_handler(self, *args: Any, **kwargs: Any):
199199
return OutputHandler(*args, **kwargs)
200200

201-
def _create_opt_params_handler(self, *args: Any, **kwargs: Mapping):
201+
def _create_opt_params_handler(self, *args: Any, **kwargs: Any):
202202
return OptimizerParamsHandler(*args, **kwargs)
203203

204204

@@ -553,11 +553,11 @@ def __init__(self, result: Any):
553553
def result(self):
554554
return self._output
555555

556-
def __init__(self, *args: Any, **kwargs: Mapping):
556+
def __init__(self, *args: Any, **kwargs: Any):
557557
pass
558558

559-
def submit(self, fn: Callable, **kwargs: Mapping):
559+
def submit(self, fn: Callable, **kwargs: Any):
560560
return _DummyExecutor._DummyFuture(fn(**kwargs))
561561

562-
def shutdown(self, *args: Any, **kwargs: Mapping):
562+
def shutdown(self, *args: Any, **kwargs: Any):
563563
pass

ignite/contrib/handlers/wandb_logger.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -116,7 +116,7 @@ def score_function(engine):
116116
evaluator.add_event_handler(Events.COMPLETED, model_checkpoint, {'model': model})
117117
"""
118118

119-
def __init__(self, *args: Any, **kwargs: Mapping):
119+
def __init__(self, *args: Any, **kwargs: Any):
120120
try:
121121
import wandb
122122

@@ -132,10 +132,10 @@ def __init__(self, *args: Any, **kwargs: Mapping):
132132
def __getattr__(self, attr: Any):
133133
return getattr(self._wandb, attr)
134134

135-
def _create_output_handler(self, *args: Any, **kwargs: Mapping):
135+
def _create_output_handler(self, *args: Any, **kwargs: Any):
136136
return OutputHandler(*args, **kwargs)
137137

138-
def _create_opt_params_handler(self, *args: Any, **kwargs: Mapping):
138+
def _create_opt_params_handler(self, *args: Any, **kwargs: Any):
139139
return OptimizerParamsHandler(*args, **kwargs)
140140

141141

0 commit comments

Comments (0)