deepspeed/runtime/config.py (3 additions, 1 deletion)
@@ -630,10 +630,12 @@ def _do_error_check(self):
         if self.zero_enabled:
             assert self.fp16_enabled, "DeepSpeedConfig: ZeRO is only supported if fp16 is enabled"
             assert self.zero_optimization_stage <= MAX_STAGE_ZERO_OPTIMIZATION, "DeepSpeedConfig: Maximum supported ZeRO stage is {}".format(MAX_STAGE_ZERO_OPTIMIZATION)
+            if self.zero_config.cpu_offload is True:
+                assert self.zero_optimization_stage == ZERO_OPTIMIZATION_GRADIENTS, "DeepSpeedConfig: cpu-offload supported ZeRO stage is {}".format(ZERO_OPTIMIZATION_GRADIENTS)
 
         assert self.train_micro_batch_size_per_gpu, "DeepSpeedConfig: {} is not defined".format(TRAIN_MICRO_BATCH_SIZE_PER_GPU)
 
-        assert self.gradient_accumulation_steps, 'DeepSpeedConfig: {} is not defined'.format(
+        assert self.gradient_accumulation_steps, "DeepSpeedConfig: {} is not defined".format(
             GRADIENT_ACCUMULATION_STEPS)
 
             "'max_grad_norm' is not supported as an optimizer parameter, please switch to using the deepspeed parameter 'gradient_clipping' see: https://www.deepspeed.ai/docs/config-json/#gradient-clipping for more details"