
Commit f356a92

disable amp by default on cpu
1 parent 01b5408 commit f356a92


1 file changed: benchmarks/torchbench_model.py (4 additions, 0 deletions)
@@ -191,6 +191,7 @@ def add_torchbench_dir(self):
     else:
       raise Exception("Torch Benchmark folder not found.")
 
+    print("this is the torchbench folder.")
     return torchbench_dir
 
   def list_model_configs(self):
@@ -373,6 +374,9 @@ def is_accelerator_tpu(self):
     return self.benchmark_experiment.accelerator == "tpu"
 
   def use_amp(self):
+    # AMP is only supported on cuda and tpu, not on cpu.
+    if self.benchmark_experiment.accelerator == "cpu":
+      return False
    return self.is_training() or self.model_name in config(
    ).dtype.force_amp_for_fp16_bf16_models
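Note on the change: after this commit, use_amp() returns False whenever the experiment's accelerator is "cpu", so mixed precision is only requested on cuda and tpu. As a rough illustration of how such a flag is typically consumed, the sketch below gates a forward pass with torch.autocast, simplified to the cuda/cpu case. The helper run_one_step and its arguments are hypothetical placeholders for this sketch, not code from benchmarks/torchbench_model.py.

import torch

def run_one_step(model, example_inputs, device_type, amp_enabled):
  # Hypothetical helper for illustration only. With this commit,
  # amp_enabled (i.e. use_amp()) is always False when device_type is "cpu",
  # so the autocast context below becomes a no-op there.
  with torch.autocast(device_type=device_type, enabled=amp_enabled):
    return model(*example_inputs)

For example, run_one_step(model, inputs, "cpu", amp_enabled=False) runs the model in its default precision, while a cuda run with amp_enabled=True executes the forward pass under autocast.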