diff --git a/tools/AutoTuner/src/autotuner/distributed.py b/tools/AutoTuner/src/autotuner/distributed.py
index 87361a5fd9..9e2bdb1b42 100644
--- a/tools/AutoTuner/src/autotuner/distributed.py
+++ b/tools/AutoTuner/src/autotuner/distributed.py
@@ -1011,7 +1011,7 @@ def sweep():
         local_dir=LOCAL_DIR,
         resume=args.resume,
         stop={"training_iteration": args.iterations},
-        resources_per_trial={"cpu": os.cpu_count()/args.jobs},
+        resources_per_trial={"cpu": os.cpu_count() / args.jobs},
         log_to_file=["trail-out.log", "trail-err.log"],
         trial_name_creator=lambda x: f"variant-{x.trainable_name}-{x.trial_id}-ray",
         trial_dirname_creator=lambda x: f"variant-{x.trainable_name}-{x.trial_id}-ray",
diff --git a/tools/AutoTuner/test/resume_check.py b/tools/AutoTuner/test/resume_check.py
index c2c037e7bb..92219eed22 100644
--- a/tools/AutoTuner/test/resume_check.py
+++ b/tools/AutoTuner/test/resume_check.py
@@ -58,7 +58,6 @@ def setUp(self):
             for c in options
         ]
 
-
     def test_tune_resume(self):
         # Goal is to first run the first config (without resume) and then run the second config (with resume)
         # and check if the run is able to complete.
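For context, a minimal sketch of the pattern the first hunk touches: in Ray Tune's
1.x-era tune.run API (the same API sweep() uses above), resources_per_trial reserves
a CPU budget for each trial, so dividing os.cpu_count() by the number of requested
jobs lets at most that many trials run concurrently on one machine. Everything here
besides that pattern (dummy_trainable, the jobs constant, the config dict) is a
hypothetical stand-in, not AutoTuner's actual trainable.

import os

from ray import tune


def dummy_trainable(config):
    # Hypothetical stand-in for AutoTuner's trainable: emit one result
    # per step so Ray Tune's training_iteration counter advances.
    for step in range(config["iterations"]):
        tune.report(score=step)


if __name__ == "__main__":
    jobs = 4  # stand-in for args.jobs
    tune.run(
        dummy_trainable,
        config={"iterations": 3},
        num_samples=jobs,
        stop={"training_iteration": 3},
        # Reserving cpu_count()/jobs CPUs per trial caps concurrency at
        # `jobs` trials, mirroring the patched resources_per_trial line.
        resources_per_trial={"cpu": os.cpu_count() / jobs},
    )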