diff --git a/tools/AutoTuner/src/autotuner/distributed.py b/tools/AutoTuner/src/autotuner/distributed.py
index 8bf9caba4c..87361a5fd9 100644
--- a/tools/AutoTuner/src/autotuner/distributed.py
+++ b/tools/AutoTuner/src/autotuner/distributed.py
@@ -750,13 +750,6 @@ def parse_arguments():
         default=1,
         help="Number of iterations for tuning.",
     )
-    tune_parser.add_argument(
-        "--resources_per_trial",
-        type=float,
-        metavar="",
-        default=1,
-        help="Number of CPUs to request for each tunning job.",
-    )
     tune_parser.add_argument(
         "--reference",
         type=str,
@@ -1018,7 +1011,7 @@ def sweep():
         local_dir=LOCAL_DIR,
         resume=args.resume,
         stop={"training_iteration": args.iterations},
-        resources_per_trial={"cpu": args.resources_per_trial},
+        resources_per_trial={"cpu": os.cpu_count()/args.jobs},
         log_to_file=["trail-out.log", "trail-err.log"],
         trial_name_creator=lambda x: f"variant-{x.trainable_name}-{x.trial_id}-ray",
         trial_dirname_creator=lambda x: f"variant-{x.trainable_name}-{x.trial_id}-ray",
diff --git a/tools/AutoTuner/test/resume_check.py b/tools/AutoTuner/test/resume_check.py
index 5b0d2f49e6..c2c037e7bb 100644
--- a/tools/AutoTuner/test/resume_check.py
+++ b/tools/AutoTuner/test/resume_check.py
@@ -57,7 +57,7 @@ def setUp(self):
             f" {c}"
             for c in options
         ]
-        self.failCommands = []  # TODO
+

     def test_tune_resume(self):
         # Goal is to first run the first config (without resume) and then run the second config (with resume)
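
The new expression `os.cpu_count()/args.jobs` in sweep() splits the host's cores evenly across the concurrent jobs, replacing the removed --resources_per_trial flag. Two caveats worth noting: os.cpu_count() can return None on platforms where the count is undeterminable, and the plain division yields a float (Ray accepts fractional CPU requests, so that part is fine). Below is a minimal guarded sketch of the same idea; cpus_per_trial is a hypothetical helper written for illustration, not part of this patch.

import os


def cpus_per_trial(jobs: int) -> float:
    """Hypothetical helper (not in this patch): CPUs to request per trial.

    Same idea as the expression added in sweep(), but it guards against
    os.cpu_count() returning None and never requests less than one CPU
    when more jobs are launched than there are cores.
    """
    total_cpus = os.cpu_count() or 1  # cpu_count() may return None
    return max(1.0, total_cpus / jobs)


if __name__ == "__main__":
    # e.g. on a 16-core host: 4 jobs -> 4.0 CPUs each, 32 jobs -> 1.0 each
    for jobs in (1, 4, 32):
        print(jobs, cpus_per_trial(jobs))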