
Commit 0abaaa7

Enabled code coverage in AutoTuner smoke tests

lint fixes

Signed-off-by: Jeff Ng <[email protected]>

1 parent 2c09ff1 · commit 0abaaa7


8 files changed (+135 additions, -56 deletions)


tools/AutoTuner/src/autotuner/distributed.py

Lines changed: 35 additions & 23 deletions
@@ -216,11 +216,13 @@ def evaluate(self, metrics):
         error = "ERR" in metrics.values() or "ERR" in reference.values()
         not_found = "N/A" in metrics.values() or "N/A" in reference.values()
         if error or not_found:
-            return ERROR_METRIC
+            return (ERROR_METRIC, "-", "-")
         ppa = self.get_ppa(metrics)
         gamma = ppa / 10
         score = ppa * (self.step_ / 100) ** (-1) + (gamma * metrics["num_drc"])
-        return score
+        effective_clk_period = metrics["clk_period"] - metrics["worst_slack"]
+        num_drc = metrics["num_drc"]
+        return (score, effective_clk_period, num_drc)
 
 
 def parse_arguments():
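
With this change evaluate() returns a (score, effective_clk_period, num_drc) tuple instead of a bare score. A minimal caller-side sketch of unpacking it, with a made-up tuple standing in for the real call and an assumed "minimum" objective key (the reporting code itself is not part of this hunk):

    # Stand-in value for what PPAImprov.evaluate(metrics) might now return.
    evaluation = (1234.5, 0.82, 3)
    score, effective_clk_period, num_drc = evaluation
    results = {
        "minimum": score,  # objective the search algorithm minimizes (assumed key name)
        "effective_clk_period": effective_clk_period,
        "num_drc": num_drc,
    }
    print(results)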
@@ -464,32 +466,34 @@ def parse_arguments():
     return args
 
 
-def set_algorithm(experiment_name, config):
+def set_algorithm(
+    algorithm_name, experiment_name, best_params, seed, perturbation, jobs, config
+):
     """
     Configure search algorithm.
     """
     # Pre-set seed if user sets seed to 0
-    if args.seed == 0:
+    if seed == 0:
         print(
             "Warning: you have chosen not to set a seed. Do you wish to continue? (y/n)"
         )
         if input().lower() != "y":
             sys.exit(0)
-        args.seed = None
+        seed = None
     else:
-        torch.manual_seed(args.seed)
-        np.random.seed(args.seed)
-        random.seed(args.seed)
+        torch.manual_seed(seed)
+        np.random.seed(seed)
+        random.seed(seed)
 
-    if args.algorithm == "hyperopt":
+    if algorithm_name == "hyperopt":
         algorithm = HyperOptSearch(
             points_to_evaluate=best_params,
-            random_state_seed=args.seed,
+            random_state_seed=seed,
         )
-    elif args.algorithm == "ax":
+    elif algorithm_name == "ax":
         ax_client = AxClient(
             enforce_sequential_optimization=False,
-            random_seed=args.seed,
+            random_seed=seed,
         )
         AxClientMetric = namedtuple("AxClientMetric", "minimize")
         ax_client.create_experiment(
@@ -498,25 +502,25 @@ def set_algorithm(experiment_name, config):
             objectives={METRIC: AxClientMetric(minimize=True)},
         )
         algorithm = AxSearch(ax_client=ax_client, points_to_evaluate=best_params)
-    elif args.algorithm == "optuna":
-        algorithm = OptunaSearch(points_to_evaluate=best_params, seed=args.seed)
-    elif args.algorithm == "pbt":
-        print("Warning: PBT does not support seed values. args.seed will be ignored.")
+    elif algorithm_name == "optuna":
+        algorithm = OptunaSearch(points_to_evaluate=best_params, seed=seed)
+    elif algorithm_name == "pbt":
+        print("Warning: PBT does not support seed values. seed will be ignored.")
         algorithm = PopulationBasedTraining(
             time_attr="training_iteration",
-            perturbation_interval=args.perturbation,
+            perturbation_interval=perturbation,
             hyperparam_mutations=config,
             synch=True,
         )
-    elif args.algorithm == "random":
+    elif algorithm_name == "random":
         algorithm = BasicVariantGenerator(
-            max_concurrent=args.jobs,
-            random_state=args.seed,
+            max_concurrent=jobs,
+            random_state=seed,
         )
 
     # A wrapper algorithm for limiting the number of concurrent trials.
-    if args.algorithm not in ["random", "pbt"]:
-        algorithm = ConcurrencyLimiter(algorithm, max_concurrent=args.jobs)
+    if algorithm_name not in ["random", "pbt"]:
+        algorithm = ConcurrencyLimiter(algorithm, max_concurrent=jobs)
 
     return algorithm
 

@@ -607,7 +611,15 @@ def main():
 
     if args.mode == "tune":
         best_params = set_best_params(args.platform, args.design)
-        search_algo = set_algorithm(args.experiment, config_dict)
+        search_algo = set_algorithm(
+            args.algorithm,
+            args.experiment,
+            best_params,
+            args.seed,
+            args.perturbation,
+            args.jobs,
+            config_dict,
+        )
         TrainClass = set_training_class(args.eval)
         # PPAImprov requires a reference file to compute training scores.
         if args.eval == "ppa-improv":
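
Because set_algorithm() no longer reads the global args namespace, it can also be called directly, e.g. from a unit test. An illustrative sketch with made-up values (assumes autotuner.distributed imports cleanly in the current environment; not part of this commit):

    from autotuner.distributed import set_algorithm

    # "random" builds a BasicVariantGenerator capped at `jobs` concurrent trials;
    # a nonzero seed avoids the interactive "no seed" prompt shown above.
    search_algo = set_algorithm(
        algorithm_name="random",
        experiment_name="example-experiment",
        best_params=[],
        seed=42,
        perturbation=25,  # only used by the "pbt" branch
        jobs=2,
        config={},  # hyperparameter space; only mutated by "pbt"
    )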
tools/AutoTuner/test/autotuner_test_utils.py (new file)

Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
+#!/usr/bin/env python3
+
+import os
+
+
+class AutoTunerTestUtils:
+    @staticmethod
+    def get_exec_cmd():
+        """
+        Returns the execution command based on whether this is a coverage run or
+        not.
+
+        Note that you need to run coverage combine after the runs complete to
+        get the coverage of the parent plus the child invocations
+        """
+
+        if "COVERAGE_RUN" in os.environ:
+            exec = "coverage run --parallel-mode --omit=*/site-packages/*,*/dist-packages/*"
+        else:  # pragma: no cover
+            exec = "python3"
+        return exec + " -m autotuner.distributed"
+
+
+if __name__ == "__main__":  # pragma: no cover
+    print(AutoTunerTestUtils.get_exec_cmd())
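
A minimal sketch of the intended workflow, assuming the test launcher exports COVERAGE_RUN (the variable checked above) and that coverage is installed; not part of this commit:

    import os
    import subprocess

    from autotuner_test_utils import AutoTunerTestUtils

    os.environ["COVERAGE_RUN"] = "1"  # any value selects the coverage branch
    cmd = AutoTunerTestUtils.get_exec_cmd()
    # cmd is now "coverage run --parallel-mode --omit=... -m autotuner.distributed";
    # each child process writes its own .coverage.* data file.
    print(cmd)

    # After all smoke-test runs finish, merge parent and child data and report:
    subprocess.run("coverage combine && coverage report", shell=True, check=False)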

tools/AutoTuner/test/ref_file_check.py

Lines changed: 59 additions & 27 deletions
@@ -1,38 +1,70 @@
 import unittest
 import subprocess
 import os
-
-cur_dir = os.path.dirname(os.path.abspath(__file__))
-src_dir = os.path.join(cur_dir, "../src/autotuner")
-orfs_dir = os.path.join(cur_dir, "../../../flow")
-os.chdir(src_dir)
+from autotuner_test_utils import AutoTunerTestUtils
 
 
 class RefFileCheck(unittest.TestCase):
-    # only test 1 platform/design.
-    platform = "asap7"
-    design = "gcd"
+    """
+    Tests situations where a referenced file (SDC or FastRoute) is not
+    defined in the AutoTuner config
+    """
 
     def setUp(self):
-        configs = [
-            "../../test/files/no_sdc_ref.json",
-            "../../test/files/no_fr_ref.json",
-        ]
-        self.commands = [
-            f"python3 distributed.py"
-            f" --design {self.design}"
-            f" --platform {self.platform}"
-            f" --config {c}"
-            f" tune --samples 1"
-            for c in configs
-        ]
-
-    # Make this a test case
-    def test_files(self):
-        for c in self.commands:
-            out = subprocess.run(c, shell=True)
-            failed = out.returncode != 0
-            self.assertTrue(failed)
+        self._cur_dir = os.path.dirname(os.path.abspath(__file__))
+        src_dir = os.path.join(self._cur_dir, "../src")
+        os.chdir(src_dir)
+
+        self._exec = AutoTunerTestUtils.get_exec_cmd()
+
+    def _execute_autotuner(self, platform, design, config_file, error_code=None):
+        full_path = os.path.abspath(os.path.join(self._cur_dir, config_file))
+
+        cmd = f"{self._exec} --design {design} --platform {platform} --config {full_path} tune --samples 1"
+
+        out = subprocess.run(cmd, shell=True, text=True, capture_output=True)
+        failed = out.returncode != 0
+        self.assertTrue(failed, f"AT run with {config_file} passed")
+        if error_code:
+            self.assertTrue(
+                error_code in out.stdout,
+                f"Didn't find error code {error_code} in output '{out.stdout}'",
+            )
+
+    def test_asap_gcd_no_sdc(self):
+        """
+        Tests when SDC file is not defined, which is an error for all
+        platforms and designs
+        """
+
+        platform = "asap7"
+        design = "gcd"
+        config_file = "files/no_sdc_ref.json"
+        error_code = "[ERROR TUN-0020] No SDC reference"
+        self._execute_autotuner(platform, design, config_file, error_code)
+
+    def test_asap_gcd_no_fr(self):
+        """
+        Tests when FastRoute file is not defined, which is not an error for
+        the asap7 platform. The run is still expected to fail
+        """
+
+        platform = "asap7"
+        design = "gcd"
+        config_file = "files/no_fr_ref.json"
+        self._execute_autotuner(platform, design, config_file)
+
+    def test_ihp_gcd_no_fr(self):
+        """
+        Tests when FastRoute file is not defined, which is an error for
+        any non-asap7 platform.
+        """
+
+        platform = "ihp-sg13g2"
+        design = "gcd"
+        config_file = "files/no_fr_ref.json"
+        error_code = "[ERROR TUN-0021] No FastRoute Tcl"
+        self._execute_autotuner(platform, design, config_file, error_code)
 
 
 if __name__ == "__main__":
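
Further negative cases can reuse _execute_autotuner(); a hypothetical example that would sit inside RefFileCheck (the sky130hd platform choice is an assumption, not part of this commit):

    # Hypothetical additional test following the same pattern as the cases above.
    def test_sky130hd_gcd_no_sdc(self):
        """
        A missing SDC reference should be rejected on any platform.
        """
        self._execute_autotuner(
            platform="sky130hd",
            design="gcd",
            config_file="files/no_sdc_ref.json",
            error_code="[ERROR TUN-0020] No SDC reference",
        )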

tools/AutoTuner/test/resume_check.py

Lines changed: 4 additions & 2 deletions
@@ -2,11 +2,12 @@
 import subprocess
 import os
 import time
+from autotuner_test_utils import AutoTunerTestUtils
 
 from contextlib import contextmanager
 
 cur_dir = os.path.dirname(os.path.abspath(__file__))
-src_dir = os.path.join(cur_dir, "../src/autotuner")
+src_dir = os.path.join(cur_dir, "../src")
 orfs_dir = os.path.join(cur_dir, "../../../flow")
 os.chdir(src_dir)
 

@@ -45,8 +46,9 @@ def setUp(self):
         # Cast to 1 decimal place
         res_per_trial = float("{:.1f}".format(self.num_cpus / self.samples))
         options = ["", "--resume"]
+        self.exec = AutoTunerTestUtils.get_exec_cmd()
         self.commands = [
-            f"python3 distributed.py"
+            f"{self.exec}"
             f" --design {self.design}"
             f" --platform {self.platform}"
             f" --config {self.config}"

tools/AutoTuner/test/smoke_test_algo_eval.py

Lines changed: 3 additions & 1 deletion
@@ -1,6 +1,7 @@
 import unittest
 import subprocess
 import os
+from autotuner_test_utils import AutoTunerTestUtils
 
 cur_dir = os.path.dirname(os.path.abspath(__file__))
 orfs_dir = os.path.join(cur_dir, "../../../flow")

@@ -19,8 +20,9 @@ def setUp(self):
         _algo = ["hyperopt", "ax", "optuna", "pbt", "random"]
         _eval = ["default", "ppa-improv"]
         self.matrix = [(a, e) for a in _algo for e in _eval]
+        self.exec = AutoTunerTestUtils.get_exec_cmd()
         self.commands = [
-            f"python3 -m autotuner.distributed"
+            f"{self.exec}"
             f" --design {self.design}"
             f" --platform {self.platform}"
             f" --experiment {self.experiment}"

tools/AutoTuner/test/smoke_test_sample_iteration.py

Lines changed: 3 additions & 1 deletion
@@ -1,6 +1,7 @@
 import unittest
 import subprocess
 import os
+from autotuner_test_utils import AutoTunerTestUtils
 
 cur_dir = os.path.dirname(os.path.abspath(__file__))
 

@@ -16,8 +17,9 @@ def setUp(self):
         )
         self.experiment = f"smoke-test-sample-iteration-{self.platform}"
         self.matrix = [(5, 1), (1, 5), (2, 2), (1, 1)]
+        self.exec = AutoTunerTestUtils.get_exec_cmd()
         self.commands = [
-            f"python3 -m autotuner.distributed"
+            f"{self.exec}"
             f" --design {self.design}"
             f" --platform {self.platform}"
             f" --experiment {self.experiment}"

tools/AutoTuner/test/smoke_test_sweep.py

Lines changed: 3 additions & 1 deletion
@@ -2,6 +2,7 @@
 import subprocess
 import os
 import json
+from autotuner_test_utils import AutoTunerTestUtils
 
 cur_dir = os.path.dirname(os.path.abspath(__file__))
 

@@ -30,8 +31,9 @@ def setUp(self):
         core = os.cpu_count()
         self.jobs = 4 if core >= 4 else core
         self.experiment = f"smoke-test-sweep-{self.platform}"
+        self.exec = AutoTunerTestUtils.get_exec_cmd()
         self.command = (
-            "python3 -m autotuner.distributed"
+            f"{self.exec}"
             f" --design {self.design}"
             f" --platform {self.platform}"
             f" --experiment {self.experiment}"
f" --experiment {self.experiment}"

tools/AutoTuner/test/smoke_test_tune.py

Lines changed: 3 additions & 1 deletion
@@ -1,6 +1,7 @@
 import unittest
 import subprocess
 import os
+from autotuner_test_utils import AutoTunerTestUtils
 
 cur_dir = os.path.dirname(os.path.abspath(__file__))
 

@@ -15,8 +16,9 @@ def setUp(self):
             f"../../../flow/designs/{self.platform}/{self.design}/autotuner.json",
         )
         self.experiment = f"smoke-test-tune-{self.platform}"
+        self.exec = AutoTunerTestUtils.get_exec_cmd()
         self.command = (
-            "python3 -m autotuner.distributed"
+            f"{self.exec}"
             f" --design {self.design}"
             f" --platform {self.platform}"
             f" --experiment {self.experiment}"
