Updated format
parent 2afb91596f
commit a6263c44af
@@ -72,9 +72,7 @@ parser.add_argument(
     help="Dummy run shows final generated VPR commands",
 )
 parser.add_argument("--debug", action="store_true", help="Run script in debug mode")
-parser.add_argument(
-    "--continue_on_fail", action="store_true", help="Exit script with return code"
-)
+parser.add_argument("--continue_on_fail", action="store_true", help="Exit script with return code")
 parser.add_argument(
     "--show_thread_logs", action="store_true", help="Skips logs from running thread"
 )
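For reference, the switches touched by this hunk are plain store_true flags, so each defaults to False and flips to True when passed. A minimal standalone sketch; the prog name and the sample argv are illustrative, not part of this commit:

import argparse

# Same three flags as in the hunk above; everything else here is invented.
parser = argparse.ArgumentParser(prog="run_fpga_task")
parser.add_argument("--debug", action="store_true", help="Run script in debug mode")
parser.add_argument("--continue_on_fail", action="store_true", help="Exit script with return code")
parser.add_argument("--show_thread_logs", action="store_true", help="Skips logs from running thread")

args = parser.parse_args(["--debug", "--continue_on_fail"])
print(args.debug, args.continue_on_fail, args.show_thread_logs)  # True True False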
@@ -87,24 +85,16 @@ task_script_dir = os.path.dirname(os.path.abspath(__file__))
 script_env_vars = {
     "PATH": {
         "OPENFPGA_FLOW_PATH": task_script_dir,
-        "VPR_ARCH_PATH": os.path.join(
-            "${PATH:OPENFPGA_PATH}", "openfpga_flow", "vpr_arch"
-        ),
-        "OF_ARCH_PATH": os.path.join(
-            "${PATH:OPENFPGA_PATH}", "openfpga_flow", "openfpga_arch"
-        ),
+        "VPR_ARCH_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "openfpga_flow", "vpr_arch"),
+        "OF_ARCH_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "openfpga_flow", "openfpga_arch"),
         "OPENFPGA_SHELLSCRIPT_PATH": os.path.join(
             "${PATH:OPENFPGA_PATH}", "openfpga_flow", "OpenFPGAShellScripts"
         ),
-        "BENCH_PATH": os.path.join(
-            "${PATH:OPENFPGA_PATH}", "openfpga_flow", "benchmarks"
-        ),
+        "BENCH_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "openfpga_flow", "benchmarks"),
         "TECH_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "openfpga_flow", "tech"),
         "SPICENETLIST_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "SpiceNetlists"),
         "VERILOG_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "VerilogNetlists"),
-        "OPENFPGA_PATH": os.path.abspath(
-            os.path.join(task_script_dir, os.pardir, os.pardir)
-        ),
+        "OPENFPGA_PATH": os.path.abspath(os.path.join(task_script_dir, os.pardir, os.pardir)),
     }
 }
 config = ConfigParser(interpolation=ExtendedInterpolation())
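The ${PATH:OPENFPGA_PATH} placeholders in this dict only make sense together with the ConfigParser on the last context line: once the dict is loaded into it, ExtendedInterpolation expands ${section:option} references on access. A minimal sketch of that behaviour; the read_dict call and the sample path are assumptions for illustration, not lines from this commit:

import os
from configparser import ConfigParser, ExtendedInterpolation

# Cut-down stand-in for script_env_vars; only two keys, values invented.
env = {
    "PATH": {
        "OPENFPGA_PATH": "/home/user/OpenFPGA",
        "VPR_ARCH_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "openfpga_flow", "vpr_arch"),
    }
}

config = ConfigParser(interpolation=ExtendedInterpolation())
config.read_dict(env)

# The ${PATH:OPENFPGA_PATH} reference is expanded when the value is read.
print(config["PATH"]["VPR_ARCH_PATH"])
# -> /home/user/OpenFPGA/openfpga_flow/vpr_arch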
@@ -252,8 +242,7 @@ def generate_each_task_actions(taskname):
     missing_section = list(set(required_sec) - set(task_conf.sections()))
     if missing_section:
         clean_up_and_exit(
-            "Missing sections %s" % " ".join(missing_section)
-            + " in task configuration file"
+            "Missing sections %s" % " ".join(missing_section) + " in task configuration file"
         )

     # Declare varibles to access sections
@@ -274,9 +263,7 @@ def generate_each_task_actions(taskname):
         clean_up_and_exit("Found duplicate architectures in config file")

     # Get Flow information
-    logger.info(
-        'Running "%s" flow', GeneralSection.get("fpga_flow", fallback="yosys_vpr")
-    )
+    logger.info('Running "%s" flow', GeneralSection.get("fpga_flow", fallback="yosys_vpr"))

     # Check if specified benchmark files exist
     benchmark_list = []
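Two things keep the collapsed logger.info call behaving the same way: GeneralSection.get falls back to "yosys_vpr" when the task config has no fpga_flow key, and logging's %-style arguments are only substituted when the record is emitted. A small sketch under those assumptions; the logger name and the empty GENERAL section are invented for illustration:

import logging
from configparser import ConfigParser, ExtendedInterpolation

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("example")  # logger name is illustrative

config = ConfigParser(interpolation=ExtendedInterpolation())
config.read_dict({"GENERAL": {}})  # deliberately missing fpga_flow
GeneralSection = config["GENERAL"]

# fallback= supplies the default; %-substitution is deferred to the handler.
logger.info('Running "%s" flow', GeneralSection.get("fpga_flow", fallback="yosys_vpr"))
# INFO:example:Running "yosys_vpr" flow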
@@ -290,8 +277,7 @@ def generate_each_task_actions(taskname):
             files = glob.glob(eachpath)
             if not len(files):
                 clean_up_and_exit(
-                    ("No files added benchmark %s" % bech_name)
-                    + " with path %s " % (eachpath)
+                    ("No files added benchmark %s" % bech_name) + " with path %s " % (eachpath)
                 )
             bench_files += files

@@ -327,15 +313,11 @@ def generate_each_task_actions(taskname):

         yosys_params_common = {}
         for param in yosys_params:
-            yosys_params_common[param.upper()] = SynthSection.get(
-                "bench_" + param + "_common"
-            )
+            yosys_params_common[param.upper()] = SynthSection.get("bench_" + param + "_common")

         # Individual benchmark configuration
         CurrBenchPara["files"] = bench_files
-        CurrBenchPara["top_module"] = SynthSection.get(
-            bech_name + "_top", fallback="top"
-        )
+        CurrBenchPara["top_module"] = SynthSection.get(bech_name + "_top", fallback="top")
         CurrBenchPara["ys_script"] = SynthSection.get(
             bech_name + "_yosys", fallback=ys_for_task_common
         )
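The collapsed SynthSection.get calls rely on configparser section lookups returning the given fallback (or None) when a per-benchmark key is absent. A sketch of that behaviour; the SYNTHESIS_PARAM section name follows the task-file convention this script reads, but the keys and values here are invented:

from configparser import ConfigParser, ExtendedInterpolation

config = ConfigParser(interpolation=ExtendedInterpolation())
# Invented task configuration: one benchmark-specific key, nothing else.
config.read_dict({"SYNTHESIS_PARAM": {"and2_top": "top_and2"}})
SynthSection = config["SYNTHESIS_PARAM"]

print(SynthSection.get("and2_top", fallback="top"))      # top_and2 (key present)
print(SynthSection.get("counter_top", fallback="top"))   # top      (fallback used)
print(SynthSection.get("bench_param_common"))            # None     (no fallback given)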
@@ -360,8 +342,7 @@ def generate_each_task_actions(taskname):
             if GeneralSection.getboolean("power_analysis"):
                 if not SynthSection.get(bech_name + "_act"):
                     clean_up_and_exit(
-                        "Missing argument %s" % (bech_name + "_act")
-                        + "for vpr_blif flow"
+                        "Missing argument %s" % (bech_name + "_act") + "for vpr_blif flow"
                     )
                 CurrBenchPara["activity_file"] = SynthSection.get(bech_name + "_act")
             else:
@@ -369,9 +350,7 @@ def generate_each_task_actions(taskname):
                 if not SynthSection.get(bech_name + "_act"):
                     CurrBenchPara["activity_file"] = bech_name + "_act"
                 else:
-                    CurrBenchPara["activity_file"] = SynthSection.get(
-                        bech_name + "_act"
-                    )
+                    CurrBenchPara["activity_file"] = SynthSection.get(bech_name + "_act")

             # Check if base verilog file exists
             if not SynthSection.get(bech_name + "_verilog"):
@@ -413,10 +392,7 @@ def generate_each_task_actions(taskname):
                 if benchmark_top_module_count.count(bench["top_module"]) > 1:
                     flow_run_dir = get_flow_rundir(
                         arch,
-                        "bench"
-                        + str(benchmark_list.index(bench))
-                        + "_"
-                        + bench["top_module"],
+                        "bench" + str(benchmark_list.index(bench)) + "_" + bench["top_module"],
                         lbl,
                     )
                 else:
@@ -593,11 +569,7 @@ def run_single_script(s, eachJob, job_list):
                 os._exit(1)
         eachJob["endtime"] = time.time()
         timediff = timedelta(seconds=(eachJob["endtime"] - eachJob["starttime"]))
-        timestr = (
-            humanize.naturaldelta(timediff)
-            if "humanize" in sys.modules
-            else str(timediff)
-        )
+        timestr = humanize.naturaldelta(timediff) if "humanize" in sys.modules else str(timediff)
         logger.info(
             "%s Finished with returncode %d, Time Taken %s ",
             thread_name,
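The collapsed timestr one-liner only works because humanize is an optional dependency: when the import failed, the module is absent from sys.modules and the plain timedelta string is used instead. A self-contained sketch of the pattern; the try/except import is an assumption about how the script makes humanize optional:

import sys
import time
from datetime import timedelta

try:
    import humanize  # optional; the fallback below covers its absence
except ImportError:
    pass

start = time.time()
time.sleep(1.5)  # stand-in for an actual job's runtime
timediff = timedelta(seconds=(time.time() - start))

# Human-friendly duration if humanize imported, plain timedelta repr otherwise.
timestr = humanize.naturaldelta(timediff) if "humanize" in sys.modules else str(timediff)
print(timestr)  # e.g. "a second" or "0:00:01.500123"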
@@ -639,9 +611,7 @@ def collect_results(job_run_list):
            continue

        # Read and merge result file
-        vpr_res = ConfigParser(
-            allow_no_value=True, interpolation=ExtendedInterpolation()
-        )
+        vpr_res = ConfigParser(allow_no_value=True, interpolation=ExtendedInterpolation())
         vpr_result_file = os.path.join(run["run_dir"], "vpr_stat.result")
         vpr_res.read_file(open(vpr_result_file, encoding="UTF-8"))
         result = OrderedDict()