diff --git a/openfpga_flow/scripts/run_fpga_flow.py b/openfpga_flow/scripts/run_fpga_flow.py
index 3a936b9e7..23a655e02 100644
--- a/openfpga_flow/scripts/run_fpga_flow.py
+++ b/openfpga_flow/scripts/run_fpga_flow.py
@@ -395,7 +395,7 @@ def main():
 
 
 def check_required_file():
-    """ Function ensure existace of all required files for the script """
+    """Function ensure existace of all required files for the script"""
     files_dict = {
         "CAD TOOL PATH": os.path.join(
             flow_script_dir, os.pardir, "misc", "fpgaflow_default_tool_path.conf"
@@ -407,7 +407,7 @@ def check_required_file():
 
 
 def read_script_config():
-    """ This fucntion reads default CAD tools path from configuration file """
+    """This fucntion reads default CAD tools path from configuration file"""
     global config, cad_tools
     config = ConfigParser(interpolation=ExtendedInterpolation())
     config.read_dict(script_env_vars)
diff --git a/openfpga_flow/scripts/run_fpga_task.py b/openfpga_flow/scripts/run_fpga_task.py
index bb223effa..e215a3f50 100644
--- a/openfpga_flow/scripts/run_fpga_task.py
+++ b/openfpga_flow/scripts/run_fpga_task.py
@@ -72,9 +72,7 @@ parser.add_argument(
     help="Dummy run shows final generated VPR commands",
 )
 parser.add_argument("--debug", action="store_true", help="Run script in debug mode")
-parser.add_argument(
-    "--continue_on_fail", action="store_true", help="Exit script with return code"
-)
+parser.add_argument("--continue_on_fail", action="store_true", help="Exit script with return code")
 parser.add_argument(
     "--show_thread_logs", action="store_true", help="Skips logs from running thread"
 )
@@ -87,24 +85,16 @@ task_script_dir = os.path.dirname(os.path.abspath(__file__))
 script_env_vars = {
     "PATH": {
         "OPENFPGA_FLOW_PATH": task_script_dir,
-        "VPR_ARCH_PATH": os.path.join(
-            "${PATH:OPENFPGA_PATH}", "openfpga_flow", "vpr_arch"
-        ),
-        "OF_ARCH_PATH": os.path.join(
-            "${PATH:OPENFPGA_PATH}", "openfpga_flow", "openfpga_arch"
-        ),
+        "VPR_ARCH_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "openfpga_flow", "vpr_arch"),
+        "OF_ARCH_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "openfpga_flow", "openfpga_arch"),
         "OPENFPGA_SHELLSCRIPT_PATH": os.path.join(
             "${PATH:OPENFPGA_PATH}", "openfpga_flow", "OpenFPGAShellScripts"
         ),
-        "BENCH_PATH": os.path.join(
-            "${PATH:OPENFPGA_PATH}", "openfpga_flow", "benchmarks"
-        ),
+        "BENCH_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "openfpga_flow", "benchmarks"),
         "TECH_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "openfpga_flow", "tech"),
         "SPICENETLIST_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "SpiceNetlists"),
         "VERILOG_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "VerilogNetlists"),
-        "OPENFPGA_PATH": os.path.abspath(
-            os.path.join(task_script_dir, os.pardir, os.pardir)
-        ),
+        "OPENFPGA_PATH": os.path.abspath(os.path.join(task_script_dir, os.pardir, os.pardir)),
     }
 }
 config = ConfigParser(interpolation=ExtendedInterpolation())
@@ -252,8 +242,7 @@ def generate_each_task_actions(taskname):
     missing_section = list(set(required_sec) - set(task_conf.sections()))
     if missing_section:
         clean_up_and_exit(
-            "Missing sections %s" % " ".join(missing_section)
-            + " in task configuration file"
+            "Missing sections %s" % " ".join(missing_section) + " in task configuration file"
         )
 
     # Declare varibles to access sections
@@ -274,9 +263,7 @@ def generate_each_task_actions(taskname):
         clean_up_and_exit("Found duplicate architectures in config file")
 
     # Get Flow information
-    logger.info(
-        'Running "%s" flow', GeneralSection.get("fpga_flow", fallback="yosys_vpr")
-    )
+    logger.info('Running "%s" flow', GeneralSection.get("fpga_flow", fallback="yosys_vpr"))
 
     # Check if specified benchmark files exist
     benchmark_list = []
@@ -290,8 +277,7 @@ def generate_each_task_actions(taskname):
             files = glob.glob(eachpath)
             if not len(files):
                 clean_up_and_exit(
-                    ("No files added benchmark %s" % bech_name)
-                    + " with path %s " % (eachpath)
+                    ("No files added benchmark %s" % bech_name) + " with path %s " % (eachpath)
                 )
             bench_files += files
 
@@ -327,15 +313,11 @@ def generate_each_task_actions(taskname):
 
         yosys_params_common = {}
         for param in yosys_params:
-            yosys_params_common[param.upper()] = SynthSection.get(
-                "bench_" + param + "_common"
-            )
+            yosys_params_common[param.upper()] = SynthSection.get("bench_" + param + "_common")
 
         # Individual benchmark configuration
         CurrBenchPara["files"] = bench_files
-        CurrBenchPara["top_module"] = SynthSection.get(
-            bech_name + "_top", fallback="top"
-        )
+        CurrBenchPara["top_module"] = SynthSection.get(bech_name + "_top", fallback="top")
         CurrBenchPara["ys_script"] = SynthSection.get(
             bech_name + "_yosys", fallback=ys_for_task_common
         )
@@ -360,8 +342,7 @@ def generate_each_task_actions(taskname):
             if GeneralSection.getboolean("power_analysis"):
                 if not SynthSection.get(bech_name + "_act"):
                     clean_up_and_exit(
-                        "Missing argument %s" % (bech_name + "_act")
-                        + "for vpr_blif flow"
+                        "Missing argument %s" % (bech_name + "_act") + "for vpr_blif flow"
                     )
                 CurrBenchPara["activity_file"] = SynthSection.get(bech_name + "_act")
             else:
@@ -369,9 +350,7 @@ def generate_each_task_actions(taskname):
                 if not SynthSection.get(bech_name + "_act"):
                     CurrBenchPara["activity_file"] = bech_name + "_act"
                 else:
-                    CurrBenchPara["activity_file"] = SynthSection.get(
-                        bech_name + "_act"
-                    )
+                    CurrBenchPara["activity_file"] = SynthSection.get(bech_name + "_act")
 
             # Check if base verilog file exists
             if not SynthSection.get(bech_name + "_verilog"):
@@ -413,10 +392,7 @@ def generate_each_task_actions(taskname):
                 if benchmark_top_module_count.count(bench["top_module"]) > 1:
                     flow_run_dir = get_flow_rundir(
                         arch,
-                        "bench"
-                        + str(benchmark_list.index(bench))
-                        + "_"
-                        + bench["top_module"],
+                        "bench" + str(benchmark_list.index(bench)) + "_" + bench["top_module"],
                         lbl,
                     )
                 else:
@@ -593,11 +569,7 @@ def run_single_script(s, eachJob, job_list):
                     os._exit(1)
         eachJob["endtime"] = time.time()
         timediff = timedelta(seconds=(eachJob["endtime"] - eachJob["starttime"]))
-        timestr = (
-            humanize.naturaldelta(timediff)
-            if "humanize" in sys.modules
-            else str(timediff)
-        )
+        timestr = humanize.naturaldelta(timediff) if "humanize" in sys.modules else str(timediff)
         logger.info(
             "%s Finished with returncode %d, Time Taken %s ",
             thread_name,
@@ -639,9 +611,7 @@ def collect_results(job_run_list):
             continue
 
         # Read and merge result file
-        vpr_res = ConfigParser(
-            allow_no_value=True, interpolation=ExtendedInterpolation()
-        )
+        vpr_res = ConfigParser(allow_no_value=True, interpolation=ExtendedInterpolation())
         vpr_result_file = os.path.join(run["run_dir"], "vpr_stat.result")
         vpr_res.read_file(open(vpr_result_file, encoding="UTF-8"))
         result = OrderedDict()