From 7f6c1b3e00b794806ff377dcb80f7e3df89c35d0 Mon Sep 17 00:00:00 2001
From: Ganesh Gore
Date: Sun, 18 Aug 2019 12:26:05 -0600
Subject: [PATCH] Code re-arrangement

+ Added support for subdirectory task in openfpga_task
+ Rearranged function order
+ Combined vpr re-route and standard run function
+ Removed external_call function from fpga_flow script
+ Added .gitignore to task directory
---
 .gitignore                             |   4 +-
 openfpga_flow/scripts/run_fpga_flow.py | 110 ++++---------------------
 openfpga_flow/scripts/run_fpga_task.py |  42 ++++++----
 3 files changed, 45 insertions(+), 111 deletions(-)

diff --git a/.gitignore b/.gitignore
index cc77d456e..fc6dbf3a5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -42,4 +42,6 @@ vpr7_x2p/vpr/vpr_shell
 *_local.bat
 fpga_flow/csv_rpts
 tmp/
-build/
\ No newline at end of file
+build/
+
+message.txt
\ No newline at end of file
diff --git a/openfpga_flow/scripts/run_fpga_flow.py b/openfpga_flow/scripts/run_fpga_flow.py
index d242daa03..ad18e66ad 100644
--- a/openfpga_flow/scripts/run_fpga_flow.py
+++ b/openfpga_flow/scripts/run_fpga_flow.py
@@ -337,7 +337,7 @@ def clean_up_and_exit(msg, clean=False):
     logger.error("Current working directory : " + os.getcwd())
     logger.error(msg)
     logger.error("Exiting . . . . . .")
-    exit()
+    exit(1)
 
 
 def run_yosys_with_abc():
@@ -499,9 +499,10 @@ def run_vpr():
                     " (Slack of %d%%)" % ((args.min_route_chan_width-1)*100))
 
     while(1):
-        res = run_vpr_route(args.top_module+"_ace_corrected_out.blif",
-                            min_channel_width,
-                            args.top_module+"_min_channel_reroute_vpr.txt")
+        res = run_standard_vpr(args.top_module+"_ace_corrected_out.blif",
+                               int(min_channel_width),
+                               args.top_module+"_reroute_vpr.txt",
+                               route_only=True)
 
         if res:
             logger.info("Routing with channel width=%d successful" %
@@ -532,15 +533,17 @@ def run_vpr():
         extract_vpr_power_esti(args.top_module+"_ace_corrected_out.power")
 
 
-def run_standard_vpr(bench_blif, fixed_chan_width, logfile):
+def run_standard_vpr(bench_blif, fixed_chan_width, logfile, route_only=False):
     command = [cad_tools["vpr_path"],
                args.arch_file,
                bench_blif,
                "--net_file", args.top_module+"_vpr.net",
                "--place_file", args.top_module+"_vpr.place",
                "--route_file", args.top_module+"_vpr.route",
-               "--full_stats", "--nodisp"
+               "--full_stats", "--nodisp",
                ]
+    if route_only:
+        command += ["--route"]
     # Power options
     if args.power:
         command += ["--power",
@@ -551,7 +554,7 @@ def run_standard_vpr(bench_blif, fixed_chan_width, logfile):
         command += ["--timing_driven_clustering", "off"]
     # channel width option
     if fixed_chan_width >= 0:
-        command += ["-route_chan_width", fixed_chan_width]
+        command += ["--route_chan_width", "%d"%fixed_chan_width]
 
     if args.vpr_use_tileable_route_chan_width:
         command += ["--use_tileable_route_chan_width"]
@@ -654,90 +657,16 @@ def run_standard_vpr(bench_blif, fixed_chan_width, logfile):
                                      universal_newlines=True)
             for line in process.stdout.split('\n'):
                 if "Best routing" in line:
-                    chan_width = re.search(
-                        r"channel width factor of ([0-9]+)", line).group(1)
+                    chan_width = int(re.search(
+                        r"channel width factor of ([0-9]+)", line).group(1))
                 if "Circuit successfully routed" in line:
-                    chan_width = re.search(
-                        r"a channel width factor of ([0-9]+)", line).group(1)
+                    chan_width = int(re.search(
+                        r"a channel width factor of ([0-9]+)", line).group(1))
             output.write(process.stdout)
         if process.returncode:
             logger.info("Standard VPR run failed with returncode %d",
                         process.returncode)
-    except Exception as e:
-        logger.exception("Failed to run VPR")
-        process_failed_vpr_run(e.output)
-        clean_up_and_exit("")
-    logger.info("VPR output is written in file %s" % logfile)
-    return int(chan_width)
-
-
-def run_vpr_route(bench_blif, fixed_chan_width, logfile):
-    command = [cad_tools["vpr_path"],
-               args.arch_file,
-               bench_blif,
-               "--net_file", args.top_module+"_vpr.net",
-               "--place_file", args.top_module+"_vpr.place",
-               "--route_file", args.top_module+"_vpr.route",
-               "--full_stats", "--nodisp",
-               "--route"
-               ]
-    if args.power:
-        command += [
-            "--power",
-            "--activity_file", args.top_module+"_ace_out.act",
-            "--tech_properties", args.power_tech]
-    if fixed_chan_width >= 0:
-        command += ["-route_chan_width", "%d" % fixed_chan_width]
-
-    # VPR - SPICE options
-    if args.power and args.vpr_fpga_spice:
-        command += "--fpga_spice"
-        if args.vpr_fpga_spice_print_cbsbtb:
-            command += ["--print_spice_cb_mux_testbench",
-                        "--print_spice_sb_mux_testbench"]
-        if args.vpr_fpga_spice_print_pbtb:
-            command += ["--print_spice_pb_mux_testbench",
-                        "--print_spice_lut_testbench",
-                        "--print_spice_hardlogic_testbench"]
-        if args.vpr_fpga_spice_print_gridtb:
-            command += ["--print_spice_grid_testbench"]
-        if args.vpr_fpga_spice_print_toptb:
-            command += ["--print_spice_top_testbench"]
-        if args.vpr_fpga_spice_leakage_only:
-            command += ["--fpga_spice_leakage_only"]
-        if args.vpr_fpga_spice_parasitic_net_estimation_off:
-            command += ["--fpga_spice_parasitic_net_estimation_off"]
-
-    if args.vpr_fpga_verilog:
-        command += ["--fpga_verilog"]
-        if args.vpr_fpga_x2p_rename_illegal_port:
-            command += ["--fpga_x2p_rename_illegal_port"]
-
-    if args.vpr_max_router_iteration:
-        command += ["--max_router_iterations", args.vpr_max_router_iteration]
-    if args.vpr_route_breadthfirst:
-        command += ["--router_algorithm", "breadth_first"]
-    chan_width = None
-    try:
-        with open(logfile, 'w+') as output:
-            output.write(" ".join(command)+"\n")
-            process = subprocess.run(command,
-                                     check=True,
-                                     stdout=subprocess.PIPE,
-                                     stderr=subprocess.PIPE,
-                                     universal_newlines=True)
-            for line in process.stdout.split('\n'):
-                if "Best routing" in line:
-                    chan_width = re.search(
-                        r"channel width factor of ([0-9]+)", line).group(1)
-                if "Circuit successfully routed" in line:
-                    chan_width = re.search(
-                        r"a channel width factor of ([0-9]+)", line).group(1)
-            output.write(process.stdout)
-        if process.returncode:
-            logger.info("Standard VPR run failed with returncode %d",
-                        process.returncode)
-    except Exception as e:
+    except (Exception, subprocess.CalledProcessError) as e:
         logger.exception("Failed to run VPR")
         process_failed_vpr_run(e.output)
         clean_up_and_exit("")
@@ -843,7 +772,7 @@ def run_command(taskname, logfile, command, exit_if_fail=True):
             if process.returncode:
                 logger.error("%s run failed with returncode %d" %
                              (taskname, process.returncode))
-    except Exception as e:
+    except (Exception, subprocess.CalledProcessError) as e:
         logger.exception("failed to execute %s" % taskname)
         process_failed_vpr_run(e.output)
         if exit_if_fail:
@@ -851,13 +780,6 @@ def run_command(taskname, logfile, command, exit_if_fail=True):
     logger.info("%s is written in file %s" % (taskname, logfile))
 
 
-def external_call(parent_logger=None, passed_args=[]):
-    global logger, args
-    logger = parent_logger
-    args = parser.parse_args(passed_args)
-    main()
-
-
 def process_failed_vpr_run(vpr_output):
     for line in vpr_output.split("\n"):
         if "error" in line.lower():
diff --git a/openfpga_flow/scripts/run_fpga_task.py b/openfpga_flow/scripts/run_fpga_task.py
index f740f0b5b..0612700d4 100644
--- a/openfpga_flow/scripts/run_fpga_task.py
+++ b/openfpga_flow/scripts/run_fpga_task.py
@@ -17,7 +17,7 @@ import pprint
 # = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
 # Configure logging system
 # = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
-logging.basicConfig(level=logging.DEBUG, stream=sys.stdout,
+logging.basicConfig(level=logging.INFO, stream=sys.stdout,
                     format='%(levelname)s (%(threadName)-9s) - %(message)s')
 logger = logging.getLogger('OpenFPGA_Task_logs')
 
@@ -50,11 +50,12 @@ gc = config["GENERAL CONFIGURATION"]
 
 
 def main():
-    validate_command_line_arguments()
 
     for eachtask in args.tasks:
         logger.info("Currently running task %s" % eachtask)
+        eachtask = eachtask.replace("\\", "/").split("/")
         job_run_list = generate_each_task_actions(eachtask)
+        eachtask = "_".join(eachtask)
         if not args.test_run:
             run_actions(job_run_list)
             collect_results(job_run_list)
@@ -84,7 +85,7 @@ def generate_each_task_actions(taskname):
     """
 
     # Check if task directory exists and consistent
-    curr_task_dir = os.path.join(gc["task_dir"], taskname)
+    curr_task_dir = os.path.join(gc["task_dir"], *(taskname))
     if not os.path.isdir(curr_task_dir):
         clean_up_and_exit("Task directory [%s] not found" % curr_task_dir)
     os.chdir(curr_task_dir)
@@ -148,7 +149,7 @@ def generate_each_task_actions(taskname):
             "top_module": task_conf.get("SYNTHESIS_PARAM", bech_name+"_top",
                                         fallback="top"),
             "ys_script": task_conf.get("SYNTHESIS_PARAM", bech_name+"_yosys",
-                                       fallback=ys_for_task)
+                                       fallback=ys_for_task),
         })
 
     # Create OpenFPGA flow run commnad for each combination of
@@ -165,7 +166,8 @@ def generate_each_task_actions(taskname):
             "bench": bench,
             "name": "%s_arch%d" % (bench["top_module"], indx),
             "run_dir": flow_run_dir,
-            "commands": cmd})
+            "commands": cmd,
+            "status": False})
     return flow_run_cmd_list
 
 
@@ -230,23 +232,28 @@ def create_run_command(curr_job_dir, archfile, benchmark_obj, task_conf):
     return command
 
 
-def run_single_script(s, command):
+def run_single_script(s, eachJob):
     logger.debug('Added job in pool')
     with s:
-        logger.debug("Running OpenFPGA flow with " + " ".join(command))
+        logger.debug("Running OpenFPGA flow with " +
+                     " ".join(eachJob["commands"]))
         name = threading.currentThread().getName()
-        # run_fpga_flow.external_call(logger, command)
        try:
             logfile = "%s_out.log" % name
             with open(logfile, 'w+') as output:
-                process = subprocess.run(["python3.5", gc["script_default"]]+command,
+                process = subprocess.run(["python3.5",
+                                          gc["script_default"]] +
+                                         eachJob["commands"],
                                          check=True,
                                          stdout=subprocess.PIPE,
                                          stderr=subprocess.PIPE,
                                          universal_newlines=True)
                 output.write(process.stdout)
+                eachJob["status"] = True
         except:
-            logger.exception("Failed to launch openfpga flow")
+            logger.error("Failed to execute openfpga flow - " +
+                         eachJob["name"])
+            # logger.exception("Failed to launch openfpga flow")
         logger.info("%s Finished " % name)
 
 
@@ -256,7 +263,7 @@ def run_actions(job_run_list):
     for index, eachjob in enumerate(job_run_list):
         t = threading.Thread(target=run_single_script,
                              name='Job_%02d' % (index+1),
-                             args=(thread_sema, eachjob["commands"]))
+                             args=(thread_sema, eachjob))
         t.start()
         thred_list.append(t)
 
@@ -267,6 +274,8 @@ def run_actions(job_run_list):
 def collect_results(job_run_list):
     task_result = []
     for run in job_run_list:
+        if not run["status"]:
+            continue
         # Check if any result file exist
         if not glob.glob(os.path.join(run["run_dir"], "*.result")):
             logger.info("No result files found for %s" % run["name"])
@@ -280,11 +289,12 @@ def collect_results(job_run_list):
         result["name"] = run["name"]
         task_result.append(result)
 
-    with open("task_result.csv", 'w', newline='') as csvfile:
-        writer = csv.DictWriter(csvfile, fieldnames=task_result[0].keys())
-        writer.writeheader()
-        for eachResult in task_result:
-            writer.writerow(eachResult)
+    if len(task_result):
+        with open("task_result.csv", 'w', newline='') as csvfile:
+            writer = csv.DictWriter(csvfile, fieldnames=task_result[0].keys())
+            writer.writeheader()
+            for eachResult in task_result:
+                writer.writerow(eachResult)
 
 
 if __name__ == "__main__":
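
Below is a small illustrative sketch (not part of the patch) of how the subdirectory-task handling added above resolves a task name into a task directory and a flattened label; the task_dir value and the task name are hypothetical stand-ins.

    import os

    task_dir = "openfpga_flow/tasks"      # hypothetical stand-in for gc["task_dir"]
    eachtask = "regression/quick_run"     # hypothetical task name passed on the command line

    # main() normalizes path separators and splits the task into components
    parts = eachtask.replace("\\", "/").split("/")   # ['regression', 'quick_run']

    # generate_each_task_actions() joins the components under the task directory
    curr_task_dir = os.path.join(task_dir, *parts)   # openfpga_flow/tasks/regression/quick_run

    # main() then flattens the components into a single label for later reporting
    label = "_".join(parts)                          # regression_quick_run
    print(curr_task_dir, label)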