Code re-arrangement

+ Added support for subdirectory task in openfpga_task
+ Rearranged function order
+ Combined vpr re-route and standard run function
+ Removed external_call function from fpga_flow script
+ Added .gitignore to task directory
This commit is contained in:
Ganesh Gore 2019-08-18 12:26:05 -06:00
parent fb29fcfc06
commit 7f6c1b3e00
3 changed files with 45 additions and 111 deletions

2
.gitignore vendored
View File

@ -43,3 +43,5 @@ vpr7_x2p/vpr/vpr_shell
fpga_flow/csv_rpts fpga_flow/csv_rpts
tmp/ tmp/
build/ build/
message.txt

View File

@ -337,7 +337,7 @@ def clean_up_and_exit(msg, clean=False):
logger.error("Current working directory : " + os.getcwd()) logger.error("Current working directory : " + os.getcwd())
logger.error(msg) logger.error(msg)
logger.error("Exiting . . . . . .") logger.error("Exiting . . . . . .")
exit() exit(1)
def run_yosys_with_abc(): def run_yosys_with_abc():
@ -499,9 +499,10 @@ def run_vpr():
" (Slack of %d%%)" % ((args.min_route_chan_width-1)*100)) " (Slack of %d%%)" % ((args.min_route_chan_width-1)*100))
while(1): while(1):
res = run_vpr_route(args.top_module+"_ace_corrected_out.blif", res = run_standard_vpr(args.top_module+"_ace_corrected_out.blif",
min_channel_width, int(min_channel_width),
args.top_module+"_min_channel_reroute_vpr.txt") args.top_module+"_reroute_vpr.txt",
route_only=True)
if res: if res:
logger.info("Routing with channel width=%d successful" % logger.info("Routing with channel width=%d successful" %
@ -532,15 +533,17 @@ def run_vpr():
extract_vpr_power_esti(args.top_module+"_ace_corrected_out.power") extract_vpr_power_esti(args.top_module+"_ace_corrected_out.power")
def run_standard_vpr(bench_blif, fixed_chan_width, logfile): def run_standard_vpr(bench_blif, fixed_chan_width, logfile, route_only=False):
command = [cad_tools["vpr_path"], command = [cad_tools["vpr_path"],
args.arch_file, args.arch_file,
bench_blif, bench_blif,
"--net_file", args.top_module+"_vpr.net", "--net_file", args.top_module+"_vpr.net",
"--place_file", args.top_module+"_vpr.place", "--place_file", args.top_module+"_vpr.place",
"--route_file", args.top_module+"_vpr.route", "--route_file", args.top_module+"_vpr.route",
"--full_stats", "--nodisp" "--full_stats", "--nodisp",
] ]
if route_only:
command += ["--route"]
# Power options # Power options
if args.power: if args.power:
command += ["--power", command += ["--power",
@ -551,7 +554,7 @@ def run_standard_vpr(bench_blif, fixed_chan_width, logfile):
command += ["--timing_driven_clustering", "off"] command += ["--timing_driven_clustering", "off"]
# channel width option # channel width option
if fixed_chan_width >= 0: if fixed_chan_width >= 0:
command += ["-route_chan_width", fixed_chan_width] command += ["--route_chan_width", "%d"%fixed_chan_width]
if args.vpr_use_tileable_route_chan_width: if args.vpr_use_tileable_route_chan_width:
command += ["--use_tileable_route_chan_width"] command += ["--use_tileable_route_chan_width"]
@ -654,90 +657,16 @@ def run_standard_vpr(bench_blif, fixed_chan_width, logfile):
universal_newlines=True) universal_newlines=True)
for line in process.stdout.split('\n'): for line in process.stdout.split('\n'):
if "Best routing" in line: if "Best routing" in line:
chan_width = re.search( chan_width = int(re.search(
r"channel width factor of ([0-9]+)", line).group(1) r"channel width factor of ([0-9]+)", line).group(1))
if "Circuit successfully routed" in line: if "Circuit successfully routed" in line:
chan_width = re.search( chan_width = int(re.search(
r"a channel width factor of ([0-9]+)", line).group(1) r"a channel width factor of ([0-9]+)", line).group(1))
output.write(process.stdout) output.write(process.stdout)
if process.returncode: if process.returncode:
logger.info("Standard VPR run failed with returncode %d", logger.info("Standard VPR run failed with returncode %d",
process.returncode) process.returncode)
except Exception as e: except (Exception, subprocess.CalledProcessError) as e:
logger.exception("Failed to run VPR")
process_failed_vpr_run(e.output)
clean_up_and_exit("")
logger.info("VPR output is written in file %s" % logfile)
return int(chan_width)
def run_vpr_route(bench_blif, fixed_chan_width, logfile):
command = [cad_tools["vpr_path"],
args.arch_file,
bench_blif,
"--net_file", args.top_module+"_vpr.net",
"--place_file", args.top_module+"_vpr.place",
"--route_file", args.top_module+"_vpr.route",
"--full_stats", "--nodisp",
"--route"
]
if args.power:
command += [
"--power",
"--activity_file", args.top_module+"_ace_out.act",
"--tech_properties", args.power_tech]
if fixed_chan_width >= 0:
command += ["-route_chan_width", "%d" % fixed_chan_width]
# VPR - SPICE options
if args.power and args.vpr_fpga_spice:
command += "--fpga_spice"
if args.vpr_fpga_spice_print_cbsbtb:
command += ["--print_spice_cb_mux_testbench",
"--print_spice_sb_mux_testbench"]
if args.vpr_fpga_spice_print_pbtb:
command += ["--print_spice_pb_mux_testbench",
"--print_spice_lut_testbench",
"--print_spice_hardlogic_testbench"]
if args.vpr_fpga_spice_print_gridtb:
command += ["--print_spice_grid_testbench"]
if args.vpr_fpga_spice_print_toptb:
command += ["--print_spice_top_testbench"]
if args.vpr_fpga_spice_leakage_only:
command += ["--fpga_spice_leakage_only"]
if args.vpr_fpga_spice_parasitic_net_estimation_off:
command += ["--fpga_spice_parasitic_net_estimation_off"]
if args.vpr_fpga_verilog:
command += ["--fpga_verilog"]
if args.vpr_fpga_x2p_rename_illegal_port:
command += ["--fpga_x2p_rename_illegal_port"]
if args.vpr_max_router_iteration:
command += ["--max_router_iterations", args.vpr_max_router_iteration]
if args.vpr_route_breadthfirst:
command += ["--router_algorithm", "breadth_first"]
chan_width = None
try:
with open(logfile, 'w+') as output:
output.write(" ".join(command)+"\n")
process = subprocess.run(command,
check=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True)
for line in process.stdout.split('\n'):
if "Best routing" in line:
chan_width = re.search(
r"channel width factor of ([0-9]+)", line).group(1)
if "Circuit successfully routed" in line:
chan_width = re.search(
r"a channel width factor of ([0-9]+)", line).group(1)
output.write(process.stdout)
if process.returncode:
logger.info("Standard VPR run failed with returncode %d",
process.returncode)
except Exception as e:
logger.exception("Failed to run VPR") logger.exception("Failed to run VPR")
process_failed_vpr_run(e.output) process_failed_vpr_run(e.output)
clean_up_and_exit("") clean_up_and_exit("")
@ -843,7 +772,7 @@ def run_command(taskname, logfile, command, exit_if_fail=True):
if process.returncode: if process.returncode:
logger.error("%s run failed with returncode %d" % logger.error("%s run failed with returncode %d" %
(taskname, process.returncode)) (taskname, process.returncode))
except Exception as e: except (Exception, subprocess.CalledProcessError) as e:
logger.exception("failed to execute %s" % taskname) logger.exception("failed to execute %s" % taskname)
process_failed_vpr_run(e.output) process_failed_vpr_run(e.output)
if exit_if_fail: if exit_if_fail:
@ -851,13 +780,6 @@ def run_command(taskname, logfile, command, exit_if_fail=True):
logger.info("%s is written in file %s" % (taskname, logfile)) logger.info("%s is written in file %s" % (taskname, logfile))
def external_call(parent_logger=None, passed_args=[]):
global logger, args
logger = parent_logger
args = parser.parse_args(passed_args)
main()
def process_failed_vpr_run(vpr_output): def process_failed_vpr_run(vpr_output):
for line in vpr_output.split("\n"): for line in vpr_output.split("\n"):
if "error" in line.lower(): if "error" in line.lower():

View File

@ -17,7 +17,7 @@ import pprint
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = # = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
# Configure logging system # Configure logging system
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = # = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
logging.basicConfig(level=logging.DEBUG, stream=sys.stdout, logging.basicConfig(level=logging.INFO, stream=sys.stdout,
format='%(levelname)s (%(threadName)-9s) - %(message)s') format='%(levelname)s (%(threadName)-9s) - %(message)s')
logger = logging.getLogger('OpenFPGA_Task_logs') logger = logging.getLogger('OpenFPGA_Task_logs')
@ -50,11 +50,12 @@ gc = config["GENERAL CONFIGURATION"]
def main(): def main():
validate_command_line_arguments() validate_command_line_arguments()
for eachtask in args.tasks: for eachtask in args.tasks:
logger.info("Currently running task %s" % eachtask) logger.info("Currently running task %s" % eachtask)
eachtask = eachtask.replace("\\", "/").split("/")
job_run_list = generate_each_task_actions(eachtask) job_run_list = generate_each_task_actions(eachtask)
eachtask = "_".join(eachtask)
if not args.test_run: if not args.test_run:
run_actions(job_run_list) run_actions(job_run_list)
collect_results(job_run_list) collect_results(job_run_list)
@ -84,7 +85,7 @@ def generate_each_task_actions(taskname):
""" """
# Check if task directory exists and consistent # Check if task directory exists and consistent
curr_task_dir = os.path.join(gc["task_dir"], taskname) curr_task_dir = os.path.join(gc["task_dir"], *(taskname))
if not os.path.isdir(curr_task_dir): if not os.path.isdir(curr_task_dir):
clean_up_and_exit("Task directory [%s] not found" % curr_task_dir) clean_up_and_exit("Task directory [%s] not found" % curr_task_dir)
os.chdir(curr_task_dir) os.chdir(curr_task_dir)
@ -148,7 +149,7 @@ def generate_each_task_actions(taskname):
"top_module": task_conf.get("SYNTHESIS_PARAM", bech_name+"_top", "top_module": task_conf.get("SYNTHESIS_PARAM", bech_name+"_top",
fallback="top"), fallback="top"),
"ys_script": task_conf.get("SYNTHESIS_PARAM", bech_name+"_yosys", "ys_script": task_conf.get("SYNTHESIS_PARAM", bech_name+"_yosys",
fallback=ys_for_task) fallback=ys_for_task),
}) })
# Create OpenFPGA flow run commnad for each combination of # Create OpenFPGA flow run commnad for each combination of
@ -165,7 +166,8 @@ def generate_each_task_actions(taskname):
"bench": bench, "bench": bench,
"name": "%s_arch%d" % (bench["top_module"], indx), "name": "%s_arch%d" % (bench["top_module"], indx),
"run_dir": flow_run_dir, "run_dir": flow_run_dir,
"commands": cmd}) "commands": cmd,
"status": False})
return flow_run_cmd_list return flow_run_cmd_list
@ -230,23 +232,28 @@ def create_run_command(curr_job_dir, archfile, benchmark_obj, task_conf):
return command return command
def run_single_script(s, command): def run_single_script(s, eachJob):
logger.debug('Added job in pool') logger.debug('Added job in pool')
with s: with s:
logger.debug("Running OpenFPGA flow with " + " ".join(command)) logger.debug("Running OpenFPGA flow with " +
" ".join(eachJob["commands"]))
name = threading.currentThread().getName() name = threading.currentThread().getName()
# run_fpga_flow.external_call(logger, command)
try: try:
logfile = "%s_out.log" % name logfile = "%s_out.log" % name
with open(logfile, 'w+') as output: with open(logfile, 'w+') as output:
process = subprocess.run(["python3.5", gc["script_default"]]+command, process = subprocess.run(["python3.5",
gc["script_default"]] +
eachJob["commands"],
check=True, check=True,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, stderr=subprocess.PIPE,
universal_newlines=True) universal_newlines=True)
output.write(process.stdout) output.write(process.stdout)
eachJob["status"] = True
except: except:
logger.exception("Failed to launch openfpga flow") logger.error("Failed to execute openfpga flow - " +
eachJob["name"])
# logger.exception("Failed to launch openfpga flow")
logger.info("%s Finished " % name) logger.info("%s Finished " % name)
@ -256,7 +263,7 @@ def run_actions(job_run_list):
for index, eachjob in enumerate(job_run_list): for index, eachjob in enumerate(job_run_list):
t = threading.Thread(target=run_single_script, t = threading.Thread(target=run_single_script,
name='Job_%02d' % (index+1), name='Job_%02d' % (index+1),
args=(thread_sema, eachjob["commands"])) args=(thread_sema, eachjob))
t.start() t.start()
thred_list.append(t) thred_list.append(t)
@ -267,6 +274,8 @@ def run_actions(job_run_list):
def collect_results(job_run_list): def collect_results(job_run_list):
task_result = [] task_result = []
for run in job_run_list: for run in job_run_list:
if not run["status"]:
continue
# Check if any result file exist # Check if any result file exist
if not glob.glob(os.path.join(run["run_dir"], "*.result")): if not glob.glob(os.path.join(run["run_dir"], "*.result")):
logger.info("No result files found for %s" % run["name"]) logger.info("No result files found for %s" % run["name"])
@ -280,11 +289,12 @@ def collect_results(job_run_list):
result["name"] = run["name"] result["name"] = run["name"]
task_result.append(result) task_result.append(result)
with open("task_result.csv", 'w', newline='') as csvfile: if len(task_result):
writer = csv.DictWriter(csvfile, fieldnames=task_result[0].keys()) with open("task_result.csv", 'w', newline='') as csvfile:
writer.writeheader() writer = csv.DictWriter(csvfile, fieldnames=task_result[0].keys())
for eachResult in task_result: writer.writeheader()
writer.writerow(eachResult) for eachResult in task_result:
writer.writerow(eachResult)
if __name__ == "__main__": if __name__ == "__main__":