# =============================================================================
# Script Name : run_fpga_task.py
# Description : This script is designed to run openfpga_flow tasks. An
#               OpenFPGA task is designed to run openfpga_flow on each
#               combination of architecture, benchmark and script parameters
# Args        : python3 run_fpga_task.py --help
# Author      : Ganesh Gore
# Email       : ganesh.gore@utah.edu
# =============================================================================

import os
import sys
import shutil
import time
from datetime import timedelta
import shlex
import argparse
from configparser import ConfigParser, ExtendedInterpolation
import logging
import glob
import subprocess
import threading
import csv
from string import Template
import pprint
from importlib import util
from collections import OrderedDict

# Optional dependencies, imported only when available
if util.find_spec("coloredlogs"):
    import coloredlogs
if util.find_spec("humanize"):
    import humanize

if sys.version_info[0] < 3:
    raise Exception("run_fpga_task script must be using Python 3")

# =============================================================================
# Configure logging system
# =============================================================================
LOG_FORMAT = "%(levelname)5s (%(threadName)15s) - %(message)s"
if util.find_spec("coloredlogs"):
    coloredlogs.install(level='INFO', stream=sys.stdout, fmt=LOG_FORMAT)
else:
    logging.basicConfig(level=logging.INFO, stream=sys.stdout,
                        format=LOG_FORMAT)
logger = logging.getLogger('OpenFPGA_Task_logs')
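
# With LOG_FORMAT above, console lines look like this (message is illustrative):
#    INFO (      MainThread) - Currently running task <task_name>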

# =============================================================================
# Read commandline arguments
# =============================================================================
parser = argparse.ArgumentParser()
parser.add_argument('tasks', nargs='+')
parser.add_argument('--maxthreads', type=int, default=2,
                    help="Number of fpga_flow threads to run, default = 2. " +
                    "Typically <= number of processors on the system")
parser.add_argument('--remove_run_dir', type=str,
                    help="Remove run directories: " +
                    "'all' to remove all, " +
                    "<int>,<int> to remove specific run directories, " +
                    "<int>-<int> to remove a range of run directories")
parser.add_argument('--config', help="Override default configuration")
parser.add_argument('--test_run', action="store_true",
                    help="Dummy run that only shows the final generated " +
                    "VPR commands")
parser.add_argument('--debug', action="store_true",
                    help="Run script in debug mode")
parser.add_argument('--continue_on_fail', action="store_true",
                    help="Continue with the remaining jobs even if a job fails")
parser.add_argument('--show_thread_logs', action="store_true",
                    help="Show logs from the running threads")
args = parser.parse_args()
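
# A few example invocations (task names here are hypothetical):
#   python3 run_fpga_task.py example_task
#   python3 run_fpga_task.py taskA taskB --maxthreads 4 --show_thread_logs
#   python3 run_fpga_task.py example_task --remove_run_dir 1-3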

# =============================================================================
# Read script configuration file
# =============================================================================
task_script_dir = os.path.dirname(os.path.abspath(__file__))
script_env_vars = ({"PATH": {
    "OPENFPGA_FLOW_PATH": task_script_dir,
    "ARCH_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "arch"),
    "OPENFPGA_SHELLSCRIPT_PATH": os.path.join("${PATH:OPENFPGA_PATH}",
                                              "OpenFPGAShellScripts"),
    "BENCH_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "benchmarks"),
    "TECH_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "tech"),
    "SPICENETLIST_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "SpiceNetlists"),
    "VERILOG_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "VerilogNetlists"),
    "OPENFPGA_PATH": os.path.abspath(os.path.join(task_script_dir, os.pardir,
                                                  os.pardir))}})
config = ConfigParser(interpolation=ExtendedInterpolation())
config.read_dict(script_env_vars)
config.read_file(open(os.path.join(task_script_dir, 'run_fpga_task.conf')))
gc = config["GENERAL CONFIGURATION"]


def main():
    validate_command_line_arguments()
    for eachtask in args.tasks:
        logger.info("Currently running task %s" % eachtask)
        eachtask = eachtask.replace("\\", "/").split("/")
        job_run_list, GeneralSection = generate_each_task_actions(eachtask)
        # When only cleaning run directories, skip execution
        if args.remove_run_dir:
            continue
        eachtask = "_".join(eachtask)
        if not args.test_run:
            run_actions(job_run_list)
            if not (GeneralSection.get("fpga_flow") == "yosys"):
                collect_results(job_run_list)
        else:
            pprint.pprint(job_run_list)
    logger.info("Task execution completed")
    exit(0)

# =============================================================================
# Subroutines start here
# =============================================================================


def clean_up_and_exit(msg):
    logger.error(msg)
    logger.error("Exiting . . . . . .")
    exit(1)


def validate_command_line_arguments():
    if args.debug:
        logger.info("Setting logger in debug mode")
        logger.setLevel(logging.DEBUG)
    logger.info("Set up to run %d parallel threads", args.maxthreads)


def remove_run_dir():
    remove_dir = []
    try:
        argval = args.remove_run_dir.lower()
        if argval == "all":
            for eachRun in glob.glob("run*"):
                remove_dir += [eachRun]
        elif "-" in argval:
            minval, maxval = map(int, argval.split("-"))
            if minval > maxval:
                raise Exception("Enter valid range to remove")
            for eachRun in glob.glob("run*"):
                if minval <= int(eachRun[-3:]) <= maxval:
                    remove_dir += [eachRun]
        elif "," in argval:
            for eachRun in argval.split(","):
                remove_dir += ["run%03d" % int(eachRun)]
        else:
            logger.error("Unknown argument to --remove_run_dir")
    except:
        logger.exception("Failed to parse --remove_run_dir options")
    try:
        for eachdir in remove_dir:
            logger.info('Removing run_dir %s' % (eachdir))
            if os.path.exists('latest'):
                if eachdir == os.readlink('latest'):
                    remove_dir += ["latest"]
            shutil.rmtree(eachdir, ignore_errors=True)
    except:
        logger.exception("Failed to remove %s run directory" %
                         (eachdir or "Unknown"))
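
# For illustration: "--remove_run_dir all" removes every run* directory,
# "--remove_run_dir 2,4" removes run002 and run004, and
# "--remove_run_dir 1-3" removes run001 through run003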


def generate_each_task_actions(taskname):
    """
    This function generates all the flow-run commands required for
    each benchmark in the given task
    """
    # Check that the task directory exists and is consistent
    local_tasks = os.path.join(*(taskname))
    repo_tasks = os.path.join(gc["task_dir"], *(taskname))
    abs_tasks = os.path.abspath('/' + local_tasks)
    if os.path.isdir(local_tasks):
        os.chdir(local_tasks)
        curr_task_dir = os.path.abspath(os.getcwd())
    elif os.path.isdir(abs_tasks):
        curr_task_dir = abs_tasks
    elif os.path.isdir(repo_tasks):
        curr_task_dir = repo_tasks
    else:
        clean_up_and_exit("Task directory [%s] not found" % taskname +
                          " locally at [%s]" % local_tasks +
                          ", absolutely at [%s]" % abs_tasks +
                          ", or in OpenFPGA task directory [%s]" % repo_tasks)

    os.chdir(curr_task_dir)
    curr_task_conf_file = os.path.join(curr_task_dir, "config", "task.conf")
    if not os.path.isfile(curr_task_conf_file):
        clean_up_and_exit(
            "Missing configuration file for task %s" % curr_task_dir)

    # When only cleaning run directories, no jobs need to be generated
    if args.remove_run_dir:
        remove_run_dir()
        flow_run_cmd_list = []
        GeneralSection = []
        return flow_run_cmd_list, GeneralSection

    # Create a run directory (./runxxx) for the current task run
    run_dirs = [int(os.path.basename(x)[-3:]) for x in glob.glob('run*[0-9]')]
    curr_run_dir = "run%03d" % (max(run_dirs + [0]) + 1)
    try:
        os.mkdir(curr_run_dir)
        if os.path.islink('latest') or os.path.exists('latest'):
            os.remove("latest")
        os.symlink(curr_run_dir, "latest")
        logger.info('Created "%s" directory for current task run' %
                    curr_run_dir)
    except:
        logger.exception("")
        logger.error("Failed to create new run directory in task directory")
    os.chdir(curr_run_dir)

    # Read the task configuration file and check consistency
    task_conf = ConfigParser(allow_no_value=True,
                             interpolation=ExtendedInterpolation())
    script_env_vars['PATH']["TASK_NAME"] = "/".join(taskname)
    script_env_vars['PATH']["TASK_DIR"] = curr_task_dir
    task_conf.read_dict(script_env_vars)
    task_conf.read_file(open(curr_task_conf_file))

    required_sec = ["GENERAL", "BENCHMARKS", "ARCHITECTURES"]
    missing_section = list(set(required_sec) - set(task_conf.sections()))
    if missing_section:
        clean_up_and_exit("Missing sections %s" % " ".join(missing_section) +
                          " in task configuration file")

    # Declare variables to access sections
    TaskFileSections = task_conf.sections()
    SynthSection = task_conf["SYNTHESIS_PARAM"]
    GeneralSection = task_conf["GENERAL"]
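
    # For reference, a minimal task.conf sketch (section names match the
    # checks above; the keys and paths below are illustrative only):
    #   [GENERAL]
    #   fpga_flow=yosys_vpr
    #   [ARCHITECTURES]
    #   arch0=${PATH:ARCH_PATH}/example_arch.xml
    #   [BENCHMARKS]
    #   bench0=${PATH:BENCH_PATH}/example/*.v
    #   [SYNTHESIS_PARAM]
    #   bench0_top=top
    #   [SCRIPT_PARAM_Common]
    #   vpr_route_chan_width=50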

    # Check that the specified architecture files exist
    # TODO: Store as a dictionary and reference by key
    archfile_list = []
    for _, arch_file in task_conf["ARCHITECTURES"].items():
        arch_full_path = arch_file
        if os.path.isfile(arch_full_path):
            archfile_list.append(arch_full_path)
        else:
            clean_up_and_exit("Architecture file not found: " +
                              "%s" % arch_file)
    if not len(archfile_list) == len(list(set(archfile_list))):
        clean_up_and_exit("Found duplicate architectures in config file")

    # Get flow information
    logger.info('Running "%s" flow',
                GeneralSection.get("fpga_flow", fallback="yosys_vpr"))

    # Check that the specified benchmark files exist
    benchmark_list = []
    for bech_name, each_benchmark in task_conf["BENCHMARKS"].items():
        # Declare a variable to store parameters for the current benchmark
        CurrBenchPara = {}
        # Parse benchmark files
        bench_files = []
        for eachpath in each_benchmark.split(","):
            files = glob.glob(eachpath)
            if not len(files):
                clean_up_and_exit(("No files added to benchmark %s" % bech_name) +
                                  " with path %s" % (eachpath))
            bench_files += files

        # Read provided benchmark configurations
        # Common configurations:
        # - All benchmarks may share the same yosys synthesis template script
        # - All benchmarks may share the same yosys rewrite template script,
        #   which converts the post-synthesis .v netlist to be compatible
        #   with the .blif port definition. This is required for correct
        #   verification at the end of the flows
        # - All benchmarks may share the same routing channel width in VPR
        #   runs. This is designed to enable architecture evaluations for a
        #   fixed device model
        # - All benchmarks may share the same options for reading verilog files
        ys_for_task_common = SynthSection.get("bench_yosys_common")
        ys_rewrite_for_task_common = SynthSection.get(
            "bench_yosys_rewrite_common")
        chan_width_common = SynthSection.get("bench_chan_width_common")
        yosys_params = [
            "read_verilog_options",
            "yosys_args",
            "yosys_bram_map_rules",
            "yosys_bram_map_verilog",
            "yosys_cell_sim_verilog",
            "yosys_cell_sim_systemverilog",
            "yosys_cell_sim_vhdl",
            "yosys_blackbox_modules",
            "yosys_dff_map_verilog",
            "yosys_dsp_map_parameters",
            "yosys_dsp_map_verilog",
            "verific_verilog_standard",
            "verific_systemverilog_standard",
            "verific_vhdl_standard",
            "verific_include_dir",
            "verific_library_dir",
            "verific_search_lib",
        ]
        yosys_params_common = {}
        for param in yosys_params:
            yosys_params_common[param.upper()] = SynthSection.get(
                "bench_" + param + "_common")

        # Individual benchmark configuration
        CurrBenchPara["files"] = bench_files
        CurrBenchPara["top_module"] = SynthSection.get(bech_name + "_top",
                                                       fallback="top")
        CurrBenchPara["ys_script"] = SynthSection.get(bech_name + "_yosys",
                                                      fallback=ys_for_task_common)
        CurrBenchPara["ys_rewrite_script"] = SynthSection.get(
            bech_name + "_yosys_rewrite",
            fallback=ys_rewrite_for_task_common)
        CurrBenchPara["chan_width"] = SynthSection.get(bech_name + "_chan_width",
                                                       fallback=chan_width_common)
        CurrBenchPara["benchVariable"] = []
        for eachKey, eachValue in SynthSection.items():
            if bech_name in eachKey:
                eachKey = eachKey.replace(bech_name + "_", "").upper()
                CurrBenchPara["benchVariable"] += [f"--{eachKey}", eachValue]

        for param, value in yosys_params_common.items():
            if param not in CurrBenchPara["benchVariable"] and value:
                CurrBenchPara["benchVariable"] += [f"--{param}", value]

        if GeneralSection.get("fpga_flow") == "vpr_blif":
            # Check that the activity file exists, but only when power
            # analysis is required
            if (GeneralSection.getboolean("power_analysis")):
                if not SynthSection.get(bech_name + "_act"):
                    clean_up_and_exit("Missing argument %s" % (bech_name + "_act") +
                                      " for vpr_blif flow")
                CurrBenchPara["activity_file"] = SynthSection.get(bech_name + "_act")
            else:
                # If the user defined an activity file, use it;
                # otherwise use a dummy activity file name
                if not SynthSection.get(bech_name + "_act"):
                    CurrBenchPara["activity_file"] = bech_name + "_act"
                else:
                    CurrBenchPara["activity_file"] = SynthSection.get(bech_name + "_act")

            # Check that the base verilog file exists
            if not SynthSection.get(bech_name + "_verilog"):
                clean_up_and_exit("Missing argument %s for vpr_blif flow" %
                                  (bech_name + "_verilog"))
            CurrBenchPara["verilog_file"] = SynthSection.get(
                bech_name + "_verilog")

        # Add the script parameter list to the current benchmark
        ScriptSections = [x for x in TaskFileSections if "SCRIPT_PARAM" in x]
        script_para_list = {}
        for eachset in ScriptSections:
            command = []
            for key, values in task_conf[eachset].items():
                command += ["--" + key, values] if values else ["--" + key]
            # Set a label for the script parameters
            set_lbl = eachset.replace("SCRIPT_PARAM", "")
            set_lbl = set_lbl[1:] if set_lbl else "Common"
            script_para_list[set_lbl] = command
        CurrBenchPara["script_params"] = script_para_list
        benchmark_list.append(CurrBenchPara)

    # Count duplicated top module names among the benchmarks.
    # This is required because the flow run directory names for these
    # benchmarks differ from the others, which are already unique
    benchmark_top_module_count = []
    for bench in benchmark_list:
        benchmark_top_module_count.append(bench["top_module"])

    # Create an OpenFPGA flow run command for each combination of
    # architecture, benchmark and parameters
    # Create a run_job object [arch, bench, run_dir, command]
    flow_run_cmd_list = []
    for indx, arch in enumerate(archfile_list):
        for bench in benchmark_list:
            for lbl, param in bench["script_params"].items():
                if (benchmark_top_module_count.count(bench["top_module"]) > 1):
                    flow_run_dir = get_flow_rundir(
                        arch,
                        "bench" + str(benchmark_list.index(bench)) + "_" +
                        bench["top_module"],
                        lbl)
                else:
                    flow_run_dir = get_flow_rundir(arch, bench["top_module"],
                                                   lbl)

                command = create_run_command(
                    curr_job_dir=flow_run_dir,
                    archfile=arch,
                    benchmark_obj=bench,
                    param=param,
                    task_conf=task_conf)
                flow_run_cmd_list.append({
                    "arch": arch,
                    "bench": bench,
                    "name": "%02d_%s_%s" % (indx, bench["top_module"], lbl),
                    "run_dir": flow_run_dir,
                    "commands": command + bench["benchVariable"],
                    "finished": False,
                    "status": False})

    logger.info('Found %d architectures, %d benchmarks & %d script parameters' %
                (len(archfile_list), len(benchmark_list), len(ScriptSections)))
    logger.info('Created total %d jobs' % len(flow_run_cmd_list))

    return flow_run_cmd_list, GeneralSection
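
# For illustration, each job in flow_run_cmd_list is a dict shaped like
# (values are hypothetical):
#   {"arch": ".../arch0.xml", "bench": {...}, "name": "00_top_Common",
#    "run_dir": ".../arch0/top/Common", "commands": [...],
#    "finished": False, "status": False}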


# Make the directory name unique by including the benchmark index in the list.
# This is needed because benchmarks may share the same top module name
def get_flow_rundir(arch, top_module, flow_params=None):
    path = [
        os.path.basename(arch).replace(".xml", ""),
        top_module,
        flow_params if flow_params else "common"
    ]
    return os.path.abspath(os.path.join(*path))
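
# For illustration (a hypothetical architecture and benchmark): with arch
# "k6_n10.xml", top module "top" and no parameter label, the run directory
# resolves to <current_dir>/k6_n10/top/common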


def create_run_command(curr_job_dir, archfile, benchmark_obj, param, task_conf):
    """
    create_run_command accepts the run directory, architecture list and
    fpga_flow configuration file, and prepares the final executable
    fpga_flow script
    TODO : Replace this section after converting fpga_flow to a python script
    Config file creation and benchmark list can be skipped
    """
    # ========== File/Directory consistency check ==========
    if not os.path.isdir(gc["misc_dir"]):
        clean_up_and_exit("Miscellaneous directory does not exist")
    # ============= Create execution folder =================
    if os.path.isdir(curr_job_dir):
        question = "One of the result directories already exists.\n"
        question += "%s\n" % curr_job_dir
        reply = str(input(question + '(y/n): ')).lower().strip()
        if reply[:1] in ['y', 'yes']:
            shutil.rmtree(curr_job_dir)
        else:
            logger.info("Result directory removal denied by the user")
            exit()
    os.makedirs(curr_job_dir)

    # Make the execution command to run the OpenFPGA flow
    task_gc = task_conf["GENERAL"]
    task_OFPGAc = task_conf["OpenFPGA_SHELL"]
    command = [archfile] + benchmark_obj["files"]
    command += ["--top_module", benchmark_obj["top_module"]]
    command += ["--run_dir", curr_job_dir]

    if task_gc.get("fpga_flow"):
        command += ["--fpga_flow", task_gc.get("fpga_flow")]

    if task_gc.getboolean("verific"):
        command += ["--verific"]

    if task_gc.get("run_engine") == "openfpga_shell":
        for eachKey in task_OFPGAc.keys():
            command += [f"--{eachKey}",
                        task_OFPGAc.get(f"{eachKey}")]

    if benchmark_obj.get("activity_file"):
        command += ["--activity_file", benchmark_obj.get("activity_file")]
    if benchmark_obj.get("verilog_file"):
        command += ["--base_verilog", benchmark_obj.get("verilog_file")]
    if benchmark_obj.get("ys_script"):
        command += ["--yosys_tmpl", benchmark_obj["ys_script"]]
    if benchmark_obj.get("ys_rewrite_script"):
        command += ["--ys_rewrite_tmpl", benchmark_obj["ys_rewrite_script"]]
    if task_gc.getboolean("power_analysis"):
        command += ["--power"]
        command += ["--power_tech", task_gc.get("power_tech_file")]
    if task_gc.get("arch_variable_file"):
        command += ["--arch_variable_file", task_gc.get("arch_variable_file")]
    if task_gc.getboolean("spice_output"):
        command += ["--vpr_fpga_spice"]
    if task_gc.getboolean("verilog_output"):
        command += ["--vpr_fpga_verilog"]
        command += ["--vpr_fpga_verilog_dir", curr_job_dir]
        command += ["--vpr_fpga_x2p_rename_illegal_port"]
    # Add other parameters to pass
    command += param

    if args.debug:
        command += ["--debug"]
    return command
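

# A note on the child log format (illustrative): openfpga_flow child
# processes emit lines like "INFO - Parsing architecture file"; the helper
# below re-logs them at the matching level, and non-matching lines fall
# through to plain INFO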
def strip_child_logger_info(line):
    try:
        logtype, message = line.split("-", 1)
        lognumb = {"CRITICAL": 50, "ERROR": 40, "WARNING": 30,
                   "INFO": 20, "DEBUG": 10, "NOTSET": 0}
        logger.log(lognumb[logtype.strip().upper()], message)
    except:
        logger.info(line)


def run_single_script(s, eachJob, job_list):
    with s:
        thread_name = threading.currentThread().getName()
        eachJob["starttime"] = time.time()
        try:
            logfile = "%s_out.log" % thread_name
            with open(logfile, 'w+') as output:
                output.write("*" * 20 + '\n')
                output.write("RunDirectory : %s\n" % os.getcwd())
                command = [os.getenv('PYTHON_EXEC', gc["python_path"]),
                           gc["script_default"]] + eachJob["commands"]
                output.write(" ".join(command) + '\n')
                output.write("*" * 20 + '\n')
                logger.debug("Running OpenFPGA flow with [%s]" % command)
                process = subprocess.Popen(command,
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.STDOUT,
                                           universal_newlines=True)
                for line in process.stdout:
                    if args.show_thread_logs:
                        strip_child_logger_info(line[:-1])
                    sys.stdout.buffer.flush()
                    output.write(line)
                process.wait()
                if process.returncode:
                    raise subprocess.CalledProcessError(0, " ".join(command))
            eachJob["status"] = True
        except:
            logger.exception("Failed to execute openfpga flow - %s",
                             eachJob["name"])
            if not args.continue_on_fail:
                os._exit(1)
        eachJob["endtime"] = time.time()
        timediff = timedelta(seconds=(eachJob["endtime"] -
                                      eachJob["starttime"]))
        timestr = humanize.naturaldelta(timediff) if "humanize" in sys.modules \
            else str(timediff)
        logger.info("%s Finished with returncode %d, Time Taken %s",
                    thread_name, process.returncode, timestr)
        eachJob["finished"] = True
        # Count jobs that are still pending
        no_of_pending_jobs = sum([not eachJ["finished"] for eachJ in job_list])
        logger.info("***** %d runs pending *****", no_of_pending_jobs)


def run_actions(job_list):
    # All threads start immediately; the semaphore acquired inside
    # run_single_script limits how many run concurrently
    thread_sema = threading.Semaphore(args.maxthreads)
    thread_list = []
    for _, eachjob in enumerate(job_list):
        t = threading.Thread(target=run_single_script, name=eachjob["name"],
                             args=(thread_sema, eachjob, job_list))
        t.start()
        thread_list.append(t)
    for eachthread in thread_list:
        eachthread.join()


def collect_results(job_run_list):
    task_result = []
    for run in job_run_list:
        if not run["status"]:
            logger.warning("Skipping %s run", run["name"])
            continue
        # Check that at least one result file exists
        if not glob.glob(os.path.join(run["run_dir"], "*.result")):
            logger.info("No result files found for %s" % run["name"])
            continue
        # Read and merge result files
        vpr_res = ConfigParser(allow_no_value=True,
                               interpolation=ExtendedInterpolation())
        vpr_res.read_file(
            open(os.path.join(run["run_dir"], "vpr_stat.result")))
        result = OrderedDict()
        result["name"] = run["name"]
        result["TotalRunTime"] = int(run["endtime"] - run["starttime"])
        result.update(vpr_res["RESULTS"])
        task_result.append(result)

    colnames = []
    for eachLbl in task_result:
        colnames.extend(eachLbl.keys())
    if len(task_result):
        with open("task_result.csv", 'w', newline='') as csvfile:
            # Deduplicate column names while preserving their order
            writer = csv.DictWriter(
                csvfile, extrasaction='ignore',
                fieldnames=list(OrderedDict.fromkeys(colnames)))
            writer.writeheader()
            for eachResult in task_result:
                writer.writerow(eachResult)
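
# For illustration, task_result.csv might look like this (columns depend on
# the keys found in each vpr_stat.result; names and values are hypothetical):
#   name,TotalRunTime,clb_blocks,critical_path_delay
#   00_top_Common,42,12,3.1e-09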


if __name__ == "__main__":
    main()