import os
import sys
import shutil
import time
from datetime import timedelta
import shlex
import argparse
from configparser import ConfigParser, ExtendedInterpolation
import logging
import glob
import subprocess
import threading
import csv
from string import Template
import pprint
from importlib import util
from collections import OrderedDict
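
# humanize is optional: when available, elapsed run times are reported as
# human-friendly text; otherwise the script falls back to str(timedelta)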
if util.find_spec("humanize"):
    import humanize


# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
# Configure logging system
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
logging.basicConfig(level=logging.INFO, stream=sys.stdout,
                    format='%(levelname)s (%(threadName)10s) - %(message)s')
logger = logging.getLogger('OpenFPGA_Task_logs')


# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
# Read command-line arguments
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
parser = argparse.ArgumentParser()
parser.add_argument('tasks', nargs='+')
parser.add_argument('--maxthreads', type=int, default=2,
                    help="Number of fpga_flow threads to run, default = 2; "
                         "typically <= number of processors on the system")
parser.add_argument('--config', help="Override default configuration")
parser.add_argument('--test_run', action="store_true",
                    help="Dummy run that only prints the final generated " +
                         "VPR commands")
parser.add_argument('--debug', action="store_true",
                    help="Run script in debug mode")
parser.add_argument('--skip_thread_logs', action="store_true",
                    help="Skip logs from running threads")
args = parser.parse_args()


# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
# Read script configuration file
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
task_script_dir = os.path.dirname(os.path.abspath(__file__))
script_env_vars = {"PATH": {
    "OPENFPGA_FLOW_PATH": task_script_dir,
    "OPENFPGA_PATH": os.path.abspath(os.path.join(task_script_dir,
                                                  os.pardir, os.pardir))}}
config = ConfigParser(interpolation=ExtendedInterpolation())
config.read_dict(script_env_vars)
with open(os.path.join(task_script_dir, 'run_fpga_task.conf')) as conf_fp:
    config.read_file(conf_fp)
gc = config["GENERAL CONFIGURATION"]
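
# ExtendedInterpolation lets entries in run_fpga_task.conf reference the
# injected paths above using the ${section:option} syntax,
# e.g. ${PATH:OPENFPGA_PATH}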


def main():
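    """Run each requested task: build the job list, then execute the jobs
    (or just pretty-print them for a --test_run)."""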
    validate_command_line_arguments()
    for eachtask in args.tasks:
        logger.info("Currently running task %s" % eachtask)
        eachtask = eachtask.replace("\\", "/").split("/")
        job_run_list = generate_each_task_actions(eachtask)
        eachtask = "_".join(eachtask)
        if not args.test_run:
            run_actions(job_run_list)
            collect_results(job_run_list)
        else:
            pprint.pprint(job_run_list)
    logger.info("Task execution completed")
    sys.exit(0)


# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
# Subroutines start here
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =


def clean_up_and_exit(msg):
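    """Log the given error message and terminate with a non-zero exit code."""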
    logger.error(msg)
    logger.error("Exiting . . . . . .")
    sys.exit(1)


def validate_command_line_arguments():
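    """Apply command-line options: switch the logger to debug mode when
    requested and report the configured thread count."""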
    if args.debug:
        logger.info("Setting logger in debug mode")
        logger.setLevel(logging.DEBUG)
    logger.info("Set up to run %d parallel threads", args.maxthreads)


def generate_each_task_actions(taskname):
    """
    Generate all the flow-run jobs required for each benchmark in the
    given task
    """

    # Check that the task directory exists and is consistent
    curr_task_dir = os.path.join(gc["task_dir"], *taskname)
    if not os.path.isdir(curr_task_dir):
        clean_up_and_exit("Task directory [%s] not found" % curr_task_dir)
    os.chdir(curr_task_dir)

    curr_task_conf_file = os.path.join(curr_task_dir, "config", "task.conf")
    if not os.path.isfile(curr_task_conf_file):
        clean_up_and_exit(
            "Missing configuration file for task %s" % curr_task_dir)

    # Create a run directory ./runxxx for the current task run
    run_dirs = [int(os.path.basename(x)[-3:]) for x in glob.glob('run*[0-9]')]
    curr_run_dir = "run%03d" % (max(run_dirs + [0]) + 1)
    try:
        os.mkdir(curr_run_dir)
        # Repoint the "latest" symlink at the new run directory
        if os.path.islink('latest') or os.path.exists('latest'):
            os.remove("latest")
        os.symlink(curr_run_dir, "latest")
        logger.info('Created "%s" directory for current task run' %
                    curr_run_dir)
    except Exception:
        logger.exception("")
        logger.error("Failed to create new run directory in task directory")
    os.chdir(curr_run_dir)

    # Read task configuration file and check consistency
    task_conf = ConfigParser(allow_no_value=True,
                             interpolation=ExtendedInterpolation())
    task_conf.read_dict(script_env_vars)
    with open(curr_task_conf_file) as conf_fp:
        task_conf.read_file(conf_fp)

    required_sec = ["GENERAL", "BENCHMARKS", "ARCHITECTURES",
                    "SYNTHESIS_PARAM"]
    missing_section = list(set(required_sec) - set(task_conf.sections()))
    if missing_section:
        clean_up_and_exit("Missing sections %s" % " ".join(missing_section) +
                          " in task configuration file")

    # Declare variables to access sections
    TaskFileSections = task_conf.sections()
    SynthSection = task_conf["SYNTHESIS_PARAM"]
    GeneralSection = task_conf["GENERAL"]

    # Check that the specified architecture files exist
    archfile_list = []
    for _, arch_file in task_conf["ARCHITECTURES"].items():
        arch_full_path = arch_file
        if os.path.isfile(arch_full_path):
            archfile_list.append(arch_full_path)
        else:
            clean_up_and_exit("Architecture file not found: " +
                              "%s " % arch_file)
    if len(archfile_list) != len(set(archfile_list)):
        clean_up_and_exit("Found duplicate architectures in config file")

    # Check that the specified benchmark files exist
    benchmark_list = []
    for bench_name, each_benchmark in task_conf["BENCHMARKS"].items():
        # Declare variable to store parameters for the current benchmark
        CurrBenchPara = {}

        # Parse benchmark file
        bench_files = []
        for eachpath in each_benchmark.split(","):
            files = glob.glob(eachpath)
            if not files:
                clean_up_and_exit(("No files added to benchmark %s" %
                                   bench_name) +
                                  " with path %s " % (eachpath))
            bench_files += files

        # Read provided benchmark configurations
        # Common configurations
        ys_for_task_common = SynthSection.get("bench_yosys_common")
        chan_width_common = SynthSection.get("bench_chan_width_common")

        # Individual benchmark configuration
        CurrBenchPara["files"] = bench_files
        CurrBenchPara["top_module"] = SynthSection.get(
            bench_name + "_top", fallback="top")
        CurrBenchPara["ys_script"] = SynthSection.get(
            bench_name + "_yosys", fallback=ys_for_task_common)
        CurrBenchPara["chan_width"] = SynthSection.get(
            bench_name + "_chan_width", fallback=chan_width_common)

        logger.info('Running "%s" flow' %
                    GeneralSection.get("fpga_flow", fallback="yosys_vpr"))
        if GeneralSection.get("fpga_flow") == "vpr_blif":
            # Check that the activity file exists
            if not SynthSection.get(bench_name + "_act"):
                clean_up_and_exit("Missing argument %s" %
                                  (bench_name + "_act") +
                                  " for vpr_blif flow")
            CurrBenchPara["activity_file"] = SynthSection.get(
                bench_name + "_act")

            # Check that the base verilog file exists
            if not SynthSection.get(bench_name + "_verilog"):
                clean_up_and_exit("Missing argument %s for vpr_blif flow" %
                                  (bench_name + "_verilog"))
            CurrBenchPara["verilog_file"] = SynthSection.get(
                bench_name + "_verilog")

        # Add script parameter list for the current benchmark
        ScriptSections = [x for x in TaskFileSections if "SCRIPT_PARAM" in x]
        script_para_list = {}
        for eachset in ScriptSections:
            command = []
            for key, values in task_conf[eachset].items():
                command += ["--" + key, values] if values else ["--" + key]

            # Set label for the script parameter set
            set_lbl = eachset.replace("SCRIPT_PARAM", "")
            set_lbl = set_lbl[1:] if set_lbl else "Common"
            script_para_list[set_lbl] = command
        CurrBenchPara["script_params"] = script_para_list

        benchmark_list.append(CurrBenchPara)

    # Create an OpenFPGA flow run command for each combination of
    # architecture, benchmark and parameter set
    # Each run_job object carries [arch, bench, run_dir, commands]
    flow_run_cmd_list = []
    for indx, arch in enumerate(archfile_list):
        for bench in benchmark_list:
            for lbl, param in bench["script_params"].items():
                flow_run_dir = get_flow_rundir(arch, bench["top_module"], lbl)
                command = create_run_command(
                    curr_job_dir=flow_run_dir,
                    archfile=arch,
                    benchmark_obj=bench,
                    param=param,
                    task_conf=task_conf)
                flow_run_cmd_list.append({
                    "arch": arch,
                    "bench": bench,
                    "name": "%02d_arch%s_%s" % (indx, bench["top_module"], lbl),
                    "run_dir": flow_run_dir,
                    "commands": command,
                    "status": False})
    return flow_run_cmd_list


def get_flow_rundir(arch, top_module, flow_params=None):
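    """Build the per-job run directory path from the architecture file
    name, the benchmark top module and the parameter-set label."""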
    path = [
        os.path.basename(arch).replace(".xml", ""),
        top_module,
        flow_params if flow_params else "common"
    ]
    return os.path.abspath(os.path.join(*path))


def create_run_command(curr_job_dir, archfile, benchmark_obj, param, task_conf):
    """
    Accept the run directory, architecture file and task configuration,
    and prepare the final executable fpga_flow command
    TODO: Replace this section after converting fpga_flow to a Python
    script; config file creation and the benchmark list can be skipped
    """
    # = = = = = = = = = File/Directory Consistency Check = = = = = = = = = =
    if not os.path.isdir(gc["misc_dir"]):
        clean_up_and_exit("Miscellaneous directory does not exist")

    # = = = = = = = = = = = = Create execution folder = = = = = = = = = = = =
    if os.path.isdir(curr_job_dir):
        question = "The result directory already exists; remove it?\n"
        question += "%s\n" % curr_job_dir
        reply = str(input(question + ' (y/n): ')).lower().strip()
        if reply[:1] == 'y':
            shutil.rmtree(curr_job_dir)
        else:
            logger.info("Result directory removal denied by the user")
            sys.exit()
    os.makedirs(curr_job_dir)

    # Make the execution command to run the OpenFPGA flow
    task_gc = task_conf["GENERAL"]
    command = [archfile] + benchmark_obj["files"]
    command += ["--top_module", benchmark_obj["top_module"]]
    command += ["--run_dir", curr_job_dir]

    if task_gc.get("fpga_flow"):
        command += ["--fpga_flow", task_gc.get("fpga_flow")]

    if benchmark_obj.get("activity_file"):
        command += ["--activity_file", benchmark_obj.get("activity_file")]

    if benchmark_obj.get("verilog_file"):
        command += ["--base_verilog", benchmark_obj.get("verilog_file")]

    if benchmark_obj.get("ys_script"):
        command += ["--yosys_tmpl", benchmark_obj["ys_script"]]

    if task_gc.getboolean("power_analysis"):
        command += ["--power"]
        command += ["--power_tech", task_gc.get("power_tech_file")]

    if task_gc.getboolean("spice_output"):
        command += ["--vpr_fpga_spice"]

    if task_gc.getboolean("verilog_output"):
        command += ["--vpr_fpga_verilog"]
        command += ["--vpr_fpga_verilog_dir", "."]
        command += ["--vpr_fpga_x2p_rename_illegal_port"]

    # Add other parameters to pass through
    command += param

    if args.debug:
        command += ["--debug"]
    return command


def strip_child_logger_info(line):
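    """Re-emit a child-process log line through this logger, keeping the
    original severity when the 'LEVEL - message' prefix can be parsed."""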
    try:
        logtype, message = line.split(" - ", 1)
        lognumb = {"CRITICAL": 50, "ERROR": 40, "WARNING": 30,
                   "INFO": 20, "DEBUG": 10, "NOTSET": 0}
        logger.log(lognumb[logtype.strip().upper()], message)
    except (ValueError, KeyError):
        logger.info(line)


def run_single_script(s, eachJob):
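    """Run a single OpenFPGA flow job while holding the semaphore s,
    stream its output to a per-thread log file and record status/timing."""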
    logger.debug('Added job in pool')
    with s:
        logger.debug("Running OpenFPGA flow with " +
                     " ".join(eachJob["commands"]))
        name = threading.current_thread().name
        eachJob["starttime"] = time.time()
        try:
            logfile = "%s_out.log" % name
            with open(logfile, 'w+') as output:
                process = subprocess.Popen(["python3.5",
                                            gc["script_default"]] +
                                           eachJob["commands"],
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.STDOUT,
                                           universal_newlines=True)
                for line in process.stdout:
                    if not args.skip_thread_logs:
                        strip_child_logger_info(line[:-1])
                    sys.stdout.buffer.flush()
                    output.write(line)
                process.wait()
            if process.returncode:
                raise subprocess.CalledProcessError(process.returncode,
                                                    eachJob["commands"])
            eachJob["status"] = True
        except Exception:
            logger.exception("Failed to execute openfpga flow - " +
                             eachJob["name"])
        eachJob["endtime"] = time.time()
        timediff = timedelta(seconds=(eachJob["endtime"] -
                                      eachJob["starttime"]))
        timestr = humanize.naturaldelta(timediff) if "humanize" in sys.modules \
            else str(timediff)
        logger.info("%s Finished with returncode %d, Time Taken %s " %
                    (name, process.returncode, timestr))


def run_actions(job_run_list):
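    """Start one worker thread per job, throttled to args.maxthreads
    concurrent runs by a shared semaphore, and wait for all to finish."""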
    thread_sema = threading.Semaphore(args.maxthreads)
    thread_list = []
    for index, eachjob in enumerate(job_run_list):
        t = threading.Thread(target=run_single_script,
                             name='Job_%02d' % (index + 1),
                             args=(thread_sema, eachjob))
        t.start()
        thread_list.append(t)

    for eachthread in thread_list:
        eachthread.join()


def collect_results(job_run_list):
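    """Collect vpr_stat.result data from each successful run and merge it
    into a single task_result.csv in the current run directory."""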
    task_result = []
    for run in job_run_list:
        if not run["status"]:
            logger.warning("Skipping %s run", run["name"])
            continue
        # Check whether any result file exists
        if not glob.glob(os.path.join(run["run_dir"], "*.result")):
            logger.info("No result files found for %s" % run["name"])
            continue

        # Read and merge result file
        vpr_res = ConfigParser(allow_no_value=True,
                               interpolation=ExtendedInterpolation())
        with open(os.path.join(run["run_dir"], "vpr_stat.result")) as res_fp:
            vpr_res.read_file(res_fp)
        result = OrderedDict()
        result["name"] = run["name"]
        result["TotalRunTime"] = int(run["endtime"] - run["starttime"])
        result.update(vpr_res["RESULTS"])
        task_result.append(result)

    if len(task_result):
        with open("task_result.csv", 'w', newline='') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=task_result[0].keys())
            writer.writeheader()
            for eachResult in task_result:
                writer.writerow(eachResult)


if __name__ == "__main__":
    main()