2019-11-01 19:20:40 -05:00
|
|
|
from string import Template
|
|
|
|
import sys
|
|
|
|
import os
|
2019-11-16 00:23:15 -06:00
|
|
|
import re
|
2019-11-16 20:10:04 -06:00
|
|
|
import csv
|
2019-11-16 00:23:15 -06:00
|
|
|
import glob
|
2019-11-16 02:06:09 -06:00
|
|
|
import time
|
|
|
|
import threading
|
|
|
|
from datetime import timedelta
|
2019-11-01 19:20:40 -05:00
|
|
|
import argparse
|
|
|
|
import subprocess
|
|
|
|
import logging
|
2019-11-16 00:23:15 -06:00
|
|
|
from configparser import ConfigParser, ExtendedInterpolation
|
2019-11-01 19:20:40 -05:00
|
|
|
|
|
|
|
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
# Configure logging system
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
# Single format shared by the console handler (configured here) and the
# per-run file handler created later in main().
FILE_LOG_FORMAT = '%(levelname)s (%(threadName)10s) - %(message)s'
# Reuse the constant instead of repeating the identical literal (DRY fix;
# the emitted format is unchanged).
logging.basicConfig(level=logging.INFO, stream=sys.stdout,
                    format=FILE_LOG_FORMAT)
logger = logging.getLogger('Modelsim_run_log')
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
# Parse commandline arguments
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
parser = argparse.ArgumentParser()
parser.add_argument('files', nargs='+',
                    help="Pass SimulationDeckInfo generated by OpenFPGA flow" +
                    " or pass taskname <taskname> <run_number[optional]>")
parser.add_argument('--maxthreads', type=int, default=2,
                    help="Number of fpga_flow threads to run default = 2," +
                    "Typically <= Number of processors on the system")
parser.add_argument('--debug', action="store_true",
                    help="Run script in debug mode")
parser.add_argument('--modelsim_proc_tmpl', type=str,
                    help="Modelsim proc template file")
parser.add_argument('--modelsim_runsim_tmpl', type=str,
                    help="Modelsim runsim template file")
parser.add_argument('--run_sim', action="store_true",
                    help="Execute generated script in formality")
parser.add_argument('--modelsim_proj_name',
                    help="Provide modelsim project name")
# Help text fixed: it previously read "Skip any confirmation", a copy-paste
# of the --skip_prompt help string.
parser.add_argument('--modelsim_ini', type=str,
                    default="/uusoc/facility/cad_tools/Mentor/modelsim10.7b/modeltech/modelsim.ini",
                    help="Path to the modelsim.ini initialization file")
parser.add_argument('--skip_prompt', action='store_true',
                    help='Skip any confirmation')
parser.add_argument('--ini_filename', type=str,
                    default="simulation_deck_info.ini",
                    help='default INI filename in run dir')
args = parser.parse_args()
|
2019-11-16 00:23:15 -06:00
|
|
|
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
# Read script configuration file
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
task_script_dir = os.path.dirname(os.path.abspath(__file__))
# Seed the parser with path variables; the ${PATH:...} placeholders are
# resolved lazily by ExtendedInterpolation, so OPENFPGA_PATH may be
# referenced before it is defined in this dict.
script_env_vars = ({"PATH": {
    "OPENFPGA_FLOW_PATH": task_script_dir,
    "ARCH_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "arch"),
    "BENCH_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "benchmarks"),
    "TECH_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "tech"),
    "SPICENETLIST_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "SpiceNetlists"),
    "VERILOG_PATH": os.path.join("${PATH:OPENFPGA_PATH}", "VerilogNetlists"),
    "OPENFPGA_PATH": os.path.abspath(os.path.join(task_script_dir, os.pardir,
                                                  os.pardir))}})
config = ConfigParser(interpolation=ExtendedInterpolation())
config.read_dict(script_env_vars)
# Bugfix: the conf file handle was previously opened inline and never
# closed; use a context manager so it is released deterministically.
with open(os.path.join(task_script_dir, 'run_fpga_task.conf')) as conf_fp:
    config.read_file(conf_fp)
gc = config["GENERAL CONFIGURATION"]
|
|
|
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
# Load default templates for modelsim
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
task_script_dir = os.path.dirname(os.path.abspath(__file__))
# Default templates live in the sibling "misc" directory of this script.
_misc_dir = os.path.join(task_script_dir, os.pardir, "misc")
if not args.modelsim_proc_tmpl:
    args.modelsim_proc_tmpl = os.path.join(_misc_dir, "modelsim_proc.tcl")
if not args.modelsim_runsim_tmpl:
    args.modelsim_runsim_tmpl = os.path.join(_misc_dir, "modelsim_runsim.tcl")
# Normalize both template paths to absolute form (create_tcl_script
# chdir()s around, so relative paths would break).
args.modelsim_proc_tmpl = os.path.abspath(args.modelsim_proc_tmpl)
args.modelsim_runsim_tmpl = os.path.abspath(args.modelsim_runsim_tmpl)
|
def main():
    """Entry point.

    Two invocation modes:
      * first positional argument is an existing file -> treat the
        arguments as simulation-deck INI files and generate TCL scripts;
      * otherwise treat it as a task name (optional second argument is a
        run number), locate the task run directory, harvest the INI files
        referenced by the flow logs, then generate (and optionally run)
        the ModelSim scripts.
    """
    if os.path.isfile(args.files[0]):
        create_tcl_script(args.files)
    else:
        # Check if task directory exists and consistent
        taskname = args.files[0]
        task_run = "latest"
        if len(args.files) > 1:
            task_run = f"run{int(args.files[1]):03}"

        temp_dir = os.path.join(gc["task_dir"], taskname)
        if not os.path.isdir(temp_dir):
            clean_up_and_exit("Task directory [%s] not found" % temp_dir)
        temp_dir = os.path.join(gc["task_dir"], taskname, task_run)
        if not os.path.isdir(temp_dir):
            clean_up_and_exit("Task run directory [%s] not found" % temp_dir)

        # = = = = = = = Create a current script log file handler = = = =
        logfile_path = os.path.join(gc["task_dir"],
                                    taskname, task_run, "modelsim_run.log")
        resultfile_path = os.path.join(gc["task_dir"],
                                       taskname, task_run,
                                       "modelsim_result.csv")
        logfilefh = logging.FileHandler(logfile_path, "w")
        logfilefh.setFormatter(logging.Formatter(FILE_LOG_FORMAT))
        logger.addHandler(logfilefh)
        logger.info("Created log file at %s" % logfile_path)
        # = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =

        # = = = = Read Task log file and extract run directory = = =
        logfile = os.path.join(gc["task_dir"], taskname, task_run,
                               "*_out.log")
        logfiles = glob.glob(logfile)
        if not logfiles:
            clean_up_and_exit("No successful run found in [%s]" % temp_dir)

        task_ini_files = []
        for eachfile in logfiles:
            # Scan each flow log for "Run directory : <path>" lines; every
            # run directory may contain a simulation-deck INI file.
            # Bugfix: the file was previously opened a second time inside
            # the comprehension, leaking a handle and leaving fp unused.
            with open(eachfile) as fp:
                run_dir = [re.findall(r'^INFO.*Run directory : (.*)$', line)
                           for line in fp]
            for each_run in filter(bool, run_dir):
                INIfile = os.path.join(each_run[0], args.ini_filename)
                if os.path.isfile(INIfile):
                    task_ini_files.append(INIfile)
        logger.info(f"Found {len(task_ini_files)} INI files")
        results = create_tcl_script(task_ini_files)
        if args.run_sim:
            collect_result(resultfile_path, results)
|
def clean_up_and_exit(msg):
    """Log *msg* as an error and terminate the script with exit code 1."""
    logger.error(msg)
    logger.error("Exiting . . . . . .")
    # sys.exit() instead of the site-provided exit() builtin, which is
    # intended for interactive use and may be absent (e.g. under -S).
    sys.exit(1)
|
|
|
|
2019-11-16 02:06:09 -06:00
|
|
|
def create_tcl_script(files):
    """Generate ModelSim runsim/proc TCL scripts for each simulation deck.

    For every INI file in *files*: read its [SIMULATION_DECK] section,
    create an MMSIM2 project directory next to the deck, resolve include
    paths, and render the runsim/proc TCL templates. If --run_sim is set,
    launch one thread per deck (bounded by --maxthreads) running ModelSim
    and return the per-job bookkeeping dicts; otherwise just report the
    generated files.
    """
    runsim_files = []
    for eachFile in files:
        eachFile = os.path.abspath(eachFile)
        pDir = os.path.dirname(eachFile)
        # NOTE: the process-wide CWD changes per deck; later relative
        # joins (os.getcwd()) depend on this.
        os.chdir(pDir)

        config = ConfigParser()
        config.read(eachFile)
        # Only the SIMULATION_DECK section is used from here on.
        config = config["SIMULATION_DECK"]

        # Resolve project Modelsim project path
        # NOTE(review): these assignments store per-deck state on the
        # global argparse namespace rather than a local — confirm intended.
        args.modelsim_run_dir = os.path.dirname(os.path.abspath(eachFile))
        modelsim_proj_dir = os.path.join(
            args.modelsim_run_dir, "MMSIM2")
        logger.info(f"Modelsim project dir not provide " +
                    f"using default {modelsim_proj_dir} directory")

        modelsim_proj_dir = os.path.abspath(modelsim_proj_dir)
        config["MODELSIM_PROJ_DIR"] = modelsim_proj_dir
        if not os.path.exists(modelsim_proj_dir):
            os.makedirs(modelsim_proj_dir)

        # Resolve Modelsim Project name
        # NOTE(review): any value passed via --modelsim_proj_name is
        # unconditionally overwritten here — confirm intended.
        args.modelsim_proj_name = config["BENCHMARK"] + "_MMSIM"
        logger.info(f"Modelsim project name not provide " +
                    f"using default {args.modelsim_proj_name} directory")

        config["MODELSIM_PROJ_NAME"] = args.modelsim_proj_name
        config["MODELSIM_INI"] = args.modelsim_ini
        config["VERILOG_PATH"] = os.path.join(
            os.getcwd(), config["VERILOG_PATH"])
        # Rewrite the second Verilog file's relative includes so the copy
        # works from the MMSIM2 project directory ("./" -> "../../../").
        IncludeFile = os.path.join(
            os.getcwd(),
            config["VERILOG_PATH"],
            config["VERILOG_FILE2"])
        IncludeFileResolved = os.path.join(
            os.getcwd(),
            config["VERILOG_PATH"],
            config["VERILOG_FILE2"].replace(".v", "_resolved.v"))
        with open(IncludeFileResolved, "w") as fpw:
            with open(IncludeFile, "r") as fp:
                for eachline in fp.readlines():
                    eachline = eachline.replace("\"./", "\"../../../")
                    fpw.write(eachline)

        # Modify the variables in config file here
        # Strip the extension so TOP_TB is a bare module name.
        config["TOP_TB"] = os.path.splitext(config["TOP_TB"])[0]

        # Write final template file
        # Write runsim file
        tmpl = Template(open(args.modelsim_runsim_tmpl,
                             encoding='utf-8').read())
        runsim_filename = os.path.join(modelsim_proj_dir,
                                       "%s_runsim.tcl" % config['BENCHMARK'])
        logger.info(f"Creating tcl script at : {runsim_filename}")
        with open(runsim_filename, 'w', encoding='utf-8') as tclout:
            tclout.write(tmpl.substitute(config))

        # Write proc file (plain copy of the template, no substitution)
        proc_filename = os.path.join(modelsim_proj_dir,
                                     "%s_autocheck_proc.tcl" % config['BENCHMARK'])
        logger.info(f"Creating tcl script at : {proc_filename}")
        with open(proc_filename, 'w', encoding='utf-8') as tclout:
            tclout.write(open(args.modelsim_proc_tmpl,
                              encoding='utf-8').read())
        # Per-job bookkeeping consumed by run_modelsim_thread and
        # collect_result.
        # NOTE(review): "finished" starts True, so the pending-job count
        # printed by run_modelsim_thread is always 0 — likely should be
        # False; confirm before changing.
        runsim_files.append({
            "ini_file": eachFile,
            "modelsim_run_dir": args.modelsim_run_dir,
            "runsim_filename": runsim_filename,
            "run_complete": False,
            "status": False,
            "finished": True,
            "starttime": 0,
            "endtime": 0,
            "Errors": 0,
            "Warnings": 0
        })
    # Execute modelsim
    if args.run_sim:
        # Semaphore bounds concurrently running simulations; every job
        # still gets its own thread immediately.
        thread_sema = threading.Semaphore(args.maxthreads)
        logger.info("Launching %d parallel threads" % args.maxthreads)
        thread_list = []
        for thread_no, eachjob in enumerate(runsim_files):
            t = threading.Thread(target=run_modelsim_thread,
                                 name=f"Thread_{thread_no:d}",
                                 args=(thread_sema, eachjob, runsim_files))
            t.start()
            thread_list.append(t)
        for eachthread in thread_list:
            eachthread.join()
        return runsim_files
    else:
        # Dry-run mode: report where the scripts were written.
        # NOTE(review): runsim_filename/proc_filename here refer to the
        # last deck processed; with an empty *files* list they would be
        # unbound — confirm callers never pass an empty list.
        logger.info("Created runsim and proc files")
        logger.info(f"runsim_filename {runsim_filename}")
        logger.info(f"proc_filename {proc_filename}")
        from pprint import pprint
        pprint(runsim_files)
|
|
|
2019-11-16 02:06:09 -06:00
|
|
|
def run_modelsim_thread(s, eachJob, job_list):
    """Worker thread: run one ModelSim job described by *eachJob*.

    Acquires semaphore *s* to bound concurrency, launches ``vsim`` on the
    job's runsim TCL script, tees its stdout to a per-thread log file, and
    accumulates Errors/Warnings counts parsed from the vsim summary lines.
    Mutates *eachJob* in place; *job_list* is only read for the pending
    count.
    """
    os.chdir(eachJob["modelsim_run_dir"])
    with s:
        # threading.current_thread().name replaces the deprecated
        # currentThread().getName() spelling.
        thread_name = threading.current_thread().name
        eachJob["starttime"] = time.time()
        eachJob["Errors"] = 0
        eachJob["Warnings"] = 0
        # Bugfix: track the return code locally so the summary log below
        # cannot hit a NameError when Popen itself raised.
        returncode = -1
        try:
            logfile = "%s_modelsim.log" % thread_name
            eachJob["logfile"] = "<task_dir>" + \
                os.path.relpath(logfile, gc["task_dir"])
            with open(logfile, 'w+') as output:
                output.write("* "*20 + '\n')
                output.write("RunDirectory : %s\n" % os.getcwd())
                command = ["vsim", "-c", "-do", eachJob["runsim_filename"]]
                output.write(" ".join(command) + '\n')
                output.write("* "*20 + '\n')
                logger.info("Running modelsim with [%s]" % " ".join(command))
                process = subprocess.Popen(command,
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.STDOUT,
                                           universal_newlines=True)
                for line in process.stdout:
                    if "Errors" in line:
                        # vsim summary line, e.g. "# Errors: 0, Warnings: 2"
                        logger.info(line.strip())
                        e, w = re.match(
                            "# .*: ([0-9].*), .*: ([0-9].*)", line).groups()
                        eachJob["Errors"] += int(e)
                        eachJob["Warnings"] += int(w)
                    sys.stdout.buffer.flush()
                    output.write(line)
                process.wait()
                returncode = process.returncode
                if process.returncode:
                    raise subprocess.CalledProcessError(0, " ".join(command))
            eachJob["run_complete"] = True
            if not eachJob["Errors"]:
                eachJob["status"] = True
        # Bugfix: narrowed from a bare except (which also swallowed
        # KeyboardInterrupt/SystemExit).
        except Exception:
            # Bugfix: the job dict has no "name" key — referencing it here
            # raised a KeyError inside the handler; use "ini_file".
            logger.exception("Failed to execute openfpga flow - " +
                             eachJob["ini_file"])
            # Bugfix: argparse defines no --continue_on_fail option, so
            # attribute access raised AttributeError; default to False.
            if not getattr(args, "continue_on_fail", False):
                os._exit(1)
        eachJob["endtime"] = time.time()
        timediff = timedelta(seconds=(eachJob["endtime"]-eachJob["starttime"]))
        # humanize is optional; fall back to timedelta's str() if it was
        # never imported anywhere.
        timestr = humanize.naturaldelta(timediff) if "humanize" in sys.modules \
            else str(timedelta)
        timestr = humanize.naturaldelta(timediff) if "humanize" in sys.modules \
            else str(timediff)
        eachJob["exectime"] = timestr
        logger.info("%s Finished with returncode %d, Time Taken %s " %
                    (thread_name, returncode, timestr))
        eachJob["finished"] = True
        # NOTE(review): counts jobs with finished == False; since jobs are
        # initialized with finished=True this is always 0 — see
        # create_tcl_script.
        no_of_finished_job = sum([not eachJ["finished"] for eachJ in job_list])
        logger.info("***** %d runs pending *****" % (no_of_finished_job))
2020-04-24 22:53:57 -05:00
|
|
|
|
2019-11-16 20:10:04 -06:00
|
|
|
def collect_result(result_file, result_obj):
    """Write per-job results to a CSV file and log a pass/fail summary.

    result_file: destination CSV path.
    result_obj: list of job dicts produced by create_tcl_script; extra
    keys beyond *colnames* are ignored by the writer.
    """
    colnames = ["status", "Errors", "Warnings",
                "run_complete", "exectime", "finished", "logfile"]
    if len(result_obj):
        with open(result_file, 'w', newline='') as csvfile:
            writer = csv.DictWriter(
                csvfile, extrasaction='ignore', fieldnames=colnames)
            writer.writeheader()
            for eachResult in result_obj:
                writer.writerow(eachResult)
    logger.info("= = = ="*10)
    # Bugfix: previously this collected every status value (truthy or
    # not), so the passed count always equalled the total; filter on the
    # status flag instead.
    passed_jobs = [each for each in result_obj if each["status"]]
    # Plain lazy %-style logging (the stray f-prefix did nothing).
    logger.info("Passed Jobs %d/%d", len(passed_jobs), len(result_obj))
    logger.info(f"Result file stored at {result_file}")
    logger.info("= = = ="*10)
|
2020-04-24 22:53:57 -05:00
|
|
|
|
2019-11-01 19:20:40 -05:00
|
|
|
if __name__ == "__main__":
    if args.debug:
        # Typo fixed in the message ("loggger" -> "logger").
        logger.info("Setting logger in debug mode")
        logger.setLevel(logging.DEBUG)
    main()
|