First draft for multithreaded Modelsim simulation

parent f05aede868
commit 373dbe0718
@@ -3,6 +3,9 @@ import sys
 import os
 import re
 import glob
+import time
+import threading
+from datetime import timedelta
 import argparse
 import subprocess
 import logging
@@ -11,6 +14,7 @@ from configparser import ConfigParser, ExtendedInterpolation
 # = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
 # Configure logging system
 # = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
+FILE_LOG_FORMAT = '%(levelname)s (%(threadName)10s) - %(message)s'
 logging.basicConfig(level=logging.INFO, stream=sys.stdout,
                     format='%(levelname)s (%(threadName)10s) - %(message)s')
 logger = logging.getLogger('Modelsim_run_log')
@@ -22,6 +26,11 @@ parser = argparse.ArgumentParser()
 parser.add_argument('files', nargs='+',
                     help="Pass SimulationDeckInfo generated by OpenFPGA flow" +
                     " or pass taskname <taskname> <run_number[optional]>")
+parser.add_argument('--maxthreads', type=int, default=2,
+                    help="Number of fpga_flow threads to run default = 2," +
+                    "Typically <= Number of processors on the system")
+parser.add_argument('--debug', action="store_true",
+                    help="Run script in debug mode")
 parser.add_argument('--modelsim_proc_tmpl', type=str,
                     help="Modelsim proc template file")
 parser.add_argument('--modelsim_runsim_tmpl', type=str,
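The two new switches make the level of parallelism and the log verbosity tunable from the command line; an invocation would look something like `python3 <this_script> <taskname> --maxthreads 4 --debug` (the script name is elided here since it is not shown in this diff), bounding the run to four concurrent simulations.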
@@ -77,7 +86,7 @@ args.modelsim_runsim_tmpl = os.path.abspath(args.modelsim_runsim_tmpl)

 def main():
     if os.path.isfile(args.files[0]):
-        run_modelsim(args.files)
+        create_tcl_script(args.files)
     else:
         # Check if task directory exists and consistent
         taskname = args.files[0]
@@ -92,7 +101,17 @@ def main():
         if not os.path.isdir(temp_dir):
             clean_up_and_exit("Task run directory [%s] not found" % temp_dir)

-        logfile = os.path.join(gc["task_dir"], taskname, task_run, "*.log")
+        # = = = = = = = Create a current script log file handler = = = =
+        logfile_path = os.path.join(gc["task_dir"],
+                                    taskname, task_run, "modelsim_run.log")
+        logfilefh = logging.FileHandler(logfile_path, "w")
+        logfilefh.setFormatter(logging.Formatter(FILE_LOG_FORMAT))
+        logger.addHandler(logfilefh)
+        logger.info("Created log file at %s" % logfile_path)
+        # = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
+
+        # = = = = Read Task log file and extract run directory = = =
+        logfile = os.path.join(gc["task_dir"], taskname, task_run, "*_out.log")
         logfiles = glob.glob(logfile)
         if not len(logfiles):
             clean_up_and_exit("No successful run found in [%s]" % temp_dir)
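The hunk above attaches a FileHandler so the thread-tagged messages also land in a per-run log file inside the task directory. A minimal, self-contained sketch of that pattern (standard-library logging only; the file name below is a placeholder, not the path computed in the script):

# Minimal sketch: duplicate log output to a file using the thread-aware format.
import logging

FILE_LOG_FORMAT = '%(levelname)s (%(threadName)10s) - %(message)s'
logger = logging.getLogger('Modelsim_run_log')
logger.setLevel(logging.INFO)

# "modelsim_run.log" is a placeholder; the script builds the real path from the task run.
logfilefh = logging.FileHandler("modelsim_run.log", "w")
logfilefh.setFormatter(logging.Formatter(FILE_LOG_FORMAT))
logger.addHandler(logfilefh)
logger.info("Messages now go to the file handler as well")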
@@ -108,7 +127,7 @@ def main():
             if os.path.isfile(INIfile):
                 task_ini_files.append(INIfile)
         logger.info(f"Found {len(task_ini_files)} INI files")
-        run_modelsim(task_ini_files)
+        create_tcl_script(task_ini_files)


 def clean_up_and_exit(msg):
@@ -117,7 +136,8 @@ def clean_up_and_exit(msg):
     exit(1)


-def run_modelsim(files):
+def create_tcl_script(files):
+    runsim_files = []
 for eachFile in files:
     eachFile = os.path.abspath(eachFile)
     pDir = os.path.dirname(eachFile)
@@ -173,42 +193,78 @@ def run_modelsim(files):
         with open(proc_filename, 'w', encoding='utf-8') as tclout:
             tclout.write(open(args.modelsim_proc_tmpl,
                               encoding='utf-8').read())
-        # Execute modelsim
-        if args.run_sim:
-            os.chdir(args.modelsim_run_dir)
-            modelsim_run_cmd = ["vsim", "-c", "-do", runsim_filename]
-            out = run_command("ModelSim Run", "modelsim_run.log",
-                              modelsim_run_cmd)
-            logger.info(re.findall(r"(.*Errors.*Warning.*)", out))
-        else:
-            logger.info("Created runsim and proc files")
-            logger.info(f"runsim_filename {runsim_filename}")
-            logger.info(f"proc_filename {proc_filename}")
+        runsim_files.append({
+            "modelsim_run_dir": args.modelsim_run_dir,
+            "runsim_filename": runsim_filename,
+            "status": False,
+            "finished": True
+        })
+    # Execute modelsim
+    if args.run_sim:
+        thread_sema = threading.Semaphore(args.maxthreads)
+        logger.info("Launching %d parallel threads" % args.maxthreads)
+        thread_list = []
+        for thread_no, eachjob in enumerate(runsim_files):
+            t = threading.Thread(target=run_modelsim_thread,
+                                 name=f"Thread_{thread_no:d}",
+                                 args=(thread_sema, eachjob, runsim_files))
+            t.start()
+            thread_list.append(t)
+        for eachthread in thread_list:
+            eachthread.join()
+        exit()
+    else:
+        logger.info("Created runsim and proc files")
+        logger.info(f"runsim_filename {runsim_filename}")
+        logger.info(f"proc_filename {proc_filename}")
+        from pprint import pprint
+        pprint(runsim_files)


-def run_command(taskname, logfile, command, exit_if_fail=True):
-    # os.chdir(os.pardir)
-    logger.info("Launching %s " % taskname)
-    with open(logfile, 'w+') as output:
-        try:
-            output.write(os.getcwd() + "\n")
-            output.write(" ".join(command)+"\n")
-            process = subprocess.run(command,
-                                     check=True,
-                                     stdout=subprocess.PIPE,
-                                     stderr=subprocess.PIPE,
-                                     universal_newlines=True)
-            output.write(process.stdout)
-            if process.returncode:
-                logger.error("%s run failed with returncode %d" %
-                             (taskname, process.returncode))
-        except (Exception, subprocess.CalledProcessError) as e:
-            logger.exception("failed to execute %s" % taskname)
-            return None
-        logger.info("%s is written in file %s" % (taskname, logfile))
-        return process.stdout
+def run_modelsim_thread(s, eachJob, job_list):
+    os.chdir(eachJob["modelsim_run_dir"])
+    with s:
+        thread_name = threading.currentThread().getName()
+        eachJob["starttime"] = time.time()
+        try:
+            logfile = "%s_modelsim.log" % thread_name
+            with open(logfile, 'w+') as output:
+                output.write("* "*20 + '\n')
+                output.write("RunDirectory : %s\n" % os.getcwd())
+                command = ["vsim", "-c", "-do", eachJob["runsim_filename"]]
+                output.write(" ".join(command) + '\n')
+                output.write("* "*20 + '\n')
+                logger.info("Running modelsim with [%s]" % " ".join(command))
+                process = subprocess.Popen(command,
+                                           stdout=subprocess.PIPE,
+                                           stderr=subprocess.STDOUT,
+                                           universal_newlines=True)
+                for line in process.stdout:
+                    if "Errors" in line:
+                        logger.debug(line.strip())
+                    sys.stdout.buffer.flush()
+                    output.write(line)
+                process.wait()
+                if process.returncode:
+                    raise subprocess.CalledProcessError(0, " ".join(command))
+            eachJob["status"] = True
+        except:
+            logger.exception("Failed to execute openfpga flow - " +
+                             eachJob["name"])
+            if not args.continue_on_fail:
+                os._exit(1)
+        eachJob["endtime"] = time.time()
+        timediff = timedelta(seconds=(eachJob["endtime"]-eachJob["starttime"]))
+        timestr = humanize.naturaldelta(timediff) if "humanize" in sys.modules \
+            else str(timediff)
+        logger.info("%s Finished with returncode %d, Time Taken %s " %
+                    (thread_name, process.returncode, timestr))
+        eachJob["finished"] = True
+        no_of_finished_job = sum([not eachJ["finished"] for eachJ in job_list])
+        logger.info("***** %d runs pending *****" % (no_of_finished_job))


 if __name__ == "__main__":
+    if args.debug:
+        logger.info("Setting loggger in debug mode")
+        logger.setLevel(logging.DEBUG)
     main()
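The new create_tcl_script/run_modelsim_thread split follows a common semaphore-bounded worker pattern: every job gets its own thread up front, and the Semaphore caps how many vsim processes actually run at once (--maxthreads). A minimal, self-contained sketch of that pattern, assuming placeholder job dictionaries rather than the exact structures built by the script:

# Minimal sketch: bound concurrent vsim runs with a Semaphore while streaming output.
import subprocess
import threading

def worker(sema, job):
    with sema:                              # at most maxthreads workers run vsim concurrently
        cmd = ["vsim", "-c", "-do", job["runsim_filename"]]
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT,
                                universal_newlines=True)
        for line in proc.stdout:            # stream output while the simulation runs
            pass                            # parse or log lines here
        proc.wait()
        job["status"] = (proc.returncode == 0)

def launch(jobs, maxthreads=2):
    sema = threading.Semaphore(maxthreads)
    threads = [threading.Thread(target=worker, name=f"Thread_{i}",
                                args=(sema, job))
               for i, job in enumerate(jobs)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()                            # wait for all simulations to finish

Starting all threads immediately and gating them with the semaphore keeps the launch code simple at the cost of one OS thread per job; a concurrent.futures.ThreadPoolExecutor with max_workers would be an equivalent alternative.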