Fixed duplicated columns in csv file bug
parent 4cfc74be55
commit 669b1c7f3e
@@ -381,8 +381,8 @@ def generate_each_task_actions(taskname):
     # architecture, benchmark and parameters
     # Create run_job object [arch, bench, run_dir, commnad]
     flow_run_cmd_list = []
-    for indx, arch in enumerate(archfile_list):
-        for bench in benchmark_list:
+    for bench in benchmark_list:
+        for indx, arch in enumerate(archfile_list):
             for lbl, param in bench["script_params"].items():
                 if benchmark_top_module_count.count(bench["top_module"]) > 1:
                     flow_run_dir = get_flow_rundir(
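
Note: the hunk above swaps the nesting of the architecture and benchmark loops, so run jobs are now generated grouped by benchmark rather than by architecture. A minimal sketch of the resulting ordering, using made-up archfile_list and benchmark_list values rather than a real task configuration:

# Hypothetical stand-ins for archfile_list and benchmark_list; the real
# values come from the task configuration file.
archfile_list = ["arch_A.xml", "arch_B.xml"]
benchmark_list = [{"top_module": "and2"}, {"top_module": "counter8"}]

# Old nesting: architecture outermost, benchmarks repeated per architecture.
old_order = [(arch, bench["top_module"])
             for arch in archfile_list
             for bench in benchmark_list]

# New nesting: benchmark outermost, all architectures grouped per benchmark.
new_order = [(arch, bench["top_module"])
             for bench in benchmark_list
             for arch in archfile_list]

print(old_order)  # [('arch_A.xml', 'and2'), ('arch_A.xml', 'counter8'), ...]
print(new_order)  # [('arch_A.xml', 'and2'), ('arch_B.xml', 'and2'), ...]
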
@@ -581,6 +581,9 @@ def run_actions(job_list):
 
 
 def collect_results(job_run_list):
+    '''
+    Collect performance numbers from vpr_stat.result file
+    '''
     task_result = []
     for run in job_run_list:
         if not run["status"]:
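
The docstring added above summarizes what collect_results() does: each run directory's vpr_stat.result file is parsed with ConfigParser and merged into a per-run OrderedDict (see the next hunk). A self-contained sketch of that parsing step, assuming a made-up [RESULTS] section; only the section name and the parser settings come from the script:

import os
import tempfile
from collections import OrderedDict
from configparser import ConfigParser, ExtendedInterpolation

# Made-up metrics standing in for a real vpr_stat.result file; only the
# [RESULTS] section name and the parser settings are taken from the script.
sample = """[RESULTS]
clb = 12
critical_path_delay = 3.45
total_wirelength = 678
"""

run_dir = tempfile.mkdtemp()
vpr_result_file = os.path.join(run_dir, "vpr_stat.result")
with open(vpr_result_file, "w", encoding="UTF-8") as fp:
    fp.write(sample)

vpr_res = ConfigParser(allow_no_value=True, interpolation=ExtendedInterpolation())
vpr_res.read_file(open(vpr_result_file, encoding="UTF-8"))

result = OrderedDict()
result["name"] = "example_run"      # run metadata added by collect_results()
result.update(vpr_res["RESULTS"])   # merge parsed VPR metrics into the row
print(dict(result))
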
@@ -588,25 +591,31 @@ def collect_results(job_run_list):
             continue
         # Check if any result file exist
         if not glob.glob(os.path.join(run["run_dir"], "*.result")):
-            logger.info("No result files found for %s" % run["name"])
+            logger.info("No result files found for %s", run["name"])
             continue
 
         # Read and merge result file
         vpr_res = ConfigParser(allow_no_value=True, interpolation=ExtendedInterpolation())
-        vpr_res.read_file(open(os.path.join(run["run_dir"], "vpr_stat.result")))
+        vpr_result_file = os.path.join(run["run_dir"], "vpr_stat.result")
+        vpr_res.read_file(open(vpr_result_file, encoding="UTF-8"))
         result = OrderedDict()
         result["name"] = run["name"]
         result["TotalRunTime"] = int(run["endtime"] - run["starttime"])
         result.update(vpr_res["RESULTS"])
         task_result.append(result)
-    colnames = []
-    for eachLbl in task_result:
-        colnames.extend(eachLbl.keys())
-    if len(task_result):
-        with open("task_result.csv", "w", newline="") as csvfile:
-            writer = csv.DictWriter(csvfile, extrasaction="ignore", fieldnames=list(colnames))
+
+    colnames = []
+    # Extract all column names
+    for each_metric in task_result:
+        colnames.extend(set(each_metric.keys())-{"name", "TotalRunTime"})
+    colnames = sorted(list(set(colnames)))
+    if len(task_result) > 0:
+        with open("task_result.csv", "w", encoding="UTF-8", newline="") as csvfile:
+            writer = csv.DictWriter(csvfile, extrasaction="ignore",
+                                    fieldnames=["name", "TotalRunTime"] + colnames)
             writer.writeheader()
-            for eachResult in task_result:
-                writer.writerow(eachResult)
+            for each in task_result:
+                writer.writerow(each)
+
 
 if __name__ == "__main__":
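
Summary of the column fix in the hunk above: the old code extended colnames with every run's keys, so csv.DictWriter received repeated fieldnames and wrote the same columns once per run; the new code collects each metric name once into a set, sorts them, and pins "name" and "TotalRunTime" as the first two columns. A standalone sketch with hypothetical task_result rows that mirrors the new column handling:

import csv

# Hypothetical per-run results; real rows come from vpr_stat.result files.
task_result = [
    {"name": "run1", "TotalRunTime": 12, "clb": 4, "critical_path_delay": 3.1},
    {"name": "run2", "TotalRunTime": 15, "clb": 5, "critical_path_delay": 2.9},
]

# Old behaviour: every run's keys were appended, so DictWriter saw repeated
# fieldnames and emitted duplicated columns.
dup_cols = []
for each in task_result:
    dup_cols.extend(each.keys())
print(dup_cols)

# New behaviour: collect each metric name once, sort, and keep the
# identifying columns first.
colnames = []
for each_metric in task_result:
    colnames.extend(set(each_metric.keys()) - {"name", "TotalRunTime"})
colnames = sorted(set(colnames))

with open("task_result.csv", "w", encoding="UTF-8", newline="") as csvfile:
    writer = csv.DictWriter(csvfile, extrasaction="ignore",
                            fieldnames=["name", "TotalRunTime"] + colnames)
    writer.writeheader()
    for each in task_result:
        writer.writerow(each)

With extrasaction="ignore", any per-run key that is not in the final fieldnames is silently dropped instead of raising ValueError, so runs carrying extra metrics still write cleanly.
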