pin constraint support

This commit is contained in:
Tarachand Pagarani 2022-01-25 10:06:19 -08:00
parent 7f07f9167c
commit d862ea2930
8 changed files with 1020 additions and 0 deletions

View File

@ -0,0 +1,2 @@
vpr ${VPR_ARCH_FILE} ${VPR_TESTBENCH_BLIF} --clock_modeling ideal --device ${OPENFPGA_VPR_DEVICE_LAYOUT} --absorb_buffer_luts off --pack

View File

@ -0,0 +1,189 @@
#!/usr/bin/env python3
"""
Convert a PCF file into a VPR io.place file.
"""
import argparse
import csv
import sys
import re
from collections import defaultdict
import vpr_io_place
from pinmap_parse import read_pinmapfile_data
from pinmap_parse import vec_to_scalar
from lib.parse_pcf import parse_simple_pcf
# =============================================================================
BLOCK_INSTANCE_RE = re.compile(r"^(?P<name>\S+)\[(?P<index>[0-9]+)\]$")
# =============================================================================
def gen_io_def(args):
    '''
    Generate an io.place file from a PCF file.

    Reads the eBLIF netlist, the packed VPR .net file, the pinmap XML and the
    user pinmap CSV, builds a pad-name -> (x, y, z) location map, then
    constrains every net mentioned in the PCF to its pad's location.
    Exits with status 1 on any inconsistency between the inputs.
    '''
    io_place = vpr_io_place.IoPlace()
    io_place.read_io_list_from_eblif(args.blif)
    io_place.load_block_names_from_net_file(args.net)

    # Load all the necessary data from the pinmap_xml
    io_cells, port_map = read_pinmapfile_data(args.pinmap_xml)

    # Map of pad names to VPR locations. A plain dict suffices: entries are
    # only ever assigned whole (x, y, z) tuples and looked up with "in", so
    # the defaultdict used previously added nothing.
    pad_map = {}

    with open(args.csv_file, mode='r') as csv_fp:
        reader = csv.DictReader(csv_fp)
        for line in reader:
            port_name_list = vec_to_scalar(line['port_name'])
            pin_name = vec_to_scalar(line['mapped_pin'])
            gpio_type = line['GPIO_type']

            # Bus ports must expand to the same number of scalar pins on
            # both sides of the mapping.
            if len(port_name_list) != len(pin_name):
                print(
                    'CSV port name "{}" length does not match with mapped pin name "{}" length'
                    .format(line['port_name'], line['mapped_pin']),
                    file=sys.stderr
                )
                sys.exit(1)

            for port, pin in zip(port_name_list, pin_name):
                if port not in port_map:
                    print(
                        'Port name "{}" specified in csv file "{}" is invalid. {} "{}"'
                        .format(
                            line['port_name'], args.csv_file,
                            "Specify from port names in xml file",
                            args.pinmap_xml
                        ),
                        file=sys.stderr
                    )
                    sys.exit(1)

                curr_map = port_map[port]
                # A non-empty GPIO type becomes part of the pad key,
                # e.g. "A1:<type>".
                if gpio_type is None or gpio_type == '':
                    key = pin
                else:
                    key = pin + ":" + gpio_type.strip()
                pad_map[key] = (
                    int(curr_map.x), int(curr_map.y), int(curr_map.z)
                )

    for pcf_constraint in parse_simple_pcf(args.pcf):
        # Only IO constraints are handled here; clock constraints
        # (PcfClkConstraint) are ignored by this script.
        if (type(pcf_constraint).__name__ == 'PcfIoConstraint'):
            pad_name = pcf_constraint.pad
            if not io_place.is_net(pcf_constraint.net):
                print(
                    'PCF constraint "{}" from line {} constraints net {} {}:\n{}'
                    .format(
                        pcf_constraint.line_str, pcf_constraint.line_num,
                        pcf_constraint.net, '\n'.join(io_place.get_nets()),
                        "which is not in available netlist"
                    ),
                    file=sys.stderr
                )
                sys.exit(1)
            if pad_name not in pad_map:
                print(
                    'PCF constraint "{}" from line {} constraints pad {} {}:\n{}'
                    .format(
                        pcf_constraint.line_str, pcf_constraint.line_num,
                        pad_name, '\n'.join(sorted(pad_map.keys())),
                        "which is not in available pad map"
                    ),
                    file=sys.stderr
                )
                sys.exit(1)

            # Get the top-level block instance, strip its index
            inst = io_place.get_top_level_block_instance_for_net(
                pcf_constraint.net
            )
            if inst is None:
                continue
            match = BLOCK_INSTANCE_RE.match(inst)
            assert match is not None, inst
            inst = match.group("name")

            # Constraint the net (block)
            locs = pad_map[pad_name]
            io_place.constrain_net(
                net_name=pcf_constraint.net,
                loc=locs,
                comment=pcf_constraint.line_str
            )

    if io_place.constraints:
        io_place.output_io_place(args.output)
# =============================================================================
def main():
    '''
    Convert a PCF file into a VPR io.place file
    '''
    # Build the command-line interface.
    arg_parser = argparse.ArgumentParser(
        description='Convert a PCF file into a VPR io.place file.'
    )
    arg_parser.add_argument(
        "--pcf", "-p", "-P",
        type=argparse.FileType('r'),
        required=True,
        help='PCF input file'
    )
    arg_parser.add_argument(
        "--blif", "-b",
        type=argparse.FileType('r'),
        required=True,
        help='BLIF / eBLIF file'
    )
    arg_parser.add_argument(
        "--output", "-o", "-O",
        type=argparse.FileType('w'),
        default=sys.stdout,
        help='The output io.place file'
    )
    arg_parser.add_argument(
        "--net", "-n",
        type=argparse.FileType('r'),
        required=True,
        help='top.net file'
    )
    arg_parser.add_argument(
        "--pinmap_xml",
        type=str,
        required=True,
        help="Input pin-mapping xml file"
    )
    arg_parser.add_argument(
        "--csv_file",
        type=str,
        required=True,
        help="Input user-defined pinmap CSV file"
    )

    # Parse the arguments and hand off to the generator.
    gen_io_def(arg_parser.parse_args())
# =============================================================================
if __name__ == '__main__':
main()

77
openfpga_flow/scripts/eblif.py Executable file
View File

@ -0,0 +1,77 @@
# Top level keywords defining the begin of a cell definition.
top_level = [
    "model",
    "inputs",
    "outputs",
    "names",
    "latch",
    "subckt",
]

# Keywords defining cell attributes / parameters. Those can be specified for
# each cell multiple times. Parameter names and values are stored in a dict
# under the parsed blif data.
#
# For example: the construct ".param MODE SYNC" will add to the dict under
# the key "param" entry "MODE":"SYNC".
#
sub_level = [
    "attr",
    "param",
]


def parse_blif(f):
    """
    Parse a BLIF/eBLIF stream into a dict keyed by construct type.

    Each top-level construct becomes a dict with 'type', 'args' and 'data'
    (non-dot continuation lines, e.g. truth-table rows). The single .inputs
    and .outputs entries are collapsed to plain dicts.
    """
    parsed = {}
    cell = None

    def flush(finished):
        # Store a finished cell under its construct type.
        parsed.setdefault(finished['type'], []).append(finished)

    for raw in f:
        # Strip comments and surrounding whitespace.
        text = raw.split('#', 1)[0].strip()
        if not text:
            continue

        if not text.startswith("."):
            # Continuation data for the current cell.
            cell['data'].append(text.split())
            continue

        # Split ".keyword rest-of-line"; rest may be empty.
        head, _, rest = text.partition(" ")
        assert head.startswith("."), head
        keyword = head[1:]

        if keyword in top_level:
            # Starting a new cell - flush the previous one first.
            if cell:
                flush(cell)
            cell = {
                'type': keyword,
                'args': rest.split(),
                'data': [],
            }
        elif keyword in sub_level:
            # Accumulate key/value attributes on the current cell.
            if keyword not in cell:
                cell[keyword] = {}
            key, value = rest.split(maxsplit=1)
            cell[keyword][key] = value
        else:
            cell[keyword] = rest.split()

    if cell:
        flush(cell)

    # Exactly one .inputs and one .outputs list is expected; collapse them.
    assert len(parsed['inputs']) == 1
    parsed['inputs'] = parsed['inputs'][0]
    assert len(parsed['outputs']) == 1
    parsed['outputs'] = parsed['outputs'][0]

    return parsed

View File

@ -0,0 +1,44 @@
"""
Supported PCF commands:
* set_io <net> <pad> - constrain a given <net> to a given physical <pad> in eFPGA pinout.
* set_clk <pin> <net> - constrain a given global clock <pin> to a given <net>
Every tile where <net> is present will be constrained to use a given global clock.
"""
from collections import namedtuple
import re
PcfIoConstraint = namedtuple('PcfIoConstraint', 'net pad line_str line_num')
PcfClkConstraint = namedtuple('PcfClkConstraint', 'pin net')


def parse_simple_pcf(f):
    """ Parse a simple PCF file object and yield PcfIoConstraint objects. """
    for idx, raw_line in enumerate(f, start=1):
        # Strip trailing comments, then tokenize.
        tokens = re.sub(r"#.*", "", raw_line.strip()).split()
        if not tokens:
            continue

        # Drop option-style arguments (anything starting with '-').
        tokens = [tok for tok in tokens if tok[0] != '-']
        assert len(tokens) == 3, tokens

        command, first, second = tokens

        if command == 'set_io':
            yield PcfIoConstraint(
                net=first,
                pad=second,
                line_str=raw_line.strip(),
                line_num=idx,
            )

        if command == 'set_clk':
            yield PcfClkConstraint(
                pin=first,
                net=second,
            )

View File

@ -0,0 +1,364 @@
#!/usr/bin/env python3
"""
This script parses given interface pin-mapping xml file, stores the pin-mapping
information w.r.t. its location in the device. It also generates a template
csv file for the end-user of the eFPGA device. User can modify the template
csv file and specify the user-defined pin names to the ports defined in the
template csv file.
"""
import argparse
import csv
import sys
from collections import namedtuple
import lxml.etree as ET
# =============================================================================
class PinMappingData(object):
    """
    Pin mapping data for IO ports in an eFPGA device.

    port_name  - IO port name
    mapped_pin - User-defined pin name mapped to the given port_name
    x          - x coordinate corresponding to column number
    y          - y coordinate corresponding to row number
    z          - z coordinate corresponding to pin index at current x,y location
    """

    def __init__(self, port_name, mapped_pin, x, y, z):
        self.port_name = port_name
        self.mapped_pin = mapped_pin
        self.x = x
        self.y = y
        self.z = z

    def __repr__(self):
        # Single source of truth for the textual form; __str__ delegates
        # here so the two representations can never drift apart.
        return "{Port_name: '%s' mapped_pin: '%s' x: '%s' y: '%s' z: '%s'}" % (
            self.port_name, self.mapped_pin, self.x, self.y, self.z
        )

    # str() and repr() are intentionally identical.
    __str__ = __repr__
"""
Device properties present in the pin-mapping xml
name - Device name
family - Device family name
width - Device width aka number of cells in a row
heigth - Device height aka number of cells in a column
z - Number of cells per row/col
"""
DeviceData = namedtuple("DeviceData", "name family width height z")
# =============================================================================
def parse_io(xml_io, port_map, orientation, width, height, z):
    '''
    Parses IO section of xml file.

    Walks every CELL element of one side (TOP/BOTTOM/LEFT/RIGHT) of the
    device, expanding each cell's bus "mapped_name" into scalar pins and
    assigning one PinMappingData per (x, y, z) location.

    xml_io      - XML element for one side's IO section
    port_map    - dict updated in place: scalar mapped pin name -> PinMappingData
    orientation - one of "TOP", "BOTTOM", "LEFT", "RIGHT"
    width       - device width (number of columns), used for RIGHT default
    height      - device height (number of rows), used for TOP default
    z           - pins per cell location

    Returns (pins, port_map) where pins maps (x, y, z) -> PinMappingData.
    '''
    assert xml_io is not None
    pins = {}
    io_row = ""
    io_col = ""
    # Fix the row (for horizontal edges) or column (for vertical edges).
    # If not given explicitly via the "y"/"x" attribute, default to the
    # device edge implied by the orientation.
    if orientation in ("TOP", "BOTTOM"):
        io_row = xml_io.get("y")
        if io_row is None:
            if orientation == "TOP":
                io_row = str(int(height) - 1)
            elif orientation == "BOTTOM":
                io_row = "0"
    elif orientation in ("LEFT", "RIGHT"):
        io_col = xml_io.get("x")
        if io_col is None:
            if orientation == "LEFT":
                io_col = "0"
            elif orientation == "RIGHT":
                io_col = str(int(width) - 1)
    for xml_cell in xml_io.findall("CELL"):
        port_name = xml_cell.get("port_name")
        mapped_name = xml_cell.get("mapped_name")
        startx = xml_cell.get("startx")
        starty = xml_cell.get("starty")
        endx = xml_cell.get("endx")
        endy = xml_cell.get("endy")
        # define properties for scalar pins
        scalar_mapped_pins = vec_to_scalar(mapped_name)
        # Running index into scalar_mapped_pins shared by all four loops
        # below; assumes mapped_name expands to exactly
        # (number of cells) * z scalar pins - TODO confirm against inputs.
        i = 0
        # Horizontal span (TOP/BOTTOM): iterate x between startx and endx,
        # ascending or descending to match the declaration order.
        # NOTE(review): the fixed coordinate (y here, x below) keeps its
        # string form while the looped coordinate is an int; downstream
        # consumers convert both with int()/str() - verify if changing.
        if startx is not None and endx is not None:
            curr_startx = int(startx)
            curr_endx = int(endx)
            y = io_row
            if curr_startx < curr_endx:
                for x in range(curr_startx, curr_endx + 1):
                    for j in range(0, int(z)):
                        pins[x, y, j] = PinMappingData(
                            port_name=port_name,
                            mapped_pin=scalar_mapped_pins[i],
                            x=x,
                            y=y,
                            z=j
                        )
                        port_map[scalar_mapped_pins[i]] = pins[x, y, j]
                        i += 1
            else:
                for x in range(curr_startx, curr_endx - 1, -1):
                    for j in range(0, int(z)):
                        pins[x, y, j] = PinMappingData(
                            port_name=port_name,
                            mapped_pin=scalar_mapped_pins[i],
                            x=x,
                            y=y,
                            z=j
                        )
                        port_map[scalar_mapped_pins[i]] = pins[x, y, j]
                        i += 1
        # Vertical span (LEFT/RIGHT): iterate y between starty and endy.
        elif starty is not None and endy is not None:
            curr_starty = int(starty)
            curr_endy = int(endy)
            x = io_col
            if curr_starty < curr_endy:
                for y in range(curr_starty, curr_endy + 1):
                    for j in range(0, int(z)):
                        pins[x, y, j] = PinMappingData(
                            port_name=port_name,
                            mapped_pin=scalar_mapped_pins[i],
                            x=x,
                            y=y,
                            z=j
                        )
                        port_map[scalar_mapped_pins[i]] = pins[x, y, j]
                        i += 1
            else:
                for y in range(curr_starty, curr_endy - 1, -1):
                    for j in range(0, int(z)):
                        pins[x, y, j] = PinMappingData(
                            port_name=port_name,
                            mapped_pin=scalar_mapped_pins[i],
                            x=x,
                            y=y,
                            z=j
                        )
                        port_map[scalar_mapped_pins[i]] = pins[x, y, j]
                        i += 1
    return pins, port_map
# =============================================================================
def vec_to_scalar(port_name):
    '''
    Converts given bus port into its scalar ports.

    "data[3:0]" -> ["data[3]", "data[2]", "data[1]", "data[0]"], preserving
    the declaration order (ascending or descending). A non-bus name (or
    None) is returned unchanged as a single-element list. Exits with status
    1 when a range is given without enclosing brackets.
    '''
    # Not a bus (no "left:right" range) - return as-is.
    if port_name is None or ':' not in port_name:
        return [port_name]

    open_brace = port_name.find('[')
    close_brace = port_name.find(']')
    if open_brace == -1 or close_brace == -1:
        print(
            'Invalid portname "{}" specified. Bus ports should contain [ ] to specify range'
            .format(port_name),
            file=sys.stderr
        )
        sys.exit(1)

    # The original called these "lsb"/"msb", but the first number is simply
    # the left index of the range - it may be either bound.
    bus = port_name[open_brace + 1:close_brace]
    left = int(bus[:bus.find(':')])
    right = int(bus[bus.find(':') + 1:])

    # One loop covers both directions by choosing the step.
    step = -1 if left > right else 1
    base = port_name[:open_brace]
    return [base + '[' + str(i) + ']' for i in range(left, right + step, step)]
# =============================================================================
def parse_io_cells(xml_root):
    """
    Parses the "IO" section of the pinmapfile. Returns a dict indexed by IO cell
    names which contains cell types and their locations in the device grid,
    plus the scalar-pin -> PinMappingData port map accumulated across sides.
    """
    cells = {}
    port_map = {}

    # BUGFIX: these assignments previously ended with trailing commas,
    # turning width/height into 1-tuples and breaking the int(width) /
    # int(height) conversions inside parse_io().
    width = xml_root.get("width")
    height = xml_root.get("height")
    io_per_cell = xml_root.get("z")

    # Get the "IO" section
    xml_io = xml_root.find("IO")
    if xml_io is None:
        print("ERROR: No mandatory 'IO' section defined in 'DEVICE' section")
        sys.exit(1)

    # The four sides are handled identically apart from their tag name.
    for orientation in ("TOP", "BOTTOM", "LEFT", "RIGHT"):
        xml_side_io = xml_io.find(orientation + "_IO")
        if xml_side_io is not None:
            currcells, port_map = parse_io(
                xml_side_io, port_map, orientation, width, height, io_per_cell
            )
            cells[orientation] = currcells

    return cells, port_map
# ============================================================================
def read_pinmapfile_data(pinmapfile):
    """
    Loads and parses a pinmap file.

    Validates the mandatory DEVICE attributes, then returns
    (io_cells, port_map) as produced by parse_io_cells(). Exits with
    status 1 when a mandatory attribute is missing.
    """
    # Read and parse the XML archfile
    parser = ET.XMLParser(resolve_entities=False, strip_cdata=False)
    xml_tree = ET.parse(pinmapfile, parser)
    xml_root = xml_tree.getroot()

    # All of these DEVICE attributes are mandatory; the checks were five
    # copy-pasted if-blocks with identical messages.
    for attr in ("name", "family", "width", "height", "z"):
        if xml_root.get(attr) is None:
            print(
                "ERROR: No mandatory attribute '{}' specified in 'DEVICE' section"
                .format(attr)
            )
            sys.exit(1)

    # Parse IO cells
    io_cells, port_map = parse_io_cells(xml_root)

    return io_cells, port_map
# =============================================================================
def generate_pinmap_csv(pinmap_csv_file, io_cells):
    '''
    Generates pinmap csv file.

    Writes one row per pin; the user-facing columns (mapped_pin, GPIO_type,
    clock columns) are left blank for the end user to fill in.
    '''
    columns = [
        'orientation', 'row', 'col', 'pin_num_in_cell', 'port_name',
        'mapped_pin', 'GPIO_type', 'Associated Clock', 'Clock Edge'
    ]
    with open(pinmap_csv_file, "w", newline='') as out_fp:
        csv_writer = csv.DictWriter(out_fp, fieldnames=columns)
        csv_writer.writeheader()
        for side, side_pins in io_cells.items():
            for pin_data in side_pins.values():
                row = {
                    'orientation': side,
                    'row': str(pin_data.y),
                    'col': str(pin_data.x),
                    'pin_num_in_cell': str(pin_data.z),
                    'port_name': pin_data.mapped_pin,
                }
                csv_writer.writerow(row)
# =============================================================================
def main():
    '''
    Processes interface mapping xml file and generates template csv file
    '''
    # Build and parse the command-line interface.
    arg_parser = argparse.ArgumentParser(
        description='Process interface mapping xml file to generate csv file.'
    )
    arg_parser.add_argument(
        "--pinmapfile", "-p", "-P",
        type=str,
        required=True,
        help="Input pin-mapping XML file"
    )
    arg_parser.add_argument(
        "--csv_file", "-c", "-C",
        type=str,
        default="template_pinmap.csv",
        help="Output template pinmap CSV file"
    )
    options = arg_parser.parse_args()

    # Load all the necessary data from the pinmapfile, then emit the
    # template CSV for the user to fill in.
    io_cells, _port_map = read_pinmapfile_data(options.pinmapfile)
    generate_pinmap_csv(options.csv_file, io_cells)
if __name__ == "__main__":
main()

View File

@ -89,10 +89,21 @@ parser.add_argument('--openfpga_shell_template', type=str,
"openfpga_shell_scripts",
"example_script.openfpga"),
help="Sample openfpga shell script")
parser.add_argument('--openfpga_vpr_packer_template', type=str,
default=os.path.join(openfpga_base_dir, "openfpga_flow",
"openfpga_shell_scripts",
"run_vpr_packer.openfpga"),
help="Vpr packer shell script")
parser.add_argument('--openfpga_arch_file', type=str,
help="Openfpga architecture file for shell")
parser.add_argument('--arch_variable_file', type=str, default=None,
help="Openfpga architecture file for shell")
parser.add_argument('--pinmap_xml_file', type=str,
default=None,
help="pinmap xml file")
parser.add_argument('--pinmap_csv_file', type=str,
default=None,
help="pinmap csv file")
# parser.add_argument('--openfpga_sim_setting_file', type=str,
# help="Openfpga simulation file for shell")
# parser.add_argument('--external_fabric_key_file', type=str,
@ -115,6 +126,10 @@ parser.add_argument('--base_verilog', type=str,
help="Original Verilog file to run verification in " +
"blif_VPR flow")
#VPR IO placement constraint file
parser.add_argument('--pcf_file', type=str,
help="IO placement constraint file used while running vpr flow")
# ACE2 and power estimation related arguments
parser.add_argument('--K', type=int,
help="LUT Size, if not specified extracted from arch file")
@ -273,6 +288,12 @@ def main():
if (args.fpga_flow == "vpr_blif"):
collect_files_for_vpr()
logger.info("Running OpenFPGA Shell Engine ")
if (args.pcf_file):
run_openfpga_vpr_packer()
run_io_placement()
if (args.fpga_flow == "yosys"):
run_yosys_with_abc()
if not (args.fpga_flow == "yosys"):
@ -415,6 +436,9 @@ def validate_command_line_arguments():
args.activity_file = os.path.abspath(args.activity_file)
if args.base_verilog:
args.base_verilog = os.path.abspath(args.base_verilog)
if args.pcf_file:
args.pcf_file = os.path.abspath(args.pcf_file)
logger.info("pcf file %s", args.pcf_file)
def prepare_run_directory(run_dir):
@ -460,6 +484,17 @@ def prepare_run_directory(run_dir):
shutil.copy(args.openfpga_shell_template,
args.top_module+"_template.openfpga")
if (args.openfpga_vpr_packer_template):
args.openfpga_vpr_packer_template = os.path.expandvars(args.openfpga_vpr_packer_template)
if not os.path.isfile(args.openfpga_vpr_packer_template or ""):
logger.error("Openfpga shell file - %s" %
args.openfpga_vpr_packer_template)
clean_up_and_exit("Provided openfpga_vpr_packer_template" +
f" {args.openfpga_vpr_packer_template} file not found")
else:
shutil.copy(args.openfpga_vpr_packer_template,
args.top_module+"_vpr_packer.openfpga")
# Create benchmark dir in run_dir and copy flattern architecture file
os.mkdir("benchmark")
try:
@ -708,6 +743,31 @@ def run_pro_blif_3arg():
logger.info("blif_3args output is written in file %s" % filename)
def run_io_placement():
    """
    Run the create_ioplace.py helper to turn the user's PCF constraints
    into a VPR io.place file inside the run directory.
    """
    # Path to the helper script, computed once.
    script_path = os.path.join(flow_script_dir, 'create_ioplace.py')
    command = [
        "--pcf", args.pcf_file,
        "--blif", os.path.join(args.run_dir, args.top_module + ".blif"),
        "--net", os.path.join(args.run_dir, args.top_module+".net"),
        "--pinmap_xml", args.pinmap_xml_file,
        "--csv_file", args.pinmap_csv_file,
        "-o", os.path.join(args.run_dir, args.top_module+"_io.place"),
    ]
    try:
        if not os.path.isfile(script_path):
            clean_up_and_exit("Not able to locate io placement file " + script_path)
        subprocess.run(["python3", script_path] + command,
                       check=True,
                       stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE,
                       universal_newlines=True)
    except Exception:
        # BUGFIX: was a bare "except:", which also swallowed the SystemExit
        # raised by clean_up_and_exit() above as well as KeyboardInterrupt.
        # Narrowing to Exception lets those propagate as intended.
        logger.exception("Failed to run create_ioplace")
        clean_up_and_exit("")
def collect_files_for_vpr():
# Sanitize provided Benchmark option
if len(args.benchmark_files) > 1:
@ -729,6 +789,29 @@ def collect_files_for_vpr():
shutil.copy(args.base_verilog, args.top_module+"_output_verilog.v")
def run_openfpga_vpr_packer():
    """
    Render the vpr-packer OpenFPGA shell-script template with the current
    run's variables and execute it through the OpenFPGA shell, logging to
    openfpgashell_vpr_packer.log.
    """
    # Read the per-run template copied into the run directory by
    # prepare_run_directory().
    tmpl = Template(open(args.top_module+"_vpr_packer.openfpga",
                         encoding='utf-8').read())
    # Substitution variables for the template placeholders.
    path_variables = script_env_vars["PATH"]
    path_variables["VPR_ARCH_FILE"] = args.arch_file
    path_variables["OPENFPGA_ARCH_FILE"] = args.openfpga_arch_file
    path_variables["VPR_TESTBENCH_BLIF"] = args.top_module+".blif"
    path_variables["ACTIVITY_FILE"] = args.top_module+"_ace_out.act"
    path_variables["REFERENCE_VERILOG_TESTBENCH"] = args.top_module + \
        "_output_verilog.v"
    # OpenFPGAArgs is a flat [--flag, value, ...] list; expose each flag
    # as an upper-case template variable (strip the leading "--").
    for indx in range(0, len(OpenFPGAArgs), 2):
        tmpVar = OpenFPGAArgs[indx][2:].upper()
        path_variables[tmpVar] = OpenFPGAArgs[indx+1]
    # Write the substituted script, then run it in batch mode.
    with open(args.top_module+"_run_vpr_packer.openfpga", 'w', encoding='utf-8') as archfile:
        archfile.write(tmpl.safe_substitute(path_variables))
    command = [cad_tools["openfpga_shell_path"], "-batch", "-f",
               args.top_module+"_run_vpr_packer.openfpga"]
    run_command("OpenFPGA Shell Run", "openfpgashell_vpr_packer.log", command)
def run_openfpga_shell():
ExecTime["VPRStart"] = time.time()
# bench_blif, fixed_chan_width, logfile, route_only=False
@ -739,6 +822,9 @@ def run_openfpga_shell():
path_variables["VPR_ARCH_FILE"] = args.arch_file
path_variables["OPENFPGA_ARCH_FILE"] = args.openfpga_arch_file
path_variables["VPR_TESTBENCH_BLIF"] = args.top_module+".blif"
path_variables["VPR_IO_PLACE"] = "free"
if (args.pcf_file):
path_variables["VPR_IO_PLACE"] = args.top_module+"_io.place"
path_variables["ACTIVITY_FILE"] = args.top_module+"_ace_out.act"
path_variables["REFERENCE_VERILOG_TESTBENCH"] = args.top_module + \
"_output_verilog.v"

View File

@ -305,6 +305,7 @@ def generate_each_task_actions(taskname):
CurrBenchPara["verilog_file"] = SynthSection.get(
bech_name+"_verilog")
CurrBenchPara["pcf_file"] = SynthSection.get(bech_name+"_pcf")
# Add script parameter list in current benchmark
ScriptSections = [x for x in TaskFileSections if "SCRIPT_PARAM" in x]
script_para_list = {}
@ -417,6 +418,9 @@ def create_run_command(curr_job_dir, archfile, benchmark_obj, param, task_conf):
if benchmark_obj.get("activity_file"):
command += ["--activity_file", benchmark_obj.get("activity_file")]
if benchmark_obj.get("pcf_file"):
command += ["--pcf_file", benchmark_obj.get("pcf_file")]
if benchmark_obj.get("verilog_file"):
command += ["--base_verilog", benchmark_obj.get("verilog_file")]

View File

@ -0,0 +1,254 @@
from __future__ import print_function
from collections import OrderedDict, namedtuple
import itertools
import re
import eblif
import lxml.etree as ET
IoConstraint = namedtuple('IoConstraint', 'name x y z comment')
HEADER_TEMPLATE = """\
#{name:<{nl}} x y z pcf_line
#{s:-^{nl}} -- -- - ----"""
CONSTRAINT_TEMPLATE = '{name:<{nl}} {x: 3} {y: 3} {z: 2} # {comment}'
INOUT_REGEX = re.compile(r"^(.+)(_\$inp|_\$out)(.*)$")
NETNAME_REGEX = re.compile(r"(.+?)(\[[0-9]+\]$|$)")
class IoPlace(object):
    """
    Collects IO information from an eBLIF netlist and a packed VPR .net
    file, and writes VPR io.place constraint files.
    """

    def __init__(self):
        # Net name -> IoConstraint, in insertion order.
        self.constraints = OrderedDict()
        # Top-level input / output net names from the eBLIF.
        self.inputs = set()
        self.outputs = set()
        # Net name -> top-level block (cluster) name, filled from .net file.
        self.net_to_block = None
        # Raw net name -> alias (inout halves map to their base name).
        self.net_map = {}
        # Top-level block name -> block instance string (e.g. "io[3]").
        self.block_to_inst = {}
        # Base names of nets that came from split inout ports.
        self.inout_nets = set()
        # (net, pad) pairs read from IO_LOC_PAIRS parameters.
        self.net_to_pad = set()
        # IO net names found in the .net file (see load_net_file_ios).
        self.net_file_io = set()

    def read_io_loc_pairs(self, blif):
        """
        Read IO_LOC_PAIRS parameters from eblif carrying the information
        which package pin a specified top port is constrained, e.g. IO_LOC_PAIRS = "portA:D1"
        In case of differential inputs/outputs there are two pairs of the parameter,
        i.e. IO_LOC_PAIRS = "portA_p:D2,portA_n:D4"
        """
        if 'subckt' not in blif:
            return
        for attr in blif['subckt']:
            if 'param' not in attr:
                continue
            if 'IO_LOC_PAIRS' in attr['param']:
                # The parameter value is quoted; strip the quotes before
                # splitting on commas.
                locs = attr['param']['IO_LOC_PAIRS'][1:-1].split(',')
                if 'NONE' in locs:
                    continue
                for loc in locs:
                    net, pad = loc.split(':')
                    self.net_to_pad.add((net, pad))

    def read_io_list_from_eblif(self, eblif_file):
        """
        Populate inputs/outputs, the net-name alias map and the
        net-to-pad set from a parsed eBLIF file object.
        """
        blif = eblif.parse_blif(eblif_file)

        self.inputs = set(blif['inputs']['args'])
        self.outputs = set(blif['outputs']['args'])

        # Build a net name map that maps products of an inout port split into
        # their former name.
        #
        # For example, an inout port 'A' is split into 'A_$inp' and 'A_$out',
        # port B[2] into 'B_$inp[2]' and 'B_$out[2]'.
        self.net_map = {}
        self.inout_nets = set()
        for net in itertools.chain(self.inputs, self.outputs):
            match = INOUT_REGEX.match(net)
            if match:
                # Alias is the name with the _$inp/_$out marker removed.
                alias = match.group(1) + match.group(3)
                self.inout_nets.add(alias)
                self.net_map[net] = alias
            else:
                self.net_map[net] = net
        self.read_io_loc_pairs(blif)

    def load_block_names_from_net_file(self, net_file):
        """
        .place files expect top-level block (cluster) names, not net names, so
        build a mapping from net names to block names from the .net file.
        """
        net_xml = ET.parse(net_file)
        net_root = net_xml.getroot()
        self.net_to_block = {}

        for block in net_root.xpath("//block"):
            instance = block.attrib["instance"]
            # Only IO pads are of interest here.
            if instance != "inpad[0]" and instance != "outpad[0]":
                continue
            # Walk up to the block directly below the netlist root.
            top_block = block.getparent()
            assert top_block is not None
            while top_block.getparent() is not net_root:
                assert top_block is not None
                top_block = top_block.getparent()
            self.net_to_block[block.get("name")] = top_block.get("name")

        # Loop over all top-level blocks. Store block name to its instance
        # correspondences.
        for block_xml in net_root.findall("block"):
            name = block_xml.attrib["name"]
            inst = block_xml.attrib["instance"]

            assert name not in self.block_to_inst, block_xml.attrib
            self.block_to_inst[name] = inst

    def load_net_file_ios(self, net_file):
        """
        Loads input and outputs net names from the netlist file.
        """

        def get_ios(net_root, io_types):
            "Get the top level io signals from the netlist XML root."
            for io_type in io_types:
                # NOTE: XPath permits whitespace before the call parens,
                # so "text ()" is equivalent to "text()".
                io = net_root.xpath("/block/{}/text ()".format(io_type))
                if len(io) == 1:
                    yield io[0]

        net_xml = ET.parse(net_file)
        net_root = net_xml.getroot()

        for io_line in get_ios(net_root, ["inputs", "outputs"]):
            io_list = io_line.split(" ")
            for io in io_list:
                # Output entries carry an "out:" prefix; store the bare name.
                self.net_file_io.add(io.replace("out:", ""))

    def get_top_level_block_instance_for_net(self, net_name):
        """
        Returns a name of the top-level block instance for the given net
        name, or None when the net has no associated block.
        """
        assert self.is_net(net_name)

        # VPR prefixes output constraints with "out:"
        if net_name in self.outputs:
            net_name = 'out:' + net_name

        # This is an inout net
        if net_name in self.inout_nets:
            block_names = set()
            # Resolve both halves ("<name>_$inp" input / "out:<name>_$out").
            for prefix, suffix in zip(["", "out:"], ["_$inp", "_$out"]):
                match = NETNAME_REGEX.match(net_name)
                name = prefix + match.group(1) + suffix + match.group(2)
                block_names.add(self.net_to_block[name])

            # Both parts of the net should point to the same block
            assert len(block_names) == 1, (net_name, block_names)
            return self.block_to_inst[list(block_names)[0]]

        # A regular net
        else:
            if net_name in self.net_to_block:
                block_name = self.net_to_block[net_name]
                return self.block_to_inst[block_name]
            else:
                return None

    def constrain_net(self, net_name, loc, comment=""):
        """
        Record an (x, y, z) constraint for the given net. Inout nets get
        two entries, one per split half.
        """
        assert len(loc) == 3
        assert net_name not in self.constraints

        assert self.is_net(net_name), "net {} not in eblif".format(net_name)

        # VPR prefixes output constraints with "out:"
        if net_name in self.outputs:
            net_name = 'out:' + net_name

        # This is an inout net
        if net_name in self.inout_nets:
            for prefix, suffix in zip(["", "out:"], ["_$inp", "_$out"]):
                match = NETNAME_REGEX.match(net_name)
                name = prefix + match.group(1) + suffix + match.group(2)

                self.constraints[name] = IoConstraint(
                    name=name,
                    x=loc[0],
                    y=loc[1],
                    z=loc[2],
                    comment=comment,
                )

        # A regular net
        else:
            self.constraints[net_name] = IoConstraint(
                name=net_name,
                x=loc[0],
                y=loc[1],
                z=loc[2],
                comment=comment,
            )

    def output_io_place(self, f):
        """
        Write the collected constraints to the given io.place file object,
        aborting on conflicting constraints for the same block.
        """
        max_name_length = max(len(c.name) for c in self.constraints.values())
        print(
            HEADER_TEMPLATE.format(
                name="Block Name", nl=max_name_length, s=""
            ),
            file=f
        )

        constrained_blocks = {}

        for vpr_net, constraint in self.constraints.items():
            name = constraint.name
            # Translate the net name to its block (cluster) name when known.
            name = self.net_to_block.get(name) if self.net_to_block else name

            # This block is already constrained, check if there is no
            # conflict there.
            if name in constrained_blocks:
                existing = constrained_blocks[name]

                if existing.x != constraint.x or\
                   existing.y != constraint.y or\
                   existing.z != constraint.z:

                    print(
                        "Error: block '{}' has multiple conflicting constraints!"
                        .format(name)
                    )
                    print("", constrained_blocks[name])
                    print("", constraint)
                    exit(-1)

                # Don't write the second constraint
                continue

            # omit if no corresponding block name for the net
            if name is not None:
                print(
                    CONSTRAINT_TEMPLATE.format(
                        name=name,
                        nl=max_name_length,
                        x=constraint.x,
                        y=constraint.y,
                        z=constraint.z,
                        comment=constraint.comment
                    ),
                    file=f
                )

                # Add to constrained block list
                constrained_blocks[name] = constraint

    def is_net(self, net):
        # True when the (aliased) net name exists in the eBLIF IO list.
        return net in self.net_map.values()

    def is_net_packed(self, net):
        # True when the net appears in the packed .net file IOs
        # (requires load_net_file_ios to have been called).
        return net in self.net_file_io

    def get_nets(self):
        # Yield the aliased names of all top-level IO nets.
        for net in self.inputs:
            yield self.net_map[net]
        for net in self.outputs:
            yield self.net_map[net]