mirror of https://github.com/lnis-uofu/SOFA.git
Added OpenFPGA-Physical submodule
parent f5ff147ddb
commit a4d147e491
@ -0,0 +1,21 @@
#!/bin/bash

# This section installs the shell packages required for the documentation
apt-get install -y graphviz yosys nodejs parallel

# This section installs the python packages
python3 -m venv /buildenv
source /buildenv/bin/activate
pip install --upgrade pip
python3 -m pip install --upgrade --no-cache-dir pip
if [ -f "requirements.txt" ]; then
    python3 -m pip install --upgrade --no-cache-dir -r requirements.txt
fi
if [ -f "docs/requirements.txt" ]; then
    python3 -m pip install --upgrade --no-cache-dir -r docs/requirements.txt
fi
export PYTHONPATH=$PYTHONPATH:${PWD}
export PYTHON_EXEC=python3
(cd docs && make html)
sed -i "/.*docs.*build.*/d" .gitignore
cat .gitignore
@ -0,0 +1,36 @@
# Documentation building environment
FROM ubuntu:20.04
LABEL maintainer="gg_documentation_env <ganesh.gore@utah.edu>"

ENV DEBIAN_FRONTEND noninteractive
ENV APPDIR /app
ENV LANG C.UTF-8

# System dependencies

RUN apt-get -y update && apt-get -y install \
    software-properties-common

RUN add-apt-repository ppa:git-core/ppa
RUN apt-get -y update

RUN apt-get -y install \
    nano \
    curl \
    git \
    texlive-latex-extra \
    texlive-science \
    texlive-pstricks \
    make \
    python-openssl \
    graphviz-dev \
    python3-venv

RUN apt-get -y install \
    libjpeg-dev

RUN apt-get -y install python3-pip

# Install Python tools/libs
WORKDIR /home/docs
CMD ["/bin/bash"]
@ -0,0 +1,65 @@
# Verification environment
FROM ubuntu:20.04
LABEL maintainer="gg_verification_env <ganesh.gore@utah.edu>"

ENV DEBIAN_FRONTEND noninteractive
ENV LANG C.UTF-8

# System dependencies

RUN apt-get -y update && apt-get -y install \
    software-properties-common

RUN add-apt-repository ppa:git-core/ppa
RUN apt-get -y update

RUN apt-get -y install \
    build-essential \
    python-dev \
    autoconf \
    flex \
    ccache \
    bison \
    bc \
    nano \
    curl \
    git \
    make \
    python-openssl \
    python3-venv \
    gcc \
    g++ \
    iverilog \
    python \
    ca-certificates \
    libxft2 \
    libfl-dev \
    libgoogle-perftools-dev \
    python3-pip \
    perl && apt-get clean && rm -rf /var/lib/apt/lists/*

COPY requirements.txt /tmp/requirements.txt
RUN python3.8 -m pip install -r /tmp/requirements.txt

ARG REPO=https://github.com/verilator/verilator
ARG SOURCE_COMMIT=master
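# Usage note (not part of the original file): a specific Verilator revision can be
# selected at image build time with Docker's standard build-arg mechanism, e.g.
#   docker build --build-arg SOURCE_COMMIT=<tag> -f Dockerfile.verification .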

WORKDIR /tmp
RUN git clone "${REPO}" verilator && \
    cd verilator && \
    git checkout "${SOURCE_COMMIT}" && \
    autoconf && \
    ./configure && \
    make -j "$(nproc)" && \
    make install && \
    cd .. && \
    rm -r verilator


# Install Python tools/libs
WORKDIR /home/verification
CMD ["/bin/bash"]
@ -0,0 +1,4 @@
#!/bin/bash
cd $(git rev-parse --show-cdup)
docker build -t gg_documentation_env -f .github/docker/Dockerfile.docs .
docker build -t gg_verification_env -f .github/docker/Dockerfile.verification .
@ -0,0 +1,88 @@
name: Documentation
on:
  push:
    branches:
      - ganesh_dev
      - main

jobs:
  build_docs:
    runs-on: ubuntu-latest
    env:
      BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
    container:
      image: ghcr.io/ganeshgore/gg_documentation_env:latest
      options: >-
        -v /var/tmp/OpenFPGA-Physical/docs:/var/tmp/OpenFPGA-Physical/docs
    steps:
      - name: Checkout repository
        uses: actions/checkout@v2
      # ========================================
      # Build Documentation
      # ========================================
      - name: Build Documentation
        shell: bash
        id: build
        run: |
          cat ./.github/build_docs.sh
          bash ./.github/build_docs.sh
      - name: Upload regression results
        if: ${{ failure() }}
        uses: actions/upload-artifact@v2
        with:
          name: docs_build
          retention-days: 1
          path: |
            docs/**/*
      - name: Deploy documentation to shared volume
        if: ${{ (steps.build.outcome == 'success') }}
        shell: bash
        run: |
          BranchDir=$(basename ${GITHUB_REF})
          echo "Trigger branch ${BranchDir}"
          rm -rf /var/tmp/OpenFPGA-Physical/docs/${BranchDir}
          mkdir -p /var/tmp/OpenFPGA-Physical/docs/${BranchDir}
          cp -rf docs/_build/html /var/tmp/OpenFPGA-Physical/docs/${BranchDir}/
      - name: Deploy documentation
        if: ${{ (steps.build.outcome == 'success') }}
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_branch: docs
          publish_dir: docs/_build/html
          destination_dir: docs
          exclude_assets: "doctrees"
      - name: Add Head value to HTML
        if: ${{ steps.build.outcome == 'success' }}
        shell: bash
        run: |
          find docs/_build/html -name "*.html*" -exec sed -i "s/<head>/<head><base href=\"${BRANCH_NAME}\">/" {} \;
          if [[ ! ${BRANCH_NAME} == "main" ]]; then echo "body{border-top: 20px solid #FF9800;}" > docs/_build/html/_static/custom.css; fi
      # - name: Create dummy documentation
      #   shell: bash
      #   id: build
      #   run: |
      #     mkdir -p docs/_build/html
      #     echo ${BRANCH_NAME}
      #     echo "This is HTML page " > docs/_build/html/index.html
      # Not required, the SCP run already does that
      # - name: Removing previous documentation from server
      #   uses: appleboy/ssh-action@master
      #   with:
      #     host: ${{ secrets.DOCS_HOST }}
      #     username: ${{ secrets.DOCS_USERNAME }}
      #     key: ${{ secrets.DOCS_KEY }}
      #     script: |
      #       whoami
      #       ls ${{ secrets.DOCS_PATH }}
      #       rm -rf ${{ secrets.DOCS_PATH }}/${{ github.head_ref || github.ref_name }}
      - name: Copy HTML files to docs server
        uses: appleboy/scp-action@master
        with:
          host: ${{ secrets.DOCS_HOST }}
          username: ${{ secrets.DOCS_USERNAME }}
          key: ${{ secrets.DOCS_KEY }}
          rm: True
          source: "docs/_build/html"
          strip_components: 3
          target: ${{ secrets.DOCS_PATH }}/${{ github.head_ref || github.ref_name }}
@ -0,0 +1,132 @@
# Ignore all
*
# Unignore all files with extensions
!*.*
# Unignore all dirs
!*/
# Unignore makefiles
!Makefile*
# Ignore directories starting with . in the root directory
/.*/
# All files starting with an underscore
_*.*
# Unignore python init file
!__init__.py
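# Net effect of the rules above (illustrative): files with extensions and Makefiles
# stay tracked, while extension-less outputs, _-prefixed files, and root
# dot-directories do not.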

!.github

# Ignore all the projects
FPGA**

# # Ignore following files
# *_PNR/*_task/config/task.conf
# *_PNR/*_task/latest
# *_PNR/*_task/run001
# *_PNR/parser.out
# *_PNR/parsetab.py
# *_PNR/release/*/pickle/
# *_PNR/release/*/TCL/
# *_PNR/release/*/data/
# *_PNR/release/*/SVG/

# # Skip some intermediate verilog source
# **/*_Verilog/TaskConfigCopy
# **/*_Verilog/SRCBackup
# **/*_Verilog/SRCOutline
# # Unignore required SDC later
# *_PNR/*_Verilog/SDC/*
# !*_PNR/*_Verilog/SDC/disable_configure_ports.sdc
# # Remove following later
# **/*_Verilog/physical_contraints
# *_PNR/*_Verilog/TESTBENCH/*/*
# !*_PNR/*_Verilog/TESTBENCH/*/*.gz
# *_PNR/*_Verilog/scandef
# *_PNR/*_Verilog/*/*_tb.v
# *_PNR/*_Verilog/*/*_formal_verification.v

# Python ignores
__pycache__/
*.py[cod]
*$py.class

# # PnR tool ignores
# **/*.ndm/
# **/*_block.dlib/
# **/dp/*/block_pg
# **/dp/*/block_budgets
# **/dp/*/work
# **/dp/*/work_dir
# **/dp/*/write_data_dir
# **/dp/*/split
# **/dp/*/logs_icc2
# **/dp/*/outputs_icc2
# **/dp/*/rpts_icc2
# **/dp/*/over_utilization*.tcl
*command.log
icc2_*output.txt
check_workspace.ems
crte_*.txt
check_design*.log
utils/_tempFiles
utils/LoadTools.*

# # Verification and simulation related
# **/Verification/*run/*
# # !**/Verification/*run/*.vcd
# # !**/Verification/*run/sim_run.log

# Documentation ignore
**/_build
docs/build.log

# # DP related
# **/dp/work*
# **/dp/split
# **/dp/logs_icc2
# **/dp/rpts_icc2
# **/dp/*.log

# # Unignore symbolic links
# !INIT
# !*_PNR/Verification/*_tests
# !CustomModules
# !sc_verilog
# !custom_scripts
# !extra_scripts
# !rm_icc2_dp_scripts
# !rm_icc2_pnr_scripts
# !rm_setup
# !PrimeTimeScripts
# !rm_setup_common
# !custom_pnr_scripts
# !redhawk
# !voltus_script
# !Benchmarks

# # PNR related
# **/pnr/*.log
# **/*_PNR/pnr/*__*
# **_PNR/pnr/grid_clb
# **_PNR/pnr/fpga_top/*
# **_PNR/pnr/fpga_top/outputs_icc2
# !**/pnr/fpga_top/rpts_pt

# # Ignore Verdi tempfiles
# novas.conf
# novas.rc
# Verdi*Log
# *.svf

# # Release files from DP
# *_PNR/release/dp/floorplan/*.*
# *_PNR/release/dp/floorplan/*_*/*
# !*_PNR/release/dp/floorplan/*_*/floorplan.def
# !*_PNR/release/dp/floorplan/*_*/*.tcl
# !*_PNR/release/dp/*/pinConstraints/*_*/pinFixUps.txt
# *_PNR/release/dp/fpga_top/preferred_pin_locations.tcl

# # Release files from PNR
# *_PNR/release/pnr/fpga_top/*.def
# !*_PNR/release/pnr/fpga_top/*.def.xz
# *_PNR/release/pnr/fpga_top/*.sdc
@ -0,0 +1,606 @@
# Design Project Makefile
# ~~~~~~~~~~~~~~~~~~~~~~~
#
# This makefile is placed in the design project; it is used to generate the
# verilog netlist, execute OpenFPGA, and post-process the netlist to
# make it easier to place and route.
#
# List of important targets
#
# Planning:
#
# * **generate_shapes**: Render configured FPGA fabric
# * **generate_fabric_key**: Create fabric key and render
# * **generate_clock_tree**: Create feedthrough for clock signals
# * **generate_global_connectivity**: Create feedthrough for global signals
#
# Main flow:
#
# * **run_openfpga**: Generate netlist with OpenFPGA
# * **netlist_cleanup**: Collect sources and lint netlist for better version control
# * **netlist_synth**: Script to run after the netlist is cleaned up
# * **restructure_netlist**: Restructure physical hierarchy of the netlist
# * **floorplan**: Perform explicit shaping if required
#
#
# .. graphviz::
#
#    digraph main_project_makefiles {
#        "generate_shapes" ->
#        "run_openfpga" ->
#        "netlist_cleanup" ->
#        "netlist_synth" ->
#        "restructure_netlist";
#        "generate_shapes" ->
#        "generate_clock_tree" ->
#        "generate_global_connectivity" ->
#        "restructure_netlist";
#        "netlist_cleanup" ->
#        "generate_fabric_key" ->
#        "restructure_netlist" ->
#        "floorplan" ->
#        "write_source_files" ->
#        "timing_extraction";
#    }

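# A typical end-to-end pass through the flow (illustrative only; the individual
# targets are documented below, and ``make all`` chains the same steps through
# the ``floorplan`` dependency tree):
#
#   make generate_shapes run_openfpga netlist_cleanup netlist_synth
#   make generate_fabric_key restructure_netlist floorplan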
SHELL = bash
PYTHON_EXEC ?= python3.8
ICC2_LM_EXEC = icc2_lm_shell
DC_SHELL_EXEC ?= dc_shell
PT_SHELL_EXEC ?= pt_shell
FM_SHELL_EXEC ?= fm_shell
RERUN = 0
TB = top
OPTIONS =
OPENPHY_ROOT ?= ../../
LOAD_TOOLS ?= ${OPENPHY_ROOT}/LoadTools.sh
CONF = task_simulation
VERDI_EXEC ?= Verdi-3
SCRIPTS_DIR ?= scripts
PYTHON_OPT ?=

.SILENT:
.ONESHELL:
.DEFAULT: help

generate_shapes:
# Renders the FPGA fabric in SVG format
#
# Generates SVGs for the original and the modified floorplan; this script
# also stores the pickle information (``RENDER_FABRIC_SCRIPT``)
#
# Reference template : render_fabric.py
#
	source ${LOAD_TOOLS}
	source config.sh
	echo $${RENDER_FABRIC_SCRIPT}
	RENDER_FABRIC_SCRIPT=$${RENDER_FABRIC_SCRIPT:='../../openfpga-physical/render_fabric.py'}
	if test -f "$${RENDER_FABRIC_SCRIPT}"; then
		[[ $$RENDER_FABRIC_SCRIPT == *".py"* ]] && $${PYTHON_EXEC} $${RENDER_FABRIC_SCRIPT}
		[[ $$RENDER_FABRIC_SCRIPT == *".sh"* ]] && source $${RENDER_FABRIC_SCRIPT}
	else
		echo "[ERROR] RENDER_FABRIC_SCRIPT does not exist ($${RENDER_FABRIC_SCRIPT})"
	fi
	date > generate_shapes

generate_global_connectivity: generate_shapes
# generate_global_connectivity
#
# Generates post-OpenFPGA netlist modifications for global signals
#
	source ${LOAD_TOOLS}
	source config.sh
	if test -f "$${GLOBAL_FT_SCRIPT}"; then
		[[ $$GLOBAL_FT_SCRIPT == *".py"* ]] && $${PYTHON_EXEC} $${GLOBAL_FT_SCRIPT}
		[[ $$GLOBAL_FT_SCRIPT == *".sh"* ]] && source $${GLOBAL_FT_SCRIPT}
	else
		echo "[ERROR] GLOBAL_FT_SCRIPT does not exist ($${GLOBAL_FT_SCRIPT})"
	fi
	date > generate_global_connectivity

generate_clock_tree: generate_shapes
# generate_clock_tree
#
# Generates post-OpenFPGA netlist modifications for all clocks
#
	source ${LOAD_TOOLS}
	source config.sh
	if test -f "$${CLOCK_FT_SCRIPT}"; then
		[[ $$CLOCK_FT_SCRIPT == *".py"* ]] && $${PYTHON_EXEC} $${CLOCK_FT_SCRIPT}
		[[ $$CLOCK_FT_SCRIPT == *".sh"* ]] && source $${CLOCK_FT_SCRIPT}
	else
		echo "[ERROR] CLOCK_FT_SCRIPT does not exist ($${CLOCK_FT_SCRIPT})"
	fi
	date > generate_clock_tree

generate_fabric_key: generate_shapes
# Generates the fabric key and stores it in the <proj_name>_task/arch directory
#
# An external python script is used to generate the XML-based fabric key file;
# the following parameters affect this recipe
#
# - ``GENERATE_FABRIC_KEY`` Python script which generates the fabric key XML file
# - ``FABRIC_KEY_PATTERN`` This parameter is passed to the python script
	source ${LOAD_TOOLS}
	source config.sh
	RENDER_FABRIC_SCRIPT=$${GENERATE_FABRIC_KEY:='../../openfpga-physical/generate_fabric_key.py'}
	echo "Executing $${RENDER_FABRIC_SCRIPT}"
	if test -f "$${GENERATE_FABRIC_KEY}"; then
		[[ $$GENERATE_FABRIC_KEY == *".py"* ]] && $${PYTHON_EXEC} ${PYTHON_OPT} $${GENERATE_FABRIC_KEY}
		[[ $$GENERATE_FABRIC_KEY == *".sh"* ]] && source $${GENERATE_FABRIC_KEY}
	else
		echo "[ERROR] GENERATE_FABRIC_KEY does not exist ($${GENERATE_FABRIC_KEY})"
	fi
	date > generate_fabric_key


run_openfpga: generate_shapes
# Generates the FPGA verilog netlist from the ``task_generation.conf`` task
#
# The generated Verilog netlist is stored in the ``VERILOG_PROJ_DIR`` (`<PROJ_NAME>_verilog`) directory.
# The following variables are referred from the ``config.sh`` file
#
# - ``TASK_DIR_NAME``:- Variable used to find the task; by default ``task_generation.conf`` is executed
# - ``CUSTOM_MODULES_LIST``:- A file list whose entries get copied to the custom modules directory after netlist generation

	SECONDS=0
	source ${LOAD_TOOLS}
	source config.sh
	CURR_OF_VERSION=$$($${OPENFPGA_PATH}/**/openfpga --version | head -n 1 | cut -c 10-16)
	if test -f "$${OF_VERSION}"; then
		if [[ $${CURR_OF_VERSION} != $${OF_VERSION} ]]; then
			echo "OPENFPGA_VERSION is not compatible; expected $${OF_VERSION}, found $${CURR_OF_VERSION}"
			exit
		fi
	fi
	# ============================================================================
	# =================== Clean Previous Run ===================================
	# ============================================================================
	rm -f $${OPENFPGA_PATH}/openfpga_flow/tasks/$${TASK_DIR_NAME}
	(cd ./$${TASK_DIR_NAME}/config && rm -f task.conf && cp task_generation.conf task.conf)

	# ============================================================================
	# ===================== Generate Netlist ===================================
	# ============================================================================
	rm -rf $${TASK_DIR_NAME}/run**
	(currDir=$${PWD} && cd $$OPENFPGA_PATH && source openfpga.sh && cd $$currDir &&
	run-task $${TASK_DIR_NAME} --remove_run_dir all
	run-task $${TASK_DIR_NAME} ${OPTIONS})
	if [ $$? -eq 1 ]; then
		echo "X X X X X X Failed to generate netlist X X X X X X"; exit 1;
	fi
	# Create the run directory link locally
	run_dir=$$(realpath --relative-to=$${PWD} $$(readlink -f */latest/*/*/*))
	echo "Run Directory: $${run_dir}"
	ln -sfn ./$${run_dir} _run_dir

	# === Remove timestamps from generated sources for better version control ====
	find ./$${TASK_DIR_NAME}/latest/*/*/*/ -type f -name "*.v" -print0 | xargs -0 sed -i "/^\/\/.*Date.*/d"
	find ./$${TASK_DIR_NAME}/latest/*/*/*/ -type f -name "*.sdc" -print0 | xargs -0 sed -i "/^#.*Date.*/d"
	find ./$${TASK_DIR_NAME}/latest/*/*/*/ -type f -name "*.xml" -print0 | xargs -0 sed -i "/^.*Date.*/d"

	# ======================= Log runtime info =================================
	duration=$$SECONDS
	date > run_openfpga
	echo "$$(($$duration / 60)) minutes and $$(($$duration % 60)) seconds elapsed." >> run_openfpga

netlist_cleanup: run_openfpga
# This cleans up the netlist
#
# The following steps are executed to clean up the netlist for version control purposes
#
# * Remove datetime stamps from all files (.v/.xml/.sdc)
# * Create list of directories (SRC, SDC, TESTBENCH, SRCOutline, SRCOriginal, SRCLint)
# * Copy the OpenFPGA generated verilog netlist and SDC to the `SRCOriginal` and `SDC` directories respectively
# * Copy all the files from the `sc_verilog` and `CustomModules` directories available in the ``*_task`` directory to the ``*_verilog`` directory
# * If the ``CUSTOM_MODULES_LIST`` variable is set, copy all files listed in this file to the `CustomModules` directory
# * Clean (remove variable lines) the openfpgashell.log file and back it up in the ``VERILOG_PROJ_DIR`` directory
# * Use ``spydrnet-physical`` to lint all verilog sources in ``SRCOriginal`` and store them in the ``SRCLint`` directory
# * Create a copy of SRCLint in SRC before post-processing the netlist
#

	SECONDS=0
	source ${LOAD_TOOLS}
	source config.sh

	echo "spydrnet_physical" > .spydrnet
	# ============================================================================
	# ===================== Copy generated files ===============================
	# ============================================================================
	for directory in SDC XML GSB TESTBENCH SRCOutline SRCOriginal SRCLint \
		/SRCOriginal/CustomModules /SRCOriginal/sc_verilog; do
		echo "Creating directory $${VERILOG_PROJ_DIR}/$${directory}"
		rm -rf $${VERILOG_PROJ_DIR}/$${directory}
		mkdir -p $${VERILOG_PROJ_DIR}/$${directory}
	done

	cp -r ./$${TASK_DIR_NAME}/latest/*/*/*/SRC/* $${VERILOG_PROJ_DIR}/SRCOriginal
	cp -r ./$${TASK_DIR_NAME}/latest/*/*/*/SDC/* $${VERILOG_PROJ_DIR}/SDC || true
	cp -r ./$${TASK_DIR_NAME}/latest/*/*/*/gsb/* $${VERILOG_PROJ_DIR}/GSB || true
	cp -r ./$${TASK_DIR_NAME}/latest/*/*/*/*.xml $${VERILOG_PROJ_DIR}/XML || true
	cp ./$${TASK_DIR_NAME}/sc_verilog/*.v $${VERILOG_PROJ_DIR}/SRCOriginal/sc_verilog || true
	cp -r ./$${TASK_DIR_NAME}/CustomModules/* $${VERILOG_PROJ_DIR}/SRCOriginal/CustomModules || true

	# ============================================================================
	# ===================== Copy Custom Modules =================================
	# ============================================================================
	if test -f "$${CUSTOM_MODULES_LIST}"; then
		cat $${CUSTOM_MODULES_LIST} | while read line; do
			cp $$line $${VERILOG_PROJ_DIR}/SRCOriginal/CustomModules/;
			echo "[ Info] Copied custom module $$line"
		done
	else
		echo "[WARN] CUSTOM_MODULES_LIST variable not found, no custom module is copied ($${CUSTOM_MODULES_LIST})"
	fi
	# Flattens the multi-line wire declarations
	# find $${VERILOG_PROJ_DIR}/SRCOriginal/CustomModules -type f -name "*.v" -print0 | xargs -0 sed -i ':begin;$$!N;s/\([)|,]\)\n\s*/\1/;tbegin;P;D'

	# ============================================================================
	# ====================== Keep Copy of OpenFPGA.log ===========================
	# ============================================================================
	cp -r ./$${TASK_DIR_NAME}/latest/*/*/*/openfpgashell.log $${VERILOG_PROJ_DIR}
	sed -i "s/^Compiled.*/--line removed--/" $${VERILOG_PROJ_DIR}/openfpgashell.log
	sed -i "s/.*took.*seconds.*/--line removed--/" $${VERILOG_PROJ_DIR}/openfpgashell.log
	sed -i "s/^\*\*.*/--line removed--/" $${VERILOG_PROJ_DIR}/openfpgashell.log
	sed -i "s/\/.*xml/--line removed--/" $${VERILOG_PROJ_DIR}/openfpgashell.log
	sed -i "s/\/.*openfpga -batch/--line removed-- openfpga -batch/" $${VERILOG_PROJ_DIR}/openfpgashell.log
	sed -i "s/tool_comment.*/tool_comment=\"\">/" $${VERILOG_PROJ_DIR}/XML/*.xml
	find ./$${VERILOG_PROJ_DIR}/GSB -name "*.xml" -type f -exec xmllint --output '{}' --format '{}' \;
	find ./$${VERILOG_PROJ_DIR}/XML -name "*.xml" -type f -exec xmllint --output '{}' --format '{}' \;

	# ============================================================================
	# ======================= Pre Verilog Script ================================
	# ============================================================================

	if test -f "$${PRE_VERILOG_LINT}"; then
		[[ $$PRE_VERILOG_LINT == *".py"* ]] && $${PYTHON_EXEC} $${PRE_VERILOG_LINT}
		[[ $$PRE_VERILOG_LINT == *".sh"* ]] && source $${PRE_VERILOG_LINT}
	fi

	# ============================================================================
	# ======================= Lint Original netlist =============================
	# ============================================================================
	find $${VERILOG_PROJ_DIR}/SRCOriginal -type f -name "*.v" -exec sed -i "s/^\`default_nettype/\/\/ \`default_nettype/g" '{}' \;
	cp -r $${VERILOG_PROJ_DIR}/SRCOriginal/CustomModules $${VERILOG_PROJ_DIR}/SRCLint/
	# To create a complete linted netlist version for version control
	for eachFile in $$(grep -rln "^module " $${VERILOG_PROJ_DIR}/SRCOriginal/); do
		[[ $$eachFile == *"_tb"* ]] && continue
		[[ $$eachFile == *"_verification"* ]] && continue
		[[ $$eachFile == *"sc_verilog"* ]] && continue
		# Need to remove this; currently this skips the linting of custom modules
		[[ $$eachFile == *"CustomModules"* ]] && continue
		if [[ $${eachFile} == *"fpga_top.v"* ]]; then
			echo "Found fpga_top.v"
			$${PYTHON_EXEC} -c "import spydrnet as sdn; import yaml; from pathlib import Path; import os;\
			netlist = sdn.parse('$${eachFile}'); netlist.name='$$PROJ_NAME'; \
			Path(os.path.dirname('$${eachFile/SRCOriginal/SRCLint}')).mkdir(parents=True, exist_ok=True); \
			modules = sorted([e for e in netlist.top_instance.reference.get_definitions()], key=lambda x: x.name); \
			instances = {m.name:sorted([i.name for i in m.references]) for m in modules}; \
			yaml.dump(instances, open('$${VERILOG_PROJ_DIR}/SRCLint/top_hierarchy.yml', 'w'));\
			sdn.compose(netlist, '$${eachFile/SRCOriginal/SRCLint}', write_blackbox=False, skip_constraints=True)" | grep -v "Plugins"
		else
			echo -n $${eachFile/SRCOriginal/SRCLint}
			$${PYTHON_EXEC} -c "import spydrnet as sdn; import yaml; from pathlib import Path; import os;\
			netlist = sdn.parse('$${eachFile}'); netlist.name='$$PROJ_NAME'; \
			Path(os.path.dirname('$${eachFile/SRCOriginal/SRCLint}')).mkdir(parents=True, exist_ok=True); \
			sdn.compose(netlist, '$${eachFile/SRCOriginal/SRCLint}', write_blackbox=False, skip_constraints=True)" | grep -v "Plugins"
			echo " "$$?
		fi
	done

	find $${VERILOG_PROJ_DIR}/SRCOriginal -type f -name "*.v" -exec sed -i "s/.*default_nettype/\`default_nettype/g" '{}' \;

	# ============================================================================
	# ======================= Post Verilog Script ================================
	# ============================================================================
	if test -f "$${POST_VERILOG_LINT}"; then
		[[ $$POST_VERILOG_LINT == *".py"* ]] && $${PYTHON_EXEC} $${POST_VERILOG_LINT}
		[[ $$POST_VERILOG_LINT == *".sh"* ]] && source $${POST_VERILOG_LINT}
	fi

	# ======================= Log runtime info =================================
	duration=$$SECONDS
	date > netlist_cleanup
	echo "$$(($$duration / 60)) minutes and $$(($$duration % 60)) seconds elapsed." >> netlist_cleanup

netlist_synth: netlist_cleanup
# Post netlist cleanup script
#
# - ``NETLIST_SYNTH_SCRIPT``:- This variable points to a shell script which is executed after verilog netlist generation
	SECONDS=0
	source ${LOAD_TOOLS}
	source config.sh
	# ============================================================================
	# ====================== Post RunOpenFPGA ====================================
	# ============================================================================
	if test -f "$${NETLIST_SYNTH_SCRIPT}"; then
		if [[ $$NETLIST_SYNTH_SCRIPT == *".py"* ]]; then $${PYTHON_EXEC} $${NETLIST_SYNTH_SCRIPT}; fi
		if [[ $$NETLIST_SYNTH_SCRIPT == *".sh"* ]]; then source $${NETLIST_SYNTH_SCRIPT}; fi
	else
		echo "[ERROR] NETLIST_SYNTH_SCRIPT does not exist ($${NETLIST_SYNTH_SCRIPT})"
	fi
	echo $$?
	# ======================= Log runtime info =================================
	duration=$$SECONDS
	date > netlist_synth
	echo "$$(($$duration / 60)) minutes and $$(($$duration % 60)) seconds elapsed." >> netlist_synth


restructure_netlist: netlist_synth generate_fabric_key
# This is the netlist restructuring phase
#
# - Collects fabric_indepenent_bitstream.xml if it is generated
# - Restructures the netlist: reads the netlist from the ``SRCLint`` directory and creates a restructured version in the ``SRC`` directory
# - Cleans up GSB modules
#
# The following variables are referred from the ``config.sh`` file
# ``RESTRUCT_NETLIST``:- This is a python or bash file which will be executed to restructure the netlist
#
	SECONDS=0
	source ${LOAD_TOOLS}
	source config.sh

	rm -rf $${VERILOG_PROJ_DIR}/SRC
	mkdir -p $${VERILOG_PROJ_DIR}/SRC
	cp -r $${VERILOG_PROJ_DIR}/SRCLint/CustomModules $${VERILOG_PROJ_DIR}/SRC/

	# If any bitstream was generated, copy it and save it in ${VERILOG_PROJ_DIR}/TESTBENCH/
	for TBFileOriginal in $$(find ./$${TASK_DIR_NAME}/latest/*/*/*/* -name 'fabric_indepenent_bitstream.xml'); do
		echo "= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = "
		# Extract the top module name of the benchmark
		TopModuleName=$$(echo $$TBFileOriginal | sed -e "s/.*_task\/[^\/]*\/[^\/]*\/\([^\/]*\).*/\1/g" )
		mkdir -p $${VERILOG_PROJ_DIR}/TESTBENCH/$${TopModuleName}
		cp -r ./$${TASK_DIR_NAME}/latest/*/$${TopModuleName}/*/*_bitstream.xml $${VERILOG_PROJ_DIR}/TESTBENCH/$${TopModuleName}
		cp -r ./$${TASK_DIR_NAME}/latest/*/$${TopModuleName}/*/*_bitstream.bit $${VERILOG_PROJ_DIR}/TESTBENCH/$${TopModuleName}
	done

	# ============================================================================
	# ====================== Execute Restructuring script ========================
	# ============================================================================
	if test -f "$${RESTRUCT_NETLIST}"; then
		echo "Executing RESTRUCT_NETLIST: $${RESTRUCT_NETLIST}"
		if [[ $$RESTRUCT_NETLIST == *".py"* ]]; then $${PYTHON_EXEC} $${RESTRUCT_NETLIST}; fi
		if [[ $$RESTRUCT_NETLIST == *".sh"* ]]; then source $${RESTRUCT_NETLIST}; fi
	fi

	# ========================= Log runtime info =================================
	duration=$$SECONDS
	date > restructure_netlist
	echo "$$(($$duration / 60)) minutes and $$(($$duration % 60)) seconds elapsed." >> restructure_netlist


floorplan: restructure_netlist
	source ${LOAD_TOOLS}
	source config.sh
	if test -f "$${FLOORPLAN_SCRIPT}"; then
		[[ $$FLOORPLAN_SCRIPT == *".py"* ]] && $${PYTHON_EXEC} $${FLOORPLAN_SCRIPT}
		[[ $$FLOORPLAN_SCRIPT == *".sh"* ]] && source $${FLOORPLAN_SCRIPT}
	else
		echo "[WARN] FLOORPLAN_SCRIPT does not exist ($${FLOORPLAN_SCRIPT})"
	fi
	date > floorplan

run_openfpga_sim:
# Runs the simulation deck
#
# TODO explain more
#
	SECONDS=0
	source ${LOAD_TOOLS}
	source config.sh
	CONF=${CONF}
	# =================== Clean Previous Run =================================
	rm -f $${OPENFPGA_PATH}/openfpga_flow/tasks/$${TASK_DIR_NAME}
	(cd ./$${TASK_DIR_NAME}/config && rm -f task.conf && cp ${CONF}.conf task.conf)

	# ===================== Generate Netlist =================================
	(currDir=$${PWD} && cd $$OPENFPGA_PATH && source openfpga.sh && cd $$currDir &&
	run-task $${TASK_DIR_NAME} --remove_run_dir all &&
	run-task $${TASK_DIR_NAME} ${OPTIONS})
	if [ $$? -eq 1 ]; then
		echo "X X X X X X Failed to generate netlist X X X X X X"; exit 1;
	fi
	rm -rf $${VERILOG_PROJ_DIR}/TESTBENCH/${CONF}
	# ================= Create run directory link locally =================
	run_dir=$$(realpath --relative-to=$${PWD} $$(readlink -f */latest/*/*/*))
	echo "Run Directory: $${run_dir}"
	ln -sfn ./$${run_dir} _run_dir
	# ================= Copy Bitstream from generated source ===================
	for TBFileOriginal in $$(find ./$${TASK_DIR_NAME}/latest/*/*/*/* -name '*fabric_bitstream.xml'); do
		echo "= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = "
		# Extract the top module name of the benchmark
		TopModuleName=$$(echo $$TBFileOriginal | sed -e "s/.*_task\/[^\/]*\/[^\/]*\/\([^\/]*\).*/\1/g" )
		echo "Found Testbench = $${TopModuleName}"
		if test -f "$${OPENFPGA_RUN_POSTPROCESS}"; then
			echo "Sourcing OPENFPGA_RUN_POSTPROCESS $${OPENFPGA_RUN_POSTPROCESS}"
			source $${OPENFPGA_RUN_POSTPROCESS}
		fi
	done

all: floorplan
	date > all


report_broken_symlinks:
# Reports broken symbolic links in the current directory
#
	symlinks -r .

extract_area:
# Extracts the module-wise area of a given design
#
# TCL_EXTRACT_AREA_SCRIPT: DC script which extracts area information
# ``../dp/fpga_top/custom_scripts_$${TECHNOLOGY}/design_compiler_$${TECHNOLOGY}.tcl"``
#
	source ${LOAD_TOOLS}
	source config.sh
	mkdir -p _dc_run && cd _dc_run
	if test -f "../$${TCL_EXTRACT_AREA_SCRIPT}"; then
		echo "Executing $${TCL_EXTRACT_AREA_SCRIPT} in DC"
		${DC_SHELL_EXEC} -f ../$${TCL_EXTRACT_AREA_SCRIPT} ${DC_OPTIONS} | tee -i _dc_run.log
	else
		echo "[ ERR] TCL_EXTRACT_AREA_SCRIPT does not exist ($${TCL_EXTRACT_AREA_SCRIPT})"
	fi
	date > extract_area

run_primetime:
# Starts a PrimeTime session on the pre-PNR netlist
#
# PT_PRE_PNR_SCRIPT
#
	source ${LOAD_TOOLS}
	source config.sh
	mkdir -p _pt_run && cd _pt_run
	REF_DIR=.. ${PT_SHELL_EXEC} -file ../$${PT_PRE_PNR_SCRIPT} | tee -i _primetime.log
	date > run_primetime

run_formality:
# Starts a Formality session on the pre-PNR netlist
#
# FM_PRE_PNR_SCRIPT
#
	source ${LOAD_TOOLS}
	source config.sh
	mkdir -p _fm_run && cd _fm_run
	REF_DIR=.. ${FM_SHELL_EXEC} -file $${FM_PRE_PNR_SCRIPT} | tee -i _formality.log
	date > run_formality


pt_post_pnr:
	source ${LOAD_TOOLS}
	source config.sh
	mkdir -p _pt_post_pnr_run && cd _pt_post_pnr_run
	${PT_SHELL_EXEC} -f ../pnr/PrimeTimeScripts/post_pnr_pt_$${TECHNOLOGY}.tcl | tee -i post_pnr_pt.log

clean:
	-\rm -rf netlist_cleanup generate_fabric_key cleanNetlist run_openfpga_sim run_openfpga restructure_netlist *.rpt *.net vpr_stdout.log
	-\rm -rf **/task.conf **/latest **/run00*

clean_all: clean
	-\rm -rf generate_shapes generate_global_connectivity generate_clock_tree proj_const.tcl release *_verilog _dc* _pt* _fm*

OpenVerdi:
# Opens Verdi for schematic viewing
	source ${LOAD_TOOLS}
	source config.sh
	mkdir -p _verdi_run && cd _verdi_run
	${VERDI_EXEC}

OpenVPR:
# Opens the VPR GUI
#
# Useful after an OpenFPGA execution
	source ${LOAD_TOOLS}
	source config.sh
	vpr_line=$$(find ./*_task/latest/*/** -type f -name 'openfpgashell.log')
	vpr_cmd=$$(grep -r "Command line to execute: vpr " $$vpr_line)
	vpr_cmd=$$(echo $${vpr_cmd} | sed "s/.*: vpr/vpr/")
	echo "vpr_cmd=$${vpr_cmd}"
	cd $$(dirname $$vpr_line)
	VPR_EXEC=$$(readlink -f $${OPENFPGA_PATH}/build/*/vpr/vpr)
	echo $$(dirname $${VPR_EXEC})/$${vpr_cmd}
	eval $$(dirname $${VPR_EXEC})/$${vpr_cmd} --disp on

generate_fabric_bitstream:
# Modifies fabric_bitstream.xml for a given ccff_chain
#
# Given a fabric_bitstream.xml and a new (hierarchical) ccff chain sequence,
# it creates an updated fabric_bitstream.xml
	SECONDS=0
	echo "Generating fabric bitstream"
	source ${LOAD_TOOLS}
	source config.sh
	DEFAULT_INPUT_BITSTREAM=$${VERILOG_PROJ_DIR}/XML/and_openfpga_sample_bitstream.xml
	INPUT_BITSTREAM=$${INPUT_BITSTREAM:-$${DEFAULT_INPUT_BITSTREAM}}
	echo "INPUT_BITSTREAM : $${INPUT_BITSTREAM}"
	OUTPUT_BITSTREAM=$${OUTPUT_BITSTREAM:-_recompiled_bitstream.xml}
	$${PYTHON_EXEC} -u $${FABRIC_BITSTREAM_GENERATE} \
		--original_bitstream $${INPUT_BITSTREAM} \
		--original_bitstream_distribution $${VERILOG_PROJ_DIR}/XML/and_bitstream_distribution.xml \
		--instance_mapping $${RELEASE_DIRECTORY}/post_restruct_rpts/InstanceMap.json \
		--ccff_path_directory $${RELEASE_DIRECTORY}/pre_pnr_performance/post_tile_ccff/{}_ccff.yaml \
		--output_bitstream_xml $${OUTPUT_BITSTREAM} \
		--tile_instance_mapping $${RELEASE_DIRECTORY}/post_restruct_rpts/tile_instances.yaml \
		--verbose
	sed -i ':a;N;$$!ba;s/\n\s*<\/bit/<\/bit/g' $${OUTPUT_BITSTREAM}
	echo "Saved file: $${OUTPUT_BITSTREAM}"
	duration=$$SECONDS
	echo "$$(($$duration / 60)) minutes and $$(($$duration % 60)) seconds elapsed."

generate_fabric_constraints:
# Generates a TCL constraint file based on the fabric_dependent_bitstream.xml
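# For illustration only (hypothetical bit entry, inferred from the sed pattern below):
# a bitstream line such as <bit value="1" path="..." new_path="ccff_inst/Q">
# becomes ``set_case_analysis 1 ccff_inst/Q`` in the generated TCL file.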
	SECONDS=0
	output_constraint_file=$${OUTPUT_CONSTRINTS:-_recompiled_constraints.tcl}
	bitstream_file=$${INPUT_BITSTREAM:-_recompiled_bitstream.xml}
	cat $${bitstream_file} | grep "new_path" \
		| sed -n 's#.*value="\(.*\)" path.*new_path="\(.*\)">.*#set_case_analysis \1 \2#p' > \
		$${output_constraint_file}

	sed -i '/set_case_analysis x .*/d' $${output_constraint_file}
	if [ -n "$${CLEAN_CONSTRAINT}" ]; then
		echo "Removing set_case_analysis to 0"
		sed -i '/set_case_analysis 0 .*/d' $${output_constraint_file}
	fi
	duration=$$SECONDS
	echo -n "Written $${output_constraint_file} "
	echo "$$(($$duration / 60)) minutes and $$(($$duration % 60)) seconds elapsed."

split_bitstream:
	source ${LOAD_TOOLS}
	source config.sh
	rm -rf $${RELEASE_DIRECTORY}/split_bitstreams
	$${PYTHON_EXEC} -c "from spydrnet_physical.util import split_fabric_bitstream;
	split_fabric_bitstream('$${VERILOG_PROJ_DIR}/XML/fabric_independent_bitstream.xml', \
	'$${VERILOG_PROJ_DIR}/SRCLint/top_hierarchy.yml', output_dir='$${RELEASE_DIRECTORY}/split_bitstreams')"
	find $${RELEASE_DIRECTORY}/split_bitstreams -name "*.xml" -type f -exec xmllint --output '{}' --format '{}' \;

merge_bitstream:
	$${PYTHON_EXEC} -c "from spydrnet_physical.util import merge_fabric_bitstream;
	merge_fabric_bitstream('$${VERILOG_PROJ_DIR}/XML/fabric_independent_bitstream_regenerated.xml', \
	'$${VERILOG_PROJ_DIR}/SRCLint/top_hierarchy.yml', output_dir='split_bitstreams')"

	# XMLLint the regenerated bitstream file
	xml_file=fabric_independent_bitstream_regenerated.xml
	XMLLINT_INDENT=" " xmllint --format $${xml_file} > _reg.tmp && mv _reg.tmp $${xml_file}

	# XMLLint the original bitstream file
	xml_file=./${DESIGN_NAME}_bitstreams/top/fabric_independent_bitstream.xml
	XMLLINT_INDENT=" " xmllint --format $${xml_file} > _reg.tmp && mv _reg.tmp $${xml_file}

	find ./${DESIGN_NAME}_gsb -name "*.xml" -type f -exec xmllint --output '{}' --format '{}' \;


split_defs:
# Splits a def file into multiple files based on sections
#
	sed -n '/^VIAS/, /END VIAS/p' release/dp/floorplan/cbx_1__0_/floorplan.def
	sed -n '/^PINS/, /END PINS/p' release/dp/floorplan/cbx_1__0_/floorplan.def
	sed -n '/^PINPROPERTIES/, /END PINPROPERTIES/p' release/dp/floorplan/cbx_1__0_/floorplan.def
	sed -n '/^SPECIALNETS/, /END SPECIALNETS/p' release/dp/floorplan/cbx_1__0_/floorplan.def
	sed -n '/^COMPONENTS/, /END COMPONENTS/p' release/dp/floorplan/cbx_1__0_/floorplan.def
	sed -n '/^NETS/, /END NETS/p' release/dp/floorplan/cbx_1__0_/floorplan.def

merge_defs:
# Merges multiple def files into one single .def file
#
	echo "NotImplemented"

-include ./Makefile_*


export COMMENT_EXTRACT
help:
# Prints help message for this makefile
	@${PYTHON_EXEC} -c "$$COMMENT_EXTRACT"

define COMMENT_EXTRACT
import re
with open ('Makefile', 'r' ) as f:
    matches = re.finditer('^([a-zA-Z-_]*):.*\n#(.*)', f.read(), flags=re.M)
    space, help = 0, []
    for _, match in enumerate(matches, start=1):
        space = max(space, len(match[1]))
        help.append((match[1], match[2]))
    print("\n".join([(a.ljust(space) + b) for a, b in help]))
endef
@ -0,0 +1,163 @@
#
# Main Makefile
# ~~~~~~~~~~~~~
# This is the main makefile, used to create, sync and update design projects
#

SHELL = bash
PYTHON_EXEC :=python3.8
GIT_EXEC :=git
OPTIONS =
PORT :=8001
GITHUB_COM ?=github
OPENPHY_OWNER ?=ganeshgore
GIT_OPTIONS ?=


.ONESHELL:
.SILENT:

define copy_files
# Script Files
ln -$(1) ../src/generate_top_qlap3.py .
ln -$(1) ../src/PostOpenFPGAScript.sh .
ln -$(1) ../src/Makefile .
ln -$(1) ../src/RestructureNetlist_QLAP3.py .
ln -$(1) ../src/generate_scandef_and_case_analysis.sh .

# Design Planning
echo "Linking PnR Scripts in project folder"
mkdir -p dp/fpga_top
mkdir -p pnr
ln -$(1) ../../../src/dp/fpga_top/custom_scripts dp/fpga_top/
ln -$(1) ../../../src/dp/fpga_top/extra_scripts dp/fpga_top/
ln -$(1) ../../../src/dp/fpga_top/rm_setup dp/fpga_top/
ln -$(1) ../../../src/dp/fpga_top/rm_icc2_dp_scripts dp/fpga_top/
ln -$(1) ../../../src/dp/fpga_top/Makefile dp/fpga_top/

# Place and route
ln -$(1) ../../src/pnr/custom_pnr_scripts pnr/
ln -$(1) ../../src/pnr/PrimeTimeScripts pnr/
ln -$(1) ../../src/pnr/rm_icc2_pnr_scripts pnr/
ln -$(1) ../../src/pnr/rm_setup_common pnr/
echo "PnR Scripts linked in project folder"
endef

# Put it first so that "make" without argument is like "make help".
export COMMENT_EXTRACT
help:
# Prints help message for this makefile
	@${PYTHON_EXEC} -c "$$COMMENT_EXTRACT"

docs:
# Hosts the docs branch of this repository over a python http server
#
# Most of the physical design projects are closed source and hosted on
# a private repository, making hosting documentation on ``readthedocs``
# or ``Github pages`` challenging. In this framework, as soon as the
# documentation is updated in the repository, it is compiled using a configured
# runner, and the output HTML is stored in a separate branch named ``docs``.
#
# The ``make docs`` command allows you to pull the docs branch and host the HTML pages
# using a local python webserver.
#
	${GIT_EXEC} fetch origin docs
	${GIT_EXEC} checkout origin/docs -- docs/_build/html
	git rm -r --cached docs/_build/html
	echo "Hosting server on $$PORT port"
	${PYTHON_EXEC} -m http.server ${PORT} --directory docs/_build/html &
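# Example usage (port value shown is only an example): ``make docs PORT=8080``
# serves the pre-built HTML from the ``docs`` branch at http://localhost:8080.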

create_project: FORCE
# This command creates a design project
#
# This command requires the mandatory argument ``PROJ_NAME``, which should be in
# ``FPGA<XX>x<YY>_<name>`` format, where ``XX`` and ``YY`` are the FPGA grid
# dimensions and ``name`` is any unique project name for the design
#
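# Example invocation (hypothetical project name): ``make create_project PROJ_NAME=FPGA12x12_demo``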
	[ -z "$$PROJ_NAME" ] && echo "PROJ_NAME variable not provided" && exit
	[ -d "$${PROJ_NAME}_pnr" ] && echo "Project $$PROJ_NAME already exists" && exit
	[[ $$PROJ_NAME != FPGA[0-9]*x[0-9]*_* ]] && echo "Project name format is wrong $$PROJ_NAME " && exit
	echo -n "Do you want to create project directory $${PROJ_NAME}_pnr (y/n)? "
	read answer
	if [ "$$answer" != "$${answer#[Yy]}" ] ;then
		echo "Creating project directory"
		mkdir -p $${PROJ_NAME}_pnr && cd $${PROJ_NAME}_pnr
		# Move and rename the example task
		cp -r ../src/Example_OpenFPGA_task .
		mv Example_OpenFPGA_task $${PROJ_NAME}_task
		cp ../src/config.sh .
		sed -i "s/PROJ_NAME=.*\#/PROJ_NAME=$${PROJ_NAME} \#/g" config.sh
		$(call copy_files,s)
	else
		echo "Skipping project initialization"
	fi

FORCE: ;

pull_openfpga_physical:
# This target pulls the latest changes from the openfpga-physical main repository
#
# ``BRANCH`` To specify a specific branch to pull from
#
	BRANCH=$${BRANCH:='main'}
	git config alias.merge "merge --no-commit"
	echo "Executing ${GIT_EXEC} subtree pull --prefix=openfpga-physical \
	git@${GITHUB_COM}.com:${OPENPHY_OWNER}/openfpga-physical.git $${BRANCH} --squash "
	${GIT_EXEC} subtree --squash --prefix=openfpga-physical pull \
	git@${GITHUB_COM}.com:${OPENPHY_OWNER}/openfpga-physical.git $${BRANCH} $${GIT_OPTIONS}
	echo "Pulling new changes to the openfpga-physical scripts"
	git config --unset alias.merge

push_openfpga_physical:
# This target pushes the local changes to the openfpga-physical main repository
	BRANCH=$${BRANCH:='main'}
	if [[ "$${BRANCH}" == "main" ]]; then
		echo "**** Use make push BRANCH_NAME=<name> ****"
		echo "subtree push to the main branch is not supported"
		exit
	fi
	echo "Trying to push subtree to $${BRANCH}"

	current_dir=$$(pwd)
	gitroot=$$(git rev-parse --show-toplevel)
	rel_path=$$(realpath --relative-to=$${gitroot} $$current_dir)

	echo "current_dir $${current_dir}"
	echo "gitroot $${gitroot}"
	echo "rel_path $${rel_path}"

	${GIT_EXEC} subtree push --prefix=openfpga-physical \
	git@${GITHUB_COM}.com:${OPENPHY_OWNER}/openfpga-physical.git $${BRANCH} $${GIT_OPTIONS}

# >>> deprecated: the pull_openfpga_physical target performs a merge with no commit <<<
#
# diff_openfpga_physical:
# # This target shows the diff of the local subtree with the remote repository
# #
# 	BRANCH=$${BRANCH:='main'}
# 	if ! ${GIT_EXEC} remote | grep "openfpga-physical" > /dev/null; then
# 		echo "[ INFO] Adding openfpga-physical in remote"
# 		git remote add openfpga-physical git@${GITHUB_COM}.com:${OPENPHY_OWNER}/openfpga-physical.git
# 	fi
# 	git fetch --depth 1 openfpga-physical $${BRANCH}
# 	git diff openfpga-physical/$${BRANCH} HEAD:openfpga-physical --name-only

openfpga_physical_version:
# This target shows the current version of openfpga-physical in the repository
	echo "TODO"
	echo "Show version from readme file"
	echo "Show last updated option"


define COMMENT_EXTRACT
import re
with open ('Makefile', 'r' ) as f:
    matches = re.finditer('^([a-zA-Z-_]*):.*\n#(.*)', f.read(), flags=re.M)
    for _, match in enumerate(matches, start=1):
        header, content = match[1], match[2]
        print(f" {header:10} {content}")
endef
@ -0,0 +1,16 @@
# OpenFPGA-Physical
A scalable physical design framework for tileable FPGA architectures. It provides all the required functionality, from generating a Verilog netlist using the OpenFPGA framework to implementing a final tapeout-ready GDS layout.

![OpenFPGA_physical Framework](docs/source/images/openfpga_physical_color.svg)

## How to use OpenFPGA-Physical
This repository contains all the extensible base scripts for netlist generation, design restructuring, place and route, and signoff.
To design an FPGA fabric, please clone the following tapeout template repository, which contains the *openfpga-physical* repository as a subtree.
[OpenFPGA-Physical-Tapeout-Template]()

Please read the [How to contribute to OpenFPGA-Physical]() section of the documentation to understand how to keep in sync with the updated scripts.


## Documentation
The complete API documentation for the OpenFPGA-Physical scripts is available at the following link.
[OpenFPGA-Physical Documentation](http://openphy.ganeshgore.com/)
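
## Keeping the subtree up to date
The main Makefile in the tapeout template wraps this in its `pull_openfpga_physical` target; the underlying operation is a plain `git subtree` pull. A minimal sketch, assuming the `openfpga-physical` prefix and the SSH remote used by that Makefile:

```bash
# Pull the latest openfpga-physical scripts into the template repository,
# squashed into a single commit
git subtree pull --prefix=openfpga-physical \
    git@github.com:ganeshgore/openfpga-physical.git main --squash
```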
@ -0,0 +1,111 @@
#!/usr/bin/env bash
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Design Configuration
# ~~~~~~~~~~~~~~~~~~~~
#
# This file contains all the design variables used in the
# various stages of the project
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

# TODO: add a function to check if 3 arguments are passed
export_ () { command export $1=$3; } # Export pretty printed lines
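# For illustration (not part of the original file): a pretty-printed line such as
#   export_ PROJ_NAME = "demo"
# expands to `export PROJ_NAME=demo`; the `=` sign is simply ignored as $2.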

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Design project variables
# ************************
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

export_ PROJ_NAME = "<PROJECT_NAME>" # Project Name
export_ FPGA_SIZE_X = 0 # Grid X Size
export_ FPGA_SIZE_Y = 0 # Grid Y Size
export_ LAYOUT = "high_density"
export_ TECHNOLOGY = "skywater" # Technology name label
export_ DESIGN_NAME = "fpga_top" # Complete Chip (fpga_top) or eFPGA (fpga_core)

export_ DIE_WIDTH = 2655.08 # Default die width
export_ DIE_HEIGHT = 2146.6 # Default die height

export_ PROJ_DIR = ${PWD} # Design project root directory

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Script configuration variables
# ******************************
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
export_ RENDER_FABRIC_SCRIPT = ../../openfpga-physical/render_fabric.py
export_ GENERATE_FABRIC_KEY = ../../openfpga-physical/generate_fabric_key.py
export_ FABRIC_KEY_PATTERN = "vertical"
export_ GLOBAL_FT_SCRIPT = ""
export_ CLOCK_FT_SCRIPT = ""
export_ FLOORPLAN_SCRIPT = ""
export_ CUSTOM_MODULES_LIST = ""

export_ NETLIST_SYNTH_SCRIPT = ""
export_ RESTRUCT_NETLIST = ""
export_ PT_PRE_PNR_SCRIPT = ""

export_ TCL_EXTRACT_AREA_SCRIPT = "./dp/fpga_top/custom_scripts_${TECHNOLOGY}/design_compiler_${TECHNOLOGY}.tcl" # DC script to extract area or run synthesis

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Derived Or Fixed Variables
# ***************************
#
# These are derived from the variables above; no need to set them explicitly
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
export_ OPENFPGA_ENGINE_PATH = ${OPENFPGA_PATH}
export_ TASK_DIR_NAME = ${PROJ_NAME}_task
export_ VERILOG_PROJ_DIR = ${RELEASE_DIRECTORY}/${PROJ_NAME}_Verilog


# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Variables to copy files to destination
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
export_ TAPEOUT_DIRECTORY = ${TAPEOUT_BASE}/OpenFPGA-ArcticPro3
export_ TAPEOUT_SCRIPT = ../utils/tapeout_script.sh

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Place and route related variables
# *********************************
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

export_ SH_PRE_INIT_DESIGN = ""
export_ SH_POST_INIT_DESIGN = ""
export_ SH_PRE_PLACE_OPT = ""
export_ SH_POST_PLACE_OPT = ""
export_ SH_PRE_CLOCK_OPT_SCRIPT = ""
export_ SH_POST_CLOCK_OPT_SCRIPT = ""
export_ SH_PRE_ROUTE_AUTO_SCRIPT = ""
export_ SH_POST_ROUTE_AUTO_SCRIPT = ""
export_ SH_PRE_ROUTE_OPT_SCRIPT = ""
export_ SH_POST_ROUTE_OPT_SCRIPT = ""
export_ SH_PRE_SCRIPT = ""
export_ SH_POST_SCRIPT = ""
export_ SH_PRE_ICV_IN_DESIGN_SCRIPT = ""
export_ SH_POST_ICV_IN_DESIGN_SCRIPT = ""

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# FLOWVAR
# *******
#
# Any variable name starting with FLOWVAR_* will be loaded in tcl scripts by default
# and can be accessed with the variable ${standard_cells}
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
export_ FLOWVAR_STANDARD_CELLS = "sc_hd"

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Default variables
# *****************
#
# These variables have default values and typically do not require any change
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

export_ DESIGN_STYLE = "hier" # Only hier supported
export_ INIT_DESIGN_INPUT = "ASCII" # Load physical flow from DEF
export_ RELEASE_DIRECTORY = "release"
export_ SCRIPT_DIR = $(realpath "../../openfpga-physical/scripts") # This directory contains all the common scripts dc_utils, pt_utils, icc2_utils
export_ DC_SCRIPT_DIR = ${SCRIPT_DIR}/dc_utils/ # Extendable variable "path1 path2"
export_ ICC2_SCRIPT_DIR = ${SCRIPT_DIR}/icc2_utils/ # Extendable variable "path1 path2"
export_ PT_SCRIPT_DIR = ${SCRIPT_DIR}/pt_utils/ # Extendable variable "path1 path2"
@ -0,0 +1,70 @@
# ##############################################################################
# Tool: OpenFPGA-Physical
# Script: generate_fabric_key.py
# Description : This script creates a fabric_key.xml file for a given FPGA size.
# Currently this script generates a pattern which routes the configuration chain
# from the right top corner to the left bottom corner by traversing horizontally
# in every row of the FPGA grid
################################################################################
"""
Fabric key generation script
"""
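# Note: this script is normally invoked through ``make generate_fabric_key``,
# which sources config.sh first, so the environment variables read below
# (PROJ_NAME, RELEASE_DIRECTORY, FABRIC_KEY_PATTERN, TASK_DIR_NAME, LAYOUT)
# are expected to be exported by that configuration file.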
import argparse
import logging
import os
import pickle
from glob import glob

import spydrnet as sdn
from spydrnet_physical.util import FabricKeyGenCCFF, FPGAGridGen

logger = logging.getLogger("spydrnet_logs")


def formatter(prog):
    return argparse.HelpFormatter(prog, max_help_position=60)


PROJ_NAME = os.environ["PROJ_NAME"]
RELEASE_DIR = os.environ["RELEASE_DIRECTORY"]
FABRIC_KEY_PATTERN = os.environ["FABRIC_KEY_PATTERN"]
TASK_DIR_NAME = os.environ["TASK_DIR_NAME"]
LAYOUT = os.environ["LAYOUT"]
SVG_DIR = f"{RELEASE_DIR}/svg"
PICKLE_DIR = f"{RELEASE_DIR}/pickle"


class custom_fabric_key(FabricKeyGenCCFF):
    pass

    # def create_fabric_key(self, pattern=None):
    #     Extend or replace if you want


def main():
    """
    Main method to execute the fabric key generation
    """
    # Parse the architecture file and get the layout block
    try:
        VPR_ARCH_FILE = glob((f"{TASK_DIR_NAME}/arch/*vpr*"))[0]
    except IndexError:
        logger.exception(
            "Architecture file not found ['%s/arch/*vpr*']", TASK_DIR_NAME
        )

    # Load the existing grid from generate_shapes
    fpga = pickle.load(open(f"{PICKLE_DIR}/{PROJ_NAME}_fpgagridgen.pickle", "rb"))

    fabric_key = custom_fabric_key(fpga)
    fabric_key.create_fabric_key(FABRIC_KEY_PATTERN)

    filename = os.path.join(SVG_DIR, f"{PROJ_NAME}_CCFF_Chain.svg")
    fabric_key.render_svg(filename=filename)
    fabric_filename = os.path.join(TASK_DIR_NAME, "flow_inputs", "fabric_key.xml")
    fabric_key.save_fabric_key(filename=fabric_filename)


if __name__ == "__main__":
    main()
@ -0,0 +1,886 @@
|
|||
# ##############################################################################
|
||||
# Tool: OpenFPGA-Physical
|
||||
# Script: fpga_grid_gen.py
|
||||
################################################################################
|
||||
'''
|
||||
fpga_render_svg
|
||||
---------------
|
||||
|
||||
TODO: Requires Restructuring
|
||||
This scripts read the layout section of the VPR architecture file and
|
||||
render the layout in SVG format. The outputs are stored in the release
|
||||
directory (location passed as an argument) to this script.
|
||||
|
||||
python3.8 RenderArchitectureSVG.py \
|
||||
--design_name FPGA66_flex \
|
||||
--arch_file example_files/vpr_arch_render_demo.xml \
|
||||
--layout dp \
|
||||
--skip_channels \
|
||||
--output_root _release
|
||||
|
||||
'''
|
||||
|
||||
import argparse
|
||||
import csv
|
||||
import logging
|
||||
import math
|
||||
import os
|
||||
import pickle
|
||||
import sys
|
||||
import xml.etree.ElementTree as ET
|
||||
from importlib import util
|
||||
# Printing and logging related packages
|
||||
from pprint import pprint
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
import svgwrite
|
||||
import yaml
|
||||
from svgwrite.container import Group
|
||||
from openfpga_physical import fpga_grid_gen
|
||||
if util.find_spec("coloredlogs"):
|
||||
import coloredlogs
|
||||
|
||||
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
|
||||
# Configure logging system
|
||||
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
|
||||
LOG_FORMAT = "%(levelname)5s (%(threadName)15s) - %(message)s"
|
||||
if util.find_spec("coloredlogs"):
|
||||
coloredlogs.install(level='INFO', stream=sys.stdout,
|
||||
fmt=LOG_FORMAT)
|
||||
else:
|
||||
logging.basicConfig(level=logging.INFO, stream=sys.stdout,
|
||||
format=LOG_FORMAT)
|
||||
logger = logging.getLogger('RenderArchitecureSVG_logs')
|
||||
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
|
||||
|
||||
|
||||
def formatter(prog): return argparse.HelpFormatter(prog, max_help_position=60)
|
||||
|
||||
|
||||
parser = argparse.ArgumentParser(formatter_class=formatter)
|
||||
|
||||
# Mandatory arguments
|
||||
parser.add_argument('--design_name', type=str, default="FPGA22_QLAP3_SOFA_HD")
|
||||
parser.add_argument('--arch_file', type=str, default="vpr_arch.xml")
|
||||
parser.add_argument('--layout', type=str, default="dp")
|
||||
|
||||
# Configuration files
|
||||
parser.add_argument('--shaping_conf_file', type=str, default="")
|
||||
parser.add_argument('--area_file', type=str, default="")
|
||||
parser.add_argument('--output_root', type=str, default="release")
|
||||
# Rendering Related Parameters
|
||||
parser.add_argument('--physical', action='store_true')
|
||||
parser.add_argument('--clear_color', action='store_true')
|
||||
parser.add_argument('--skip_channels', action='store_true')
|
||||
parser.add_argument('--add_pads', action='store_true')
|
||||
parser.add_argument('--cmap', action='store_true')
|
||||
parser.add_argument('--debug', action='store_true')
|
||||
args = parser.parse_args()
|
||||
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
|
||||
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
|
||||
|
||||
|
||||
area, width, height = 0, 1, 2
|
||||
CPP = 4
|
||||
SC_HEIGHT = 4
|
||||
LOGICAL = not args.physical
|
||||
|
||||
|
||||
def main():
|
||||
grid = fpga_grid_gen(
|
||||
design_name=args.design_name,
|
||||
arch_file=args.arch_file,
|
||||
layout=args.layout,
|
||||
release_root="")
|
||||
FPGA_SIZE = tuple([grid.get_width(), grid.get_height()])
|
||||
|
||||
DESIGN_NAME = args.design_name
|
||||
SaveLocation = [args.output_root, ]
|
||||
for eachLoc in ["data", "pickle", "TCL", "SVG", "ConnectNets", "PNG"]:
|
||||
os.makedirs(os.path.join(*SaveLocation, eachLoc), exist_ok=True)
|
||||
# = = = = = = Create dummy area file = = = = = = = = = = = = = = = = = = =
|
||||
# In case of a logical model, create an intermediate area file
|
||||
if LOGICAL:
|
||||
args.area_file = "_areafile.txt"
|
||||
with open(args.area_file, "w") as fp:
|
||||
CLBA, CBA = (32*32, 40)
|
||||
fp.write(f"grid_clb_1__1_ {CLBA} 0 0 0\n")
|
||||
fp.write(f"cbx_1__1_ {CBA} 0 0 0\n")
|
||||
|
||||
# = = = = = = Shaping Configuration file = = = = = = = = = = = = = = = = =
|
||||
args.shaping_conf_file = "_shapingConf.yml"
|
||||
with open(args.shaping_conf_file, "w") as file:
|
||||
yaml.dump({"CLB_CHAN_X": 10,
|
||||
"SC_RATIO": SC_HEIGHT/CPP,
|
||||
"CPP": CPP,
|
||||
"SC_HEIGHT": SC_HEIGHT,
|
||||
"GRID_CLB_RATIO": 1,
|
||||
"CBX_WIDTH_RATIO": 0.6,
|
||||
"CBY_HEIGHT_RATIO": 0.6,
|
||||
"GRID_RATIO_X": 1.4,
|
||||
"GRID_RATIO_Y": 1.4,
|
||||
"CBx_CHAN_X": 0,
|
||||
"gridIO_HT": 10,
|
||||
"gridIO_HB": 10,
|
||||
"gridIO_WL": 12,
|
||||
"gridIO_WR": 12,
|
||||
"CBx_CHAN_Y": 0}, file)
|
||||
|
||||
# = = = = = = Read Pinmap = = = = = = = = = = = = = = = = = = = = = = = =
|
||||
FPGAShape = FPGAShaping(FPGA_SIZE[0], FPGA_SIZE[1],
|
||||
debug=args.debug,
|
||||
areaFile=args.area_file,
|
||||
shapingConf=args.shaping_conf_file,
|
||||
gridIO=LOGICAL,
|
||||
arch_file=args.arch_file,
|
||||
layout_name=args.layout,
|
||||
padFile=None)
|
||||
|
||||
FPGAShape.ComputeGrid(skipChannels=args.skip_channels)
|
||||
FPGAShape.CreateDatabase()
|
||||
# = = = = = = = = Setting up SVG Canvas = = = = = = = = = = = = = = = = = =
|
||||
CoreBBox = (0, 0, int(FPGAShape.CLB_GRID_X*(FPGA_SIZE[0]+1))*CPP,
|
||||
int(FPGAShape.CLB_GRID_Y*(FPGA_SIZE[1]+1))*SC_HEIGHT)
|
||||
svgFilepath = os.path.join(
|
||||
*SaveLocation, "SVG", DESIGN_NAME + '_Render.svg')
|
||||
|
||||
dwg = svgwrite.Drawing(svgFilepath, CoreBBox[2:])
|
||||
dwg.viewbox(0, -1*CoreBBox[3], CoreBBox[2], CoreBBox[3])
|
||||
|
||||
dwgMain = dwg.add(Group(id="main", transform="scale(1,-1)"))
|
||||
dwgMain.add(dwg.rect(size=CoreBBox[2:],
|
||||
id="core_boundary",
|
||||
class_="boundary",
|
||||
stroke="grey",
|
||||
fill="none",
|
||||
stroke_width=2))
|
||||
dwg.defs.add(dwg.style("""
|
||||
text{font-family: Verdana; }
|
||||
.cbx_1__0__def{fill:#d9d9f3}
|
||||
.cbx_1__1__def{fill:#d9d9f3}
|
||||
.cbx_1__2__def{fill:#d9d9f3}
|
||||
|
||||
.cby_0__1__def{fill:#a8d0db}
|
||||
.cby_1__1__def{fill:#a8d0db}
|
||||
.cby_2__1__def{fill:#a8d0db}
|
||||
|
||||
.sb_0__0__def{fill:#ceefe4}
|
||||
.sb_0__1__def{fill:#ceefe4}
|
||||
.sb_0__2__def{fill:#ceefe4}
|
||||
.sb_1__0__def{fill:#ceefe4}
|
||||
.sb_1__1__def{fill:#ceefe4}
|
||||
.sb_1__2__def{fill:#ceefe4}
|
||||
.sb_2__0__def{fill:#ceefe4}
|
||||
.sb_2__1__def{fill:#ceefe4}
|
||||
.sb_2__2__def{fill:#ceefe4}
|
||||
|
||||
.grid_def{fill:#f4f0e6;}
|
||||
"""))
|
||||
dwgShapes = dwgMain.add(Group(id="mainShapes"))
|
||||
dwgText = dwgMain.add(Group(id="mainText"))
|
||||
dwgGridShapes = dwgMain.add(Group(id="gridShapes"))
|
||||
dwgGridText = dwgMain.add(Group(id="gridText"))
|
||||
dwgMarker = dwgMain.add(Group(id="mainMarker"))
|
||||
|
||||
# Add grid markers
|
||||
for i in range(1, FPGAShape.sizeX+1):
|
||||
dwgMarker.add(dwg.line(start=(i*FPGAShape.CLB_GRID_X*CPP, CoreBBox[1]),
|
||||
end=(i*FPGAShape.CLB_GRID_X*CPP, CoreBBox[3]),
|
||||
class_="marker"))
|
||||
for i in range(1, FPGAShape.sizeY+1):
|
||||
dwgMarker.add(dwg.line(start=(CoreBBox[0],
|
||||
i*FPGAShape.CLB_GRID_Y*SC_HEIGHT),
|
||||
end=(CoreBBox[2], i *
|
||||
FPGAShape.CLB_GRID_Y*SC_HEIGHT),
|
||||
class_="marker"))
|
||||
|
||||
defList = {}
|
||||
UniqueModules = [i["module"] for _, i in FPGAShape.PlacementDBKey.items()]
|
||||
UniqueModules = set(UniqueModules)
|
||||
UniqueModules = list(
|
||||
filter(lambda x: not "grid_io" in x, set(UniqueModules)))
|
||||
|
||||
for eachModule in sorted(FPGAShape.PlacementDB, key=lambda x: "y" if "grid_clb" in x else 'z' if "grid" in x else x):
|
||||
if (not args.add_pads) and ("grid_io" in eachModule):
|
||||
continue
|
||||
# Shape Definitions
|
||||
mouleinfo = FPGAShape.PlacementDBKey[eachModule]
|
||||
Module = mouleinfo["module"]
|
||||
if not Module in defList.keys():
|
||||
defList[Module] = dwg.symbol(id=Module)
|
||||
xx = min([i[0] for i in mouleinfo["shape"]])
|
||||
yy = min([i[1] for i in mouleinfo["shape"]])
|
||||
|
||||
if len(mouleinfo["shape"]) == 1:
|
||||
llx, lly, w, h = mouleinfo["shape"][0]
|
||||
defList[Module].add(dwg.rect(size=(float(w)*CPP, float(h)*SC_HEIGHT),
|
||||
insert=((llx-xx)*CPP,
|
||||
(lly-yy)*SC_HEIGHT),
|
||||
# fill=mouleinfo["color"],
|
||||
fill_opacity="1" if "grid" in Module else "0.7",
|
||||
stroke="black"))
|
||||
elif len(mouleinfo["shape"]) == 2:
|
||||
Points = f"M "
|
||||
Points += f"{mouleinfo['points'][0]*CPP} "
|
||||
Points += f"{mouleinfo['points'][1]*SC_HEIGHT} "
|
||||
Points += f"L "
|
||||
|
||||
for X, Y in np.reshape(np.array(mouleinfo['points'][2:]),
|
||||
(int(len(mouleinfo['points'])*0.5)-1, 2)):
|
||||
Points += f"{X*CPP} "
|
||||
Points += f"{Y*SC_HEIGHT} "
|
||||
defList[Module].add(dwg.path(d=Points + " z",
|
||||
fill=mouleinfo["color"],
|
||||
fill_opacity="0.7",
|
||||
stroke="black"))
|
||||
dwg.defs.add(defList[Module])
|
||||
|
||||
# Instantiation
|
||||
x1, y1, w, h = mouleinfo["bbox"]
|
||||
shapedwg = dwgGridShapes if "grid" in Module else dwgShapes
|
||||
shapedwg.add(dwg.use(defList[Module],
|
||||
class_=f"{Module}_def {eachModule}" +
|
||||
(" grid_def" if "grid" in Module else ""),
|
||||
insert=((x1*CPP), (y1*SC_HEIGHT))))
|
||||
|
||||
# # Uncomment following lines to print center of module
|
||||
# dwgShapes.add(dwg.circle(
|
||||
# center=(mouleinfo['center'][0]*CPP,
|
||||
# mouleinfo['center'][1]*SC_HEIGHT),
|
||||
# r=2,fill="red"))
|
||||
|
||||
x1, y1 = mouleinfo["center"]
|
||||
textdwg = dwgGridText if "grid" in Module else dwgText
|
||||
textdwg.add(dwg.text(f" {mouleinfo['short_name']} ",
|
||||
insert=((x1*CPP), (-1*y1*SC_HEIGHT)),
|
||||
transform="scale(1,-1)",
|
||||
class_="moduleLabel",
|
||||
fill="black",
|
||||
alignment_baseline="middle",
|
||||
text_anchor="middle"))
|
||||
if LOGICAL:
|
||||
logger.info("Printing Logical Model")
|
||||
logger.info(f"Rendered image stored in {svgFilepath}")
|
||||
dwg.save(pretty=True, indent=4)
|
||||
pickle.dump(FPGAShape.get_variables(),
|
||||
open(os.path.join(*SaveLocation, "pickle",
|
||||
f"{DESIGN_NAME}_FPGAShapeVars.pickle"), 'wb'))
|
||||
pickle.dump(FPGAShape.PlacementDBKey,
|
||||
open(os.path.join(*SaveLocation, "pickle",
|
||||
f"{DESIGN_NAME}_PlacementDBKey.pickle"), 'wb'))
|
||||
pickle.dump(FPGAShape.PlacementDB,
|
||||
open(os.path.join(*SaveLocation, "pickle",
|
||||
f"{DESIGN_NAME}_PlacementDB.pickle"), 'wb'))
|
||||
pickle.dump(dwg,
|
||||
open(os.path.join(*SaveLocation, "pickle",
|
||||
f"{DESIGN_NAME}_dwg.pickle"), 'wb'))
|
||||
pickle.dump(FPGAShape.grid,
|
||||
open(os.path.join(*SaveLocation, "pickle",
|
||||
f"{DESIGN_NAME}_grid.pickle"), 'wb'))
|
||||
return
|
||||
|
||||
|
||||
class FPGAShaping():
|
||||
"""
|
||||
Accepts the architecture file and layout name to generate a tile database
|
||||
to render as an SVG
|
||||
|
||||
args:
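    sizeX, sizeY (int): FPGA grid dimensions (CLB columns and rows)
    debug (bool): print the computed block dimensions
    areaFile (str): block area report used to derive CLB/CB dimensions
    padFile (str): optional pin-map CSV used to place I/O pads
    gridIO (bool): add grid I/O blocks around the fabric perimeter
    arch_file (str): VPR architecture file
    layout_name (str): fixed_layout name to read from the architecture
    shapingConf (str): YAML file overriding the default shaping configuration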
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, sizeX, sizeY,
|
||||
debug=False,
|
||||
areaFile=None,
|
||||
padFile=None,
|
||||
gridIO=False,
|
||||
arch_file=None,
|
||||
layout_name="",
|
||||
shapingConf=None):
|
||||
self.sizeX = sizeX
|
||||
self.sizeY = sizeY
|
||||
self.PlacementDB = []
|
||||
self.PlacementDBKey = {}
|
||||
self.GPIOPlacmentKey = []
|
||||
self.debug = debug
|
||||
|
||||
self.get_default_configuration()
|
||||
|
||||
self.areaFile = areaFile
|
||||
self.padFile = padFile
|
||||
self.gridIO = gridIO
|
||||
self.PadNames = {}
|
||||
|
||||
self.skipChannels = False
|
||||
|
||||
# Color Setting
|
||||
self.CLB_COLOR = "#f4f0e6"
|
||||
self.CBX_COLOR = "#d9d9f3"
|
||||
self.CBY_COLOR = "#a8d0db"
|
||||
self.SB_COLOR = "#ceefe4"
|
||||
self.PAD_COLOR = "#204969"
|
||||
self.GRID_IO_COLOR = "#ff8000"
|
||||
|
||||
# Pads Related
|
||||
self.pad_w = 80
|
||||
self.pad_h = 10
|
||||
|
||||
self.arch_file = arch_file
|
||||
self.layout_name = layout_name
|
||||
if shapingConf:
|
||||
self.update_default_configuration(shapingConf)
|
||||
|
||||
self.grid = fpga_grid_gen("", arch_file, layout_name, "")
|
||||
self.fpga_grid = self.grid.enumerate_grid()
|
||||
self.grid.print_grid()
|
||||
|
||||
def update_default_configuration(self, shapingConf):
|
||||
with open(shapingConf, "r") as file:
|
||||
for eachKey, eachValue in yaml.load(file, Loader=yaml.FullLoader).items():
|
||||
setattr(self, eachKey, eachValue)
|
||||
|
||||
def get_default_configuration(self):
|
||||
# Grid clb shape
|
||||
self.SC_RATIO = 8 # This is SC_HEIGHT/CPP of the standard cell
|
||||
self.GRID_CLB_RATIO = 1 # Actual width/height ratio of the CLB unit
|
||||
|
||||
# Connection box size
|
||||
self.GRID_RATIO_X, self.GRID_RATIO_Y = 1.34, 1.48
|
||||
self.CBX_WIDTH_RATIO, self.CBY_HEIGHT_RATIO = 0.6, 0.6
|
||||
|
||||
# Channel spacing between blocks
|
||||
self.CLB_CHAN_T, self.CLB_CHAN_B = 1, 1
|
||||
self.CLB_CHAN_L, self.CLB_CHAN_R = 4, 4
|
||||
|
||||
self.CBX_CHAN_T, self.CBX_CHAN_B = 0, 0
|
||||
self.CBX_CHAN_L, self.CBX_CHAN_R = 4, 4
|
||||
|
||||
self.CBY_CHAN_T, self.CBY_CHAN_B = 1, 1
|
||||
self.CBY_CHAN_L, self.CBY_CHAN_R = 0, 0
|
||||
|
||||
self.gridIO_MT, self.gridIO_MB = 1, 1
|
||||
self.gridIO_ML, self.gridIO_MR = 2, 2
|
||||
|
||||
self.gridIO_HT, self.gridIO_HB = 10, 10
|
||||
self.gridIO_WL, self.gridIO_WR = 48, 48
|
||||
|
||||
self.GRID_IOV_H_RATIO = 1
|
||||
self.GRID_IOH_W_RATIO = 1
|
||||
|
||||
# TODO: Deprecate this
|
||||
self.GPIO_CHAN_X, self.GPIO_CHAN_Y = 12, 4
|
||||
self.GPIO_WIDTH, self.GPIO_HEIGHT = 40, 8
|
||||
|
||||
def get_variables(self):
|
||||
return {
|
||||
"CPP": self.CPP,
|
||||
"SC_HEIGHT": self.SC_HEIGHT,
|
||||
"CLB_COLOR": self.CLB_COLOR,
|
||||
"CBX_COLOR": self.CBX_COLOR,
|
||||
"CBY_COLOR": self.CBY_COLOR,
|
||||
"SB_COLOR": self.SB_COLOR,
|
||||
"PAD_COLOR": self.PAD_COLOR,
|
||||
"GRID_IO_COLOR": self.GRID_IO_COLOR,
|
||||
"CORE_BBOX": (0, 0, int(self.CLB_GRID_X*(self.sizeX+1)),
|
||||
int(self.CLB_GRID_Y*(self.sizeY+1)))
|
||||
}
|
||||
|
||||
def figSize(self):
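# Heuristic matplotlib figure size and DPI, scaled with the FPGA grid size
# (larger fabrics are rendered at a lower DPI to keep the figure manageable).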
|
||||
size = (4+(1*self.sizeX), 4+(1*self.sizeY))
|
||||
if self.sizeX < 16:
|
||||
dpi = 300
|
||||
elif self.sizeX < 64:
|
||||
dpi = 100
|
||||
else:
|
||||
dpi = 50
|
||||
return {"size": size, "dpi": dpi}
|
||||
|
||||
def snapDims(self, num, dim=2):
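# Round `num` up to the nearest multiple of `dim`, keeping block dimensions
# aligned to the placement grid.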
|
||||
return math.ceil(num/dim)*dim
|
||||
|
||||
def setChannelSpacing(self, module, X, Y):
|
||||
raise NotImplementedError
|
||||
|
||||
def ComputeGrid(self, skipChannels=False):
|
||||
self.skipChannels = skipChannels
|
||||
if self.areaFile:
|
||||
BlockArea = {}
|
||||
for eachLine in open(self.areaFile, "r"):
|
||||
module, dims = eachLine.split(" ", 1)
|
||||
BlockArea[module] = list(map(float, list(dims.split())))
|
||||
self.CLB_DIM = BlockArea["grid_clb_1__1_"]
|
||||
self.CB_DIM = BlockArea["cbx_1__1_"]
|
||||
# self.CLB_DIM = math.floor(BlockArea["grid_clb_1__1_"][1]*0.5)*2
|
||||
# self.CB_DIM = [self.CLB_DIM*0.6, 0, 0]
|
||||
else:
|
||||
self.CLB_DIM = [2500, 24*8, 24]
|
||||
self.CB_DIM = [2500*0.6, 0, 0]
|
||||
|
||||
# Snap CLB Height and Width to next Multiple of 2
|
||||
self.CLB_UNIT = math.sqrt(
|
||||
self.CLB_DIM[area]/(self.GRID_CLB_RATIO*self.SC_RATIO))
|
||||
|
||||
self.CLB_H = self.snapDims(self.CLB_UNIT, 2)
|
||||
self.CLB_W = self.snapDims(self.CLB_DIM[area]/self.CLB_H, 2)
|
||||
|
||||
self.CLB_GRID_X = self.snapDims(self.CLB_W*self.GRID_RATIO_X, 2)
|
||||
self.CLB_GRID_Y = self.snapDims(self.CLB_H*self.GRID_RATIO_Y, 2)
|
||||
|
||||
self.CBX_W = self.snapDims(self.CLB_W*self.CBX_WIDTH_RATIO, 2)
|
||||
self.CBX_H = self.CLB_GRID_Y-self.CLB_H
|
||||
|
||||
self.CBY_W = self.CLB_GRID_X-self.CLB_W
|
||||
self.CBY_H = self.snapDims(self.CLB_H*self.CBY_HEIGHT_RATIO, 2)
|
||||
|
||||
self.SB_W = self.CLB_GRID_X - self.CBX_W
|
||||
self.SB_H = self.CLB_GRID_Y - self.CBY_H
|
||||
self.SIDE_X = self.CLB_GRID_X - self.CLB_W
|
||||
self.SIDE_Y = self.CLB_GRID_Y - self.CLB_H
|
||||
|
||||
self.GRID_IOV_H = self.CLB_H*self.GRID_IOV_H_RATIO
|
||||
self.GRID_IOH_W = self.CLB_W*self.GRID_IOH_W_RATIO
|
||||
|
||||
if self.debug:
|
||||
print(f"self.CLB_W {self.CLB_W}")
|
||||
print(f"self.CLB_H {self.CLB_H}")
|
||||
print(f"self.CLB_GRID_X {self.CLB_GRID_X}")
|
||||
print(f"self.CLB_GRID_Y {self.CLB_GRID_Y}")
|
||||
print(f"self.CBX_W {self.CBX_W}")
|
||||
print(f"self.CBX_H {self.CBX_H}")
|
||||
print(f"self.CBY_W {self.CBY_W}")
|
||||
print(f"self.CBY_H {self.CBY_H}")
|
||||
print(f"self.SB_W {self.SB_W}")
|
||||
print(f"self.SB_H {self.SB_H}")
|
||||
|
||||
if self.padFile:
|
||||
if os.path.exists(self.padFile):
|
||||
print(f"Found PinMapFile {self.padFile}")
|
||||
df_pinMap = pd.read_csv(self.padFile)
|
||||
df_pinMap.rename(columns=lambda x: x.strip(), inplace=True)
|
||||
self.PadNames["L"] = df_pinMap["Remark"]
|
||||
self.PadNames["T"] = df_pinMap["Remark.1"]
|
||||
self.PadNames["R"] = df_pinMap["Remark.2"]
|
||||
self.PadNames["B"] = df_pinMap["Remark.3"]
|
||||
self.NumOfPads = len(df_pinMap.index)
|
||||
|
||||
def CreateDatabase(self):
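# Walk the (sizeX+1) x (sizeY+1) grid and register the SB, CBX, CBY and CLB
# shapes (plus optional grid I/O and pads) into PlacementDB / PlacementDBKey.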
|
||||
# Create Blocks
|
||||
grid_ele_size = {}
|
||||
|
||||
for x in range(self.sizeX+1):
|
||||
for y in range(self.sizeY+1):
|
||||
|
||||
self.add_sb(x, y)
|
||||
if x < self.sizeX:
|
||||
self.add_cbx(x, y)
|
||||
if y < self.sizeY:
|
||||
self.add_cby(x, y)
|
||||
if (x < self.sizeX) and (y < self.sizeY):
|
||||
label = self.fpga_grid[y+1][x+1]
|
||||
if not (label in [self.grid.RIGHT_ARROW, self.grid.UP_ARROW, "EMPTY"]):
|
||||
if not label in grid_ele_size.keys():
|
||||
ele_w, ele_h = self.grid.fpga_arch.tiles[label]
|
||||
grid_ele_size[label] = (ele_w, ele_h)
|
||||
else:
|
||||
ele_w, ele_h = grid_ele_size[label]
|
||||
self.add_clb(x, y, width=ele_w,
|
||||
height=ele_h, lbl=label)
|
||||
# Create gridIOs
|
||||
if self.gridIO:
|
||||
if (y == self.sizeY) and (x < self.sizeX):
|
||||
self.add_gridIOH(x, y, side="top")
|
||||
if (y == 0) and (x < self.sizeX):
|
||||
self.add_gridIOH(x, y, side="bottom")
|
||||
if (x == 0) and (y < self.sizeY):
|
||||
self.add_gridIOV(x, y, side="left")
|
||||
if (x == self.sizeX) and (y < self.sizeY):
|
||||
self.add_gridIOV(x, y, side="right")
|
||||
|
||||
# Create Pins
|
||||
if self.PadNames:
|
||||
for side in ["L", "T", "R", "B"]:
|
||||
for i in range(self.NumOfPads):
|
||||
self.add_pad(side, i, self.PadNames[side][i])
|
||||
return self.PlacementDB
|
||||
|
||||
def add_clb(self, xi, yi, width=1, height=1, lbl="grid_clb"):
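# Register a CLB (or any multi-width/height grid tile) entry: compute its
# bounding box from the grid pitch, shrink it by the channel margins unless
# skipChannels is set, and store the shape in PlacementDBKey.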
|
||||
x, y = (xi+1)*self.CLB_GRID_X, (yi+1)*self.CLB_GRID_Y
|
||||
llx = x-self.snapDims(self.CLB_W*0.5)
|
||||
lly = y-self.snapDims(self.CLB_H*0.5)
|
||||
W1 = self.CLB_W + ((width-1) * self.CLB_GRID_X)
|
||||
H1 = self.CLB_H + ((height-1) * self.CLB_GRID_Y)
|
||||
initShape = [(llx, lly, W1, H1)]
|
||||
x += ((width-1) * self.CLB_GRID_X)*0.5
|
||||
y += ((height-1) * self.CLB_GRID_Y)*0.5
|
||||
if not self.skipChannels:
|
||||
llx += self.CLB_CHAN_L
|
||||
lly += self.CLB_CHAN_B
|
||||
W1 = self.CLB_W-self.CLB_CHAN_L-self.CLB_CHAN_R
|
||||
H1 = self.CLB_H-self.CLB_CHAN_T-self.CLB_CHAN_B
|
||||
block_name = f"grid_{lbl}_{xi+1}__{yi+1}_"
|
||||
short_block_name = f"{lbl}_{xi+1}_{yi+1}"
|
||||
COLOR = self.CLB_COLOR
|
||||
points = [0, 0, 0, self.CLB_H, self.CLB_W, self.CLB_H, self.CLB_W, 0]
|
||||
self.PlacementDB.append(block_name)
|
||||
self.PlacementDBKey[block_name] = {"name": block_name,
|
||||
"short_name": short_block_name,
|
||||
"bbox": [llx, lly,
|
||||
llx+W1, lly+H1],
|
||||
"points": points,
|
||||
"module": f"grid_{lbl}_1__1_",
|
||||
"center": [x, y],
|
||||
"color": COLOR,
|
||||
"shape": [(llx, lly, W1, H1)],
|
||||
"initShape": initShape,
|
||||
"xi": xi,
|
||||
"yi": yi}
|
||||
|
||||
def add_cbx(self, xi, yi, lbl=None):
|
||||
x, y = (xi+1)*self.CLB_GRID_X, (yi+1)*self.CLB_GRID_Y
|
||||
llx = x-self.snapDims((self.CBX_W)*0.5)
|
||||
lly = y-self.snapDims((self.CLB_H*0.5)+self.CBX_H)
|
||||
W1 = self.CBX_W
|
||||
H1 = self.CBX_H
|
||||
initShape = [(llx, lly, W1, H1)]
|
||||
|
||||
if not self.skipChannels:
|
||||
llx += self.CBX_CHAN_L
|
||||
lly += self.CBX_CHAN_B
|
||||
W1 = self.CBX_W-self.CBX_CHAN_L-self.CBX_CHAN_R
|
||||
H1 = self.CBX_H-self.CBX_CHAN_T-self.CBX_CHAN_B
|
||||
|
||||
block_name = f"cbx_{xi+1}__{yi}_"
|
||||
short_block_name = f"CX_{xi+1}_{yi}"
|
||||
points = [0, 0, 0, W1, H1, W1, H1, 0]
|
||||
self.PlacementDB.append(block_name)
|
||||
moduleName = "cbx_1__0_" if yi == 0 else "cbx_1__2_" if yi == self.sizeY else "cbx_1__1_"
|
||||
self.PlacementDBKey[block_name] = {"name": block_name,
|
||||
"short_name": short_block_name,
|
||||
"bbox": [llx, lly, llx+W1, lly+H1],
|
||||
"points": points,
|
||||
"center": [llx+W1*0.5, lly+H1*0.5],
|
||||
"module": moduleName,
|
||||
"color": self.CBX_COLOR,
|
||||
"shape": [(llx, lly, W1, H1)],
|
||||
"initShape": initShape,
|
||||
"xi": xi,
|
||||
"yi": yi}
|
||||
|
||||
def add_cby(self, xi, yi, lbl=None):
|
||||
x, y = (xi+1)*self.CLB_GRID_X, (yi+1)*self.CLB_GRID_Y
|
||||
llx = x-self.snapDims((self.CLB_W*0.5)+self.CBY_W)
|
||||
lly = y-self.snapDims(self.CBY_H)*0.5
|
||||
W1 = self.CBY_W
|
||||
H1 = self.CBY_H
|
||||
initShape = [(llx, lly, W1, H1)]
|
||||
|
||||
if not self.skipChannels:
|
||||
llx += self.CBY_CHAN_L
|
||||
lly += self.CBY_CHAN_B
|
||||
W1 = self.CBY_W-self.CBY_CHAN_L-self.CBY_CHAN_R
|
||||
H1 = self.CBY_H-self.CBY_CHAN_T-self.CBY_CHAN_B
|
||||
|
||||
block_name = f"cby_{xi}__{yi+1}_"
|
||||
short_block_name = f"CY_{xi}_{yi+1}"
|
||||
points = [0, 0, 0, W1, H1, W1, H1, 0]
|
||||
self.PlacementDB.append(block_name)
|
||||
moduleName = "cby_0__1_" if xi == 0 else "cby_2__1_" if xi == self.sizeY else "cby_1__1_"
|
||||
self.PlacementDBKey[block_name] = {"name": block_name,
|
||||
"short_name": short_block_name,
|
||||
"bbox": [llx, lly, llx+W1, lly+H1],
|
||||
"points": points,
|
||||
"center": [llx+W1*0.5, lly+H1*0.5],
|
||||
"module": moduleName,
|
||||
"color": self.CBY_COLOR,
|
||||
"shape": [(llx, lly, W1, H1)],
|
||||
"initShape": initShape,
|
||||
"xi": xi,
|
||||
"yi": yi}
|
||||
|
||||
def get_stype(self, x, y):
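# Classify the switch block at (x, y) into one of the nine variants used to
# index moduleNames in add_sb: 0 -> sb_1__1_ (centre), 1 -> sb_0__0_,
# 2 -> sb_0__1_, 3 -> sb_0__2_, 4 -> sb_1__2_, 5 -> sb_2__2_, 6 -> sb_2__1_,
# 7 -> sb_2__0_, 8 -> sb_1__0_ (corners, edges and blocks next to wide tiles).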
|
||||
if x == 0:
|
||||
if y == 0:
|
||||
return 1
|
||||
elif y == self.sizeY:
|
||||
return 3
|
||||
else:
|
||||
return 2
|
||||
elif x == self.sizeX:
|
||||
if y == 0:
|
||||
return 7
|
||||
elif y == self.sizeY:
|
||||
return 5
|
||||
else:
|
||||
return 6
|
||||
else:
|
||||
if y == 0:
|
||||
return 8
|
||||
elif y == self.sizeY:
|
||||
return 4
|
||||
else:
|
||||
if self.grid.get_block(x, y+1) == self.grid.get_block(x+1, y+1):
|
||||
return 4
|
||||
if self.grid.get_block(x+1, y+1) == self.grid.get_block(x+1, y):
|
||||
return 6
|
||||
if self.grid.get_block(x, y) == self.grid.get_block(x+1, y):
|
||||
return 8
|
||||
if self.grid.get_block(x, y) == self.grid.get_block(x, y+1):
|
||||
return 2
|
||||
else:
|
||||
return 0
|
||||
|
||||
def unique(self, sequence):
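# Drop duplicate (x, y) points while preserving order, then flatten the
# remaining tuples into a single [x0, y0, x1, y1, ...] list.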
|
||||
seen = set()
|
||||
u = [x for x in sequence if not (x in seen or seen.add(x))]
|
||||
return [val for sublist in u for val in sublist]
|
||||
|
||||
def add_sb(self, xi, yi):
|
||||
'''
|
||||
d
|
||||
+----+
|
||||
c | |
|
||||
b | | e
|
||||
+----+ +----+
|
||||
a | | Cross Shape
|
||||
| | -lengths {a b c d e f}
|
||||
+----+ +----+
|
||||
| |
|
||||
| | f
|
||||
+----+
|
||||
'''
|
||||
x = xi*self.CLB_GRID_X
|
||||
y = yi*self.CLB_GRID_Y
|
||||
|
||||
llxB1 = x+(0.5*self.CLB_W)
|
||||
llyB1 = y+(self.CBY_H*0.5)
|
||||
WidthB1 = self.SIDE_X
|
||||
HeightB1 = self.SB_H
|
||||
|
||||
llxB2 = x + (self.CBX_W*0.5)
|
||||
llyB2 = y + (self.CLB_H*0.5)
|
||||
WidthB2 = self.SB_W
|
||||
HeightB2 = self.SIDE_Y
|
||||
|
||||
a = self.SIDE_Y
|
||||
b = e = (WidthB2-self.SIDE_X) * 0.5
|
||||
c = f = (HeightB1-self.SIDE_Y)*0.5
|
||||
d = self.SIDE_X
|
||||
|
||||
Stype = self.get_stype(xi, yi)
|
||||
if Stype == 1: # SB_0__0_
|
||||
llyB1 += c
|
||||
HeightB1 += -c
|
||||
llxB2 += b
|
||||
WidthB2 += -b
|
||||
b = f = 0
|
||||
elif Stype == 2: # SB_0__1_
|
||||
llxB2 += b
|
||||
WidthB2 -= b
|
||||
b = 0
|
||||
elif Stype == 3: # SB_0__2_
|
||||
llxB2 += b
|
||||
WidthB2 -= b
|
||||
HeightB1 -= f
|
||||
c = b = 0
|
||||
elif Stype == 4: # SB_1__2_
|
||||
HeightB1 -= c
|
||||
c = 0
|
||||
elif Stype == 5: # SB_2__2_
|
||||
HeightB1 -= c
|
||||
WidthB2 -= e
|
||||
c = e = 0
|
||||
elif Stype == 6: # SB_2__1_
|
||||
WidthB2 -= e
|
||||
e = 0
|
||||
elif Stype == 7: # SB_2__0_
|
||||
llyB1 += f
|
||||
HeightB1 -= f
|
||||
WidthB2 -= e
|
||||
e = f = 0
|
||||
elif Stype == 8: # SB_1__0_
|
||||
llyB1 += f
|
||||
HeightB1 -= f
|
||||
f = 0
|
||||
|
||||
block_name = f"sb_{xi}__{yi}_"
|
||||
short_block_name = f"SB_{xi}_{yi}"
|
||||
initShape = [(llxB1, llyB1, WidthB1, HeightB1),
|
||||
(llxB2, llyB2, WidthB2, HeightB2)]
|
||||
# initShape = [(llxB1, llyB1, WidthB1, HeightB1), ]
|
||||
# initShape = [(llxB2, llyB2, WidthB2, HeightB2), ]
|
||||
|
||||
points = self.unique([(b, 0), (b, f),
|
||||
(0, f), (0, (f+a)),
|
||||
(b, (f+a)), (b, (a+c+f)),
|
||||
((b+d), (a+c+f)), ((b+d), (a+f)),
|
||||
((b+d+e), (a+f)), ((b+d+e), f),
|
||||
((b+d), f), ((b+d), 0)])
|
||||
self.PlacementDB.append(block_name)
|
||||
moduleNames = [
|
||||
"sb_1__1_", "sb_0__0_", "sb_0__1_",
|
||||
"sb_0__2_", "sb_1__2_", "sb_2__2_",
|
||||
"sb_2__1_", "sb_2__0_", "sb_1__0_",
|
||||
]
|
||||
# ┿ ┗ ┝ ┏ ┯ ┓ ┫ ┛ ┷ ┃ ━
|
||||
llx = min([i[0] for i in initShape])
|
||||
lly = min([i[1] for i in initShape])
|
||||
|
||||
self.PlacementDBKey[block_name] = {"name": block_name,
|
||||
"short_name": short_block_name,
|
||||
"bbox": [llx, lly, llx+f+a+c, lly+b+d+e],
|
||||
"points": points,
|
||||
"center": [llx+(WidthB1*0.5)+b,
|
||||
lly+(HeightB2*0.5)+f],
|
||||
"module": moduleNames[Stype],
|
||||
"color": self.SB_COLOR,
|
||||
"shape": initShape,
|
||||
"xi": xi,
|
||||
"yi": yi,
|
||||
"dims": [a, b, c, d, e, f],
|
||||
"initShape": initShape}
|
||||
|
||||
def add_gridIOH(self, xi, yi, side, lbl=None):
|
||||
x, y = (xi+1)*self.CLB_GRID_X, (yi+1)*self.CLB_GRID_Y
|
||||
llx = x-self.snapDims((self.GRID_IOH_W)*0.5)
|
||||
lly = y-self.snapDims((self.CLB_H*0.5)+self.CBX_H)
|
||||
lly += (-1*self.gridIO_HB) if side == "bottom" else self.CBX_H
|
||||
W1 = self.GRID_IOH_W
|
||||
H1 = self.gridIO_HB
|
||||
initShape = [(llx, lly, W1, H1)]
|
||||
|
||||
if not self.skipChannels:
|
||||
llx += self.CBX_CHAN_L
|
||||
lly += 0 if side == "bottom" else self.gridIO_MT
|
||||
W1 = self.GRID_IOH_W-self.CBX_CHAN_L-self.CBX_CHAN_R
|
||||
H1 = self.gridIO_HB-self.gridIO_MB
|
||||
|
||||
if side == "bottom":
|
||||
moduleName = "grid_io_bottom_bottom"
|
||||
block_name = f"grid_io_{side}_{side}_{xi+1}__{yi}_"
|
||||
short_block_name = f"io{side}_{xi+1}_{yi}"
|
||||
else:
|
||||
moduleName = "grid_io_top_top"
|
||||
block_name = f"grid_io_{side}_{side}_{xi+1}__{yi+1}_"
|
||||
short_block_name = f"io{side}_{xi+1}_{yi+1}"
|
||||
points = [0, 0, 0, W1, H1, W1, H1, 0]
|
||||
self.PlacementDB.append(block_name)
|
||||
|
||||
self.PlacementDBKey[block_name] = {"name": block_name,
|
||||
"short_name": short_block_name,
|
||||
"bbox": [llx, lly, llx+W1, lly+H1],
|
||||
"points": points,
|
||||
"center": [llx+W1*0.5, lly+H1*0.5],
|
||||
"module": moduleName,
|
||||
"color": self.GRID_IO_COLOR,
|
||||
"shape": [(llx, lly, W1, H1)],
|
||||
"initShape": initShape}
|
||||
|
||||
def add_gridIOV(self, xi, yi, side, lbl=None):
|
||||
x, y = (xi+1)*self.CLB_GRID_X, (yi+1)*self.CLB_GRID_Y
|
||||
llx = x-self.snapDims((self.CLB_W*0.5)+self.CBY_W)
|
||||
lly = y-self.snapDims(self.GRID_IOV_H)*0.5
|
||||
llx += (-1*(self.gridIO_WL)) if side == "left" else self.CBY_W
|
||||
W1 = self.gridIO_WL
|
||||
H1 = self.GRID_IOV_H
|
||||
initShape = [(llx, lly, W1, H1)]
|
||||
|
||||
if not self.skipChannels:
|
||||
llx += self.CBY_CHAN_L
|
||||
llx += (-1*self.gridIO_ML) if side == "left" else self.gridIO_MR
|
||||
lly += self.CBY_CHAN_B
|
||||
W1 = self.gridIO_WL-self.gridIO_ML
|
||||
H1 = self.GRID_IOV_H-self.CBY_CHAN_T-self.CBY_CHAN_B
|
||||
|
||||
if side == "left":
|
||||
block_name = f"grid_io_{side}_{side}_{xi}__{yi+1}_"
|
||||
short_block_name = f"io{side}_{xi}_{yi+1}"
|
||||
moduleName = "grid_io_left_left"
|
||||
else:
|
||||
block_name = f"grid_io_{side}_{side}_{xi+1}__{yi+1}_"
|
||||
short_block_name = f"io{side}_{xi+1}_{yi+1}"
|
||||
moduleName = "grid_io_right_right"
|
||||
points = [0, 0, 0, W1, H1, W1, H1, 0]
|
||||
self.PlacementDB.append(block_name)
|
||||
|
||||
self.PlacementDBKey[block_name] = {"name": block_name,
|
||||
"short_name": short_block_name,
|
||||
"bbox": [llx, lly, llx+W1, lly+H1],
|
||||
"points": points,
|
||||
"center": [llx+W1*0.5, lly+H1*0.5],
|
||||
"module": moduleName,
|
||||
"color": self.GRID_IO_COLOR,
|
||||
"shape": [(llx, lly, W1, H1)],
|
||||
"initShape": initShape}
|
||||
|
||||
def add_pad(self, side="L", number=0, padname="xx"):
|
||||
CoreMinX, CoreMinY = (0.5*self.CLB_W), (0.5*self.CLB_H)
|
||||
CoreMaxX, CoreMaxY = (((self.sizeX+0.5) * self.CLB_GRID_X)+0.5*self.CBY_W,
|
||||
((self.sizeY+0.5) * self.CLB_GRID_Y)+0.5*self.CBX_H)
|
||||
if side in ["L", "R"]:
|
||||
pad_w = self.pad_w
|
||||
pad_h = (((self.CLB_H+self.CBX_H)*self.sizeY+1) +
|
||||
self.CBX_H)/self.NumOfPads
|
||||
shift = (number*pad_h)
|
||||
initialshitX = (self.CLB_GRID_Y - self.CBX_H-(self.CLB_H*0.5))
|
||||
initialshitY = (self.CLB_GRID_X - self.CBY_W-(self.CLB_W*0.5))
|
||||
pad_spacing = 24
|
||||
if side == "L":
|
||||
pad_x = CoreMinX - (pad_w*0.5) - pad_spacing
|
||||
pad_y = initialshitX + shift + pad_h*0.5
|
||||
pad_llx = pad_x - (pad_w*0.5)
|
||||
pad_lly = pad_y - (pad_h*0.5)
|
||||
pad_w, pad_h = pad_w, pad_h
|
||||
rot = 0
|
||||
t = 0.5
|
||||
elif side == "R":
|
||||
pad_x = CoreMaxX + (pad_w*0.5) + pad_spacing
|
||||
pad_y = initialshitX + shift + pad_h*0.5
|
||||
pad_llx = pad_x - (pad_w*0.5)
|
||||
pad_lly = pad_y - (pad_h*0.5)
|
||||
pad_w, pad_h = pad_w, pad_h
|
||||
rot = 0
|
||||
t = 0.5
|
||||
else:
|
||||
pad_w = (((self.CLB_W+self.CBY_W)*self.sizeX+1) +
|
||||
self.CBY_W)/self.NumOfPads
|
||||
pad_h = self.pad_h
|
||||
shift = (number*pad_w)
|
||||
initialshitY = (self.CLB_GRID_X - self.CBY_W-(self.CLB_W*0.5))
|
||||
pad_spacing = 3
|
||||
if side == "T":
|
||||
pad_x = initialshitY + shift + pad_w*0.5
|
||||
pad_y = CoreMaxY + pad_spacing + pad_h*0.5
|
||||
pad_llx = pad_x - (0.5*pad_w)
|
||||
pad_lly = pad_y - pad_h*0.5
|
||||
pad_w, pad_h = pad_w, pad_h
|
||||
rot = 90
|
||||
t = 0.5
|
||||
elif side == "B":
|
||||
pad_x = initialshitY + shift + pad_w*0.5
|
||||
pad_y = CoreMinY - pad_spacing - pad_h*0.5
|
||||
pad_llx = pad_x - (0.5*pad_w)
|
||||
pad_lly = pad_y - pad_h*0.5
|
||||
pad_w, pad_h = pad_w, pad_h
|
||||
rot = -90
|
||||
t = 0.5
|
||||
|
||||
self.GPIOPlacmentKey.append(
|
||||
{
|
||||
"side": side,
|
||||
"rot": rot,
|
||||
"text": padname.strip(),
|
||||
"shape": [(pad_llx, pad_lly, pad_w, pad_h)],
|
||||
"color": self.PAD_COLOR,
|
||||
"center": [pad_x, pad_y],
|
||||
}
|
||||
)
|
||||
|
||||
def moduleFmt(self, mod, X, Y):
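# Format an instance name as "<mod>_<X>__<Y>_", following the OpenFPGA grid
# naming convention.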
|
||||
return f"{mod}_{X}__{Y}_"
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -0,0 +1,2 @@
|
|||
|
||||
from .fpga_grid_gen import fpga_grid_gen
|
|
@ -0,0 +1,30 @@
|
|||
<!-- Minimal VPR architecture file to demonstrate FPGA render script -->
|
||||
<architecture>
|
||||
<tiles>
|
||||
<tile name="io_top" capacity="6" area="0"/>
|
||||
<tile name="io_right" capacity="6" area="0"/>
|
||||
<tile name="io_bottom" capacity="6" area="0"/>
|
||||
<tile name="io_left" capacity="6" area="0"/>
|
||||
<tile name="clb" width="1" height="1" area="0"/>
|
||||
<tile name="dsp" width="1" height="2" area="0"/>
|
||||
<tile name="ram9k" width="2" height="2" area="0"/>
|
||||
</tiles>
|
||||
|
||||
<layout tileable="true" through_channel="true">
|
||||
<fixed_layout name="dp" width="8" height="8">
|
||||
<!--Perimeter of 'io' blocks with 'EMPTY' blocks at corners-->
|
||||
<row type="io_top" starty="H-1" priority="100"/>
|
||||
<row type="io_bottom" starty="0" priority="100"/>
|
||||
<col type="io_left" startx="0" priority="100"/>
|
||||
<col type="io_right" startx="W-1" priority="100"/>
|
||||
<corners type="EMPTY" priority="101"/>
|
||||
<!--Fill with 'clb'-->
|
||||
<fill type="clb" priority="10"/>
|
||||
<!--Row of DSP with 'EMPTY' blocks wherever a DSP does not fit. Vertical offset by 1 for perimeter.-->
|
||||
<row type="dsp" startx="1" starty="3" incrx="2" priority="20"/>
|
||||
<!--Row of RAM9K with 'EMPTY' blocks wherever a RAM9k does not fit. Vertical offset by 1 for perimeter.-->
|
||||
<!-- <row type="ram9k" startx="1" starty="4" repeaty="6" priority="20"/> -->
|
||||
<region type="ram9k" startx="1" endx="2" starty="1" endy="3" priority="20"/>
|
||||
</fixed_layout>
|
||||
</layout>
|
||||
</architecture>
|
|
@ -0,0 +1,241 @@
|
|||
# ##############################################################################
|
||||
# Tool: OpenFPGA-Physical
|
||||
# Script: generate_fabric_key.py
|
||||
# Description : This script creates a fabric_key.xml file for a given size of FPGA
|
||||
# Currently this script generates a pattern which routes the configuration chain
|
||||
# from the top-right corner to the bottom-left corner by traversing horizontally
|
||||
# in every row of the FPGA grid
|
||||
################################################################################
|
||||
'''
|
||||
Generates fabric_key.xml for the FPGA configuration chain and renders the chain over the SVG layout
|
||||
'''
|
||||
import argparse
|
||||
import os
|
||||
import pickle
|
||||
import xml.etree.ElementTree as ET
|
||||
from pprint import pprint
|
||||
from xml.dom import minidom
|
||||
|
||||
import svgwrite
|
||||
from svgwrite.container import Group
|
||||
|
||||
from openfpga_physical import fpga_grid_gen
|
||||
|
||||
|
||||
def formatter(prog): return argparse.HelpFormatter(prog, max_help_position=60)
|
||||
|
||||
|
||||
# Mandatory arguments
|
||||
|
||||
def parse_argument():
|
||||
parser = argparse.ArgumentParser(formatter_class=formatter)
|
||||
parser.add_argument('--design_name')
|
||||
parser.add_argument('--arch_file', type=str)
|
||||
parser.add_argument('--layout', type=str)
|
||||
parser.add_argument('--show_gridIO', action="store_true")
|
||||
parser.add_argument('--out_file', type=str, default="fabric_key.xml")
|
||||
parser.add_argument('--release_root', type=str, default="release")
|
||||
parser.add_argument('--pattern_type', type=str,
|
||||
choices=['single', 'vertical', 'horizontal'],
|
||||
default="horizontal")
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def print_grid(grid):
|
||||
"""
|
||||
Prints the 2D FPGA grid on console
|
||||
"""
|
||||
for row in grid[::-1]:
|
||||
for y in row:
|
||||
print(f"{y:^10}", end=" ")
|
||||
print("")
|
||||
|
||||
|
||||
def _get_val(root, param, default, vars={}):
|
||||
"""
|
||||
Parses the startx, starty, repeatx and repeaty variables to integer
|
||||
"""
|
||||
val = root.attrib.get(param, str(default))
|
||||
if val.isnumeric():
|
||||
return int(val)
|
||||
else:
|
||||
val = val.replace("W", "{W}")
|
||||
val = val.replace("H", "{H}")
|
||||
return eval(val.format(**vars))
|
||||
|
||||
|
||||
def _set_value(grid, x, y, value):
|
||||
"""
|
||||
Sets value in the FPGA grid
|
||||
"""
|
||||
try:
|
||||
grid[y][x] = value
|
||||
return 1
|
||||
except IndexError:
|
||||
return 0
|
||||
|
||||
|
||||
def enumerate_grid(root, layout, width, height):
|
||||
'''
|
||||
Enumerate FPGA grid
|
||||
'''
|
||||
block_grid = [[0 for x in range(width)] for y in range(height)]
|
||||
# return block_grid
|
||||
for each in sorted(layout, key=lambda x: int(x.attrib["priority"])):
|
||||
tag = each.tag
|
||||
ele_type = each.attrib["type"]
|
||||
ele = root.find(f".//tile[@name='{ele_type}']")
|
||||
ele_w, ele_h = (int(ele.attrib.get("width", 1)),
|
||||
int(ele.attrib.get("height", 1))) if ele else (1, 1)
|
||||
|
||||
vars = {"W": width, "H": height}
|
||||
startx = _get_val(each, "startx", 0, vars)
|
||||
starty = _get_val(each, "starty", 0, vars)
|
||||
repx = _get_val(each, "repeatx", ele_w if tag ==
|
||||
"row" else width, vars)
|
||||
repy = _get_val(each, "repeaty", ele_h if tag ==
|
||||
"col" else height, vars)
|
||||
repx, repy = (ele_w, ele_h) if tag == "fill" else (repx, repy)
|
||||
repx, repy = (width-1, height-1) if tag == "corners" else (repx, repy)
|
||||
|
||||
for x in range(startx, width, repx):
|
||||
for y in range(starty, height, repy):
|
||||
_set_value(block_grid, x, y, ele_type)
|
||||
for i in range(1, ele_w):
|
||||
_set_value(block_grid, x+1, y, "")
|
||||
for i in range(1, ele_h):
|
||||
_set_value(block_grid, x, y+1, "")
|
||||
return block_grid
|
||||
|
||||
|
||||
def create_vertical_cc(fpga, key):
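# Build one configuration-chain region per fabric column: traverse the column
# bottom-up through the switch blocks, cby blocks and (for the first column)
# the left I/O grids, then top-down through the cbx and grid blocks, and emit
# a <region>/<key> entry in the fabric key for every module in that order.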
|
||||
grid = fpga.grid
|
||||
width = len(grid[0])
|
||||
height = len(grid)
|
||||
print(f"Creating {width-2} configuration chains")
|
||||
start = 0
|
||||
for x in range(1,width):
|
||||
ord_mod = []
|
||||
# These are bottom up connections
|
||||
for y in range(0, height):
|
||||
if y < (height-1):
|
||||
ord_mod.append(f"sb_{x-1}__{y}_")
|
||||
if y < (height-2):
|
||||
if not grid[y+1][x] in [fpga.RIGHT_ARROW, fpga.UP_ARROW]:
|
||||
ord_mod.append(f"cby_{x-1}__{y+1}_")
|
||||
if (x == 1):
|
||||
label = grid[y+1][x-1]
|
||||
ord_mod.append(f"grid_{label}_left_{x-1}__{y+1}_")
|
||||
# These are top down connections
|
||||
for y in range(height-1,-1,-1):
|
||||
if y < (height-1):
|
||||
ord_mod.append(f"cbx_{x}__{y}_")
|
||||
# if x == (width-2):
|
||||
# ord_mod.append(f"sb_{x}__{y}_")
|
||||
# if y > 0:
|
||||
# label = grid[y][x+1]
|
||||
# ord_mod.append(f"cby_{x}__{y}_")
|
||||
# ord_mod.append(f"grid_{label}_right_{x+1}__{y}_")
|
||||
if y == (height-1):
|
||||
label = grid[y][x]
|
||||
ord_mod.append(f"grid_{label}_top_{x}__{y}_")
|
||||
if y < (height-1) and (y > 0):
|
||||
label = grid[y][x]
|
||||
if not label in [fpga.RIGHT_ARROW, fpga.UP_ARROW, "EMPTY"]:
|
||||
label = f"grid_{label}"
|
||||
ord_mod.append(f"{label}_{x}__{y}_")
|
||||
label = grid[y][x]
|
||||
ord_mod.append(f"grid_{label}_bottom_{x}__0_")
|
||||
|
||||
# Add last chain
|
||||
if x == width-1:
|
||||
ord_mod = []
|
||||
ord_mod.append(f"sb_{width-2}__0_")
|
||||
for y in range(1, height-1):
|
||||
ord_mod.append(f"cby_{width-2}__{y}_")
|
||||
ord_mod.append(f"grid_{grid[y][width-1]}_right_{width-1}__{y}_")
|
||||
ord_mod.append(f"sb_{width-2}__{y}_")
|
||||
|
||||
region = ET.SubElement(key, "region", {'id': str(x-1)})
|
||||
for i, each in enumerate(ord_mod):
|
||||
ET.SubElement(region, 'key', {'id': str(start+i), 'alias': each})
|
||||
start += i+1
|
||||
|
||||
|
||||
def save_fabric_key(key, filename):
|
||||
with open(filename, "w") as fp:
|
||||
rough_string = ET.tostring(key, 'utf-8')
|
||||
reparsed = minidom.parseString(rough_string)
|
||||
fp.write(reparsed.toprettyxml(indent=" "))
|
||||
|
||||
|
||||
def main():
|
||||
'''
|
||||
Main method to execute function
|
||||
'''
|
||||
# Parse architecture file and get layout block
|
||||
args = parse_argument()
|
||||
arch = ET.parse(args.arch_file)
|
||||
root = arch.getroot()
|
||||
layout = root.find(f".//fixed_layout[@name='{args.layout}']")
|
||||
assert layout, "Specified layput not found in the architecture file"
|
||||
# Extract height and width parameters and create grid layout
|
||||
width = int(layout.attrib["width"])
|
||||
height = int(layout.attrib["height"])
|
||||
|
||||
# fpga_grid = enumerate_grid(root, layout, width, height)
|
||||
|
||||
grid = fpga_grid_gen(args.design_name, args.arch_file, args.layout, "")
|
||||
grid.enumerate_grid()
|
||||
grid.print_grid()
|
||||
key = ET.Element('fabric_key')
|
||||
if args.pattern_type == "vertical":
|
||||
create_vertical_cc(grid, key)
|
||||
save_fabric_key(key, args.out_file)
|
||||
RenderSVG(key, args.design_name, args.release_root, args.show_gridIO)
|
||||
|
||||
|
||||
def getGroup(dwg, id):
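# Return the SVG <g> element with the given id from the drawing (None if absent).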
|
||||
for ele in dwg.elements:
|
||||
if ele.get_id() == id:
|
||||
return ele
|
||||
|
||||
|
||||
def RenderSVG(FKey, DESIGN_NAME, SaveLocation, show_gridIO=False):
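# Overlay the configuration-chain routing on the previously pickled SVG render:
# the module centres referenced by the fabric key are joined with a red,
# arrow-marked path and the result is saved as <design>_CCFF_Chain.svg.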
|
||||
PlacementDBKey = pickle.load(open(os.path.join(SaveLocation, "pickle",
|
||||
f"{DESIGN_NAME}_PlacementDBKey.pickle"), 'rb'))
|
||||
FPGAShapeVars = pickle.load(open(os.path.join(SaveLocation, "pickle",
|
||||
f"{DESIGN_NAME}_FPGAShapeVars.pickle"), 'rb'))
|
||||
dwg = pickle.load(open(os.path.join(SaveLocation, "pickle",
|
||||
f"{DESIGN_NAME}_dwg.pickle"), 'rb'))
|
||||
ccffSVGPath = os.path.join(
|
||||
SaveLocation, "SVG", DESIGN_NAME + '_CCFF_Chain.svg')
|
||||
|
||||
DRMarker = dwg.marker(refX="30", refY="30",
|
||||
viewBox="0 0 120 120",
|
||||
markerUnits="strokeWidth",
|
||||
markerWidth="8", markerHeight="10", orient="auto")
|
||||
DRMarker.add(dwg.path(d="M 0 0 L 60 30 L 0 60 z", fill="blue"))
|
||||
dwg.defs.add(DRMarker)
|
||||
|
||||
# Add Content
|
||||
dwgMain = getGroup(dwg, "main")
|
||||
dwgChain = dwgMain.add(Group(id="ffChains"))
|
||||
for region in FKey.iter("region"):
|
||||
CCFFChainCenter = []
|
||||
for eachEle in region:
|
||||
module = eachEle.attrib["alias"]
|
||||
if ("io" in module) and (not show_gridIO):
|
||||
continue
|
||||
C = PlacementDBKey[module]["center"]
|
||||
CCFFChainCenter.append(str(C[0]*FPGAShapeVars['CPP']))
|
||||
CCFFChainCenter.append(str(C[1]*FPGAShapeVars['SC_HEIGHT']))
|
||||
dwgChain.add(dwg.path(stroke="red", fill="none", stroke_width=3,
|
||||
marker_mid=DRMarker.get_funciri(),
|
||||
d=f"M{CCFFChainCenter[0]} {CCFFChainCenter[1]} " + " ".join(CCFFChainCenter)))
|
||||
dwg.saveas(ccffSVGPath, pretty=True, indent=4)
|
||||
print(f"SVG file saved as {ccffSVGPath}")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,293 @@
|
|||
# ##############################################################################
|
||||
# Tool: OpenFPGA-Physical
|
||||
# Script: fpga_grid_gen.py
|
||||
################################################################################
|
||||
'''
|
||||
fpga_grid_gen
|
||||
-------------
|
||||
|
||||
This script reads the layout section of the VPR architecture file and
|
||||
creates a 2D matrix of the FPGA grid.
|
||||
'''
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import xml.etree.ElementTree as ET
|
||||
from spydrnet_physical.util.openfpga_arch import OpenFPGA_Arch
|
||||
|
||||
logger = logging.getLogger('spydrnet_logs')
|
||||
def formatter(prog): return argparse.HelpFormatter(prog, max_help_position=60)
|
||||
|
||||
|
||||
help_msg = {
|
||||
"design_name": "Design name, Generally in FPGAxxxx_xxxx format"
|
||||
}
|
||||
|
||||
|
||||
UP_ARROW = chr(8593)
|
||||
RIGHT_ARROW = chr(8594)
|
||||
|
||||
|
||||
def main() -> None:
|
||||
"""
|
||||
Execute when this file called as a script
|
||||
"""
|
||||
args = parse_argument()
|
||||
grid = fpga_grid_gen(args.design_name, args.arch_file,
|
||||
args.layout, args.release_root)
|
||||
grid.enumerate_grid()
|
||||
grid.print_grid()
|
||||
|
||||
|
||||
def parse_argument() -> argparse.Namespace:
|
||||
f"""
|
||||
Parse command line arguments
|
||||
{help_msg['design_name']}
|
||||
"""
|
||||
parser = argparse.ArgumentParser(formatter_class=formatter)
|
||||
parser.add_argument('--design_name',
|
||||
help="Design name, Generally in FPGAxxxx_xxxx format")
|
||||
parser.add_argument('--arch_file', type=str,
|
||||
help="VPR architecture file, It should atleast contain on fixed_layout")
|
||||
parser.add_argument('--layout', type=str, default=None,
|
||||
help="Specific layout name to render from the provided XML file")
|
||||
parser.add_argument('--release_root', type=str, default=None,
|
||||
help="Location to store pickled object of the 2D FPGA grid matrix")
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
class fpga_grid_gen():
|
||||
'''
|
||||
This class generates the 2D matrix of the FPGA grid.
|
||||
|
||||
**Example**:
|
||||
|
||||
python3.8 fpga_grid_gen.py **--design_name** FPGA66_flex
|
||||
**--layout** dp
|
||||
**--arch_file** example_files/vpr_arch_render_demo.xml
|
||||
|
||||
**Expected Output**:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
EMPTY io_top io_top io_top io_top io_top io_top EMPTY
|
||||
io_left clb clb clb clb clb clb io_right
|
||||
io_left clb clb clb clb clb clb io_right
|
||||
io_left ram9k ram9k ram9k io_right
|
||||
io_left clb clb clb clb clb clb io_right
|
||||
io_left dsp dsp dsp io_right
|
||||
io_left clb clb clb clb clb clb io_right
|
||||
EMPTY io_bottom io_bottom io_bottom io_bottom io_bottom io_bottom EMPTY
|
||||
|
||||
'''
|
||||
|
||||
def __init__(self, design_name, arch_file, layout, release_root) -> None:
|
||||
'''
|
||||
args:
|
||||
design_name (str): Design name
|
||||
arch_file (str): Path to architecture file
|
||||
layout (str): Fixed layout selection from architecture file
|
||||
release_root (str): Directory to output binaries
|
||||
'''
|
||||
self.design_name = design_name
|
||||
self.release_root = release_root
|
||||
self.fpga_arch = OpenFPGA_Arch(arch_file, None, layout=layout)
|
||||
# Parse values
|
||||
self.clb = None
|
||||
self.arch_tree = ET.parse(arch_file)
|
||||
self.root = self.arch_tree.getroot()
|
||||
self.layout = self.root.find(f".//fixed_layout[@name='{layout}']")
|
||||
assert layout, "Specified layout not found in the architecture file"
|
||||
self.width = self.fpga_arch.width
|
||||
self.height = self.fpga_arch.height
|
||||
self.pb_type = {}
|
||||
self.grid = [[0 for x in range(self.width)]
|
||||
for y in range(self.height)]
|
||||
self.RIGHT_ARROW = RIGHT_ARROW
|
||||
self.UP_ARROW = UP_ARROW
|
||||
|
||||
def get_width(self):
|
||||
''' Get width of FPGA '''
|
||||
return self.width-2
|
||||
|
||||
def get_height(self):
|
||||
''' Get height of FPGA '''
|
||||
return self.height-2
|
||||
|
||||
def get_block_size(self, block):
|
||||
''' Get width of FPGA '''
|
||||
return self.pb_type[block]
|
||||
|
||||
def print_grid(self):
|
||||
"""
|
||||
Prints the 2D FPGA grid on console
|
||||
|
||||
"""
|
||||
for row in self.grid[::-1]:
|
||||
for y in row:
|
||||
print(f"{y:^10}", end=" ")
|
||||
print("")
|
||||
|
||||
def get_block(self, x, y):
|
||||
'''
|
||||
This method returns the module present at a specific x and y
|
||||
coordinate. The return value contains the module name and the
|
||||
adjusted X and Y coordinates
|
||||
'''
|
||||
value = self.grid[y][x]
|
||||
while value in [self.RIGHT_ARROW, self.UP_ARROW]:
|
||||
if value == self.UP_ARROW:
|
||||
y -= 1
|
||||
if value == self.RIGHT_ARROW:
|
||||
x -= 1
|
||||
if x < 1 and y < 1:
|
||||
break
|
||||
value = self.grid[y][x]
|
||||
return value, x, y
|
||||
|
||||
@staticmethod
|
||||
def _get_val(ele, param, default, vars={}):
|
||||
"""
|
||||
Parses the startx, starty, repeatx and repeaty variables to integer
|
||||
"""
|
||||
val = ele.attrib.get(param, str(default))
|
||||
if val.isnumeric():
|
||||
return int(val)
|
||||
else:
|
||||
val = val.replace("W", "{W}")
|
||||
val = val.replace("H", "{H}")
|
||||
return int(eval(val.format(**vars)))
|
||||
|
||||
def _set_value(self, x, y, value, width=1, height=1):
|
||||
"""
|
||||
Sets value in the FPGA grid
|
||||
"""
|
||||
try:
|
||||
for xi in range(0, width):
|
||||
for yi in range(0, height):
|
||||
self.grid[y+yi][x+xi] = value if (xi, yi) == (0, 0) else \
|
||||
RIGHT_ARROW if yi == 0 else UP_ARROW
|
||||
return 1
|
||||
except IndexError:
|
||||
logger.warning(f"Trying to set grid location {(x, y)}")
|
||||
return 0
|
||||
|
||||
def add_fill(self, ele):
|
||||
ele_type = ele.attrib['type']
|
||||
self.clb = ele_type
|
||||
ele_w, ele_h = self.fpga_arch.tiles[ele_type]
|
||||
for x in range(0, self.width, ele_w):
|
||||
for y in range(0, self.height, ele_h):
|
||||
self._set_value(x, y, ele_type, ele_w, ele_h)
|
||||
|
||||
def add_perimeter(self, ele):
|
||||
ele_type = ele.attrib['type']
|
||||
ele_w, ele_h = self.fpga_arch.tiles[ele_type]
|
||||
|
||||
for y in [0, self.fpga_arch.height-1]:
|
||||
for x in range(0, self.fpga_arch.width):
|
||||
self._set_value(x, y, ele_type, ele_w, ele_h)
|
||||
for x in [0, self.fpga_arch.width-1]:
|
||||
for y in range(0, self.fpga_arch.height):
|
||||
self._set_value(x, y, ele_type, ele_w, ele_h)
|
||||
|
||||
def add_corners(self, ele):
|
||||
ele_type = ele.attrib['type']
|
||||
ele_w, ele_h = self.fpga_arch.tiles[ele_type]
|
||||
self._set_value(0, 0, ele_type)
|
||||
self._set_value(0, self.height-1, ele_type)
|
||||
self._set_value(self.width-1, 0, ele_type)
|
||||
self._set_value(self.width-1, self.height-1, ele_type, ele_w, ele_h)
|
||||
|
||||
def add_single(self, ele):
|
||||
ele_type = ele.attrib['type']
|
||||
ele_w, ele_h = self.fpga_arch.tiles[ele_type]
|
||||
x = int(ele.attrib['x'])
|
||||
y = int(ele.attrib['y'])
|
||||
self._set_value(x-1, y-1, ele_type, ele_w, ele_h)
|
||||
|
||||
def add_row(self, ele):
|
||||
ele_type = ele.attrib['type']
|
||||
ele_w, ele_h = self.fpga_arch.tiles[ele_type]
|
||||
var = {'w': ele_w, 'h': ele_h,
|
||||
'W': self.fpga_arch.width, 'H': self.fpga_arch.height}
|
||||
startx = self._get_val(ele, 'startx', ele_w, var)
|
||||
incrx = self._get_val(ele, 'incrx', ele_w, var)
|
||||
starty = self._get_val(ele, 'starty', 1, var)
|
||||
repy = self._get_val(ele, 'repeaty', self.fpga_arch.height, var)
|
||||
for x in range(startx, self.width, incrx):
|
||||
for y in range(starty, self.height, repy):
|
||||
self._set_value(x, y, ele_type, ele_w, ele_h)
|
||||
|
||||
def add_col(self, ele):
|
||||
ele_type = ele.attrib['type']
|
||||
ele_w, ele_h = self.fpga_arch.tiles[ele_type]
|
||||
var = {'w': ele_w, 'h': ele_h,
|
||||
'W': self.fpga_arch.width, 'H': self.fpga_arch.height}
|
||||
startx = self._get_val(ele, 'startx', 0, var)
|
||||
repeatx = self._get_val(ele, 'repeatx', self.fpga_arch.width, var)
|
||||
starty = self._get_val(ele, 'starty', 1, var)
|
||||
incry = self._get_val(ele, 'incry', ele_h, var)
|
||||
for x in range(startx, self.width, repeatx):
|
||||
for y in range(starty, self.height, incry):
|
||||
self._set_value(x, y, ele_type, ele_w, ele_h)
|
||||
|
||||
def add_region(self, ele):
|
||||
ele_type = ele.attrib['type']
|
||||
ele_w, ele_h = self.fpga_arch.tiles[ele_type]
|
||||
var = {'w': ele_w, 'h': ele_h,
|
||||
'W': self.fpga_arch.width, 'H': self.fpga_arch.height}
|
||||
startx = self._get_val(ele, "startx", 0, var)
|
||||
endx = self._get_val(ele, "endx", self.fpga_arch.width, var)
|
||||
incrx = self._get_val(ele, "incrx", ele_w, var)
|
||||
repeatx = self._get_val(ele, "repeatx", self.fpga_arch.width, var)
|
||||
starty = self._get_val(ele, "starty", 0, var)
|
||||
endy = self._get_val(ele, "endy", self.fpga_arch.height, var)
|
||||
incry = self._get_val(ele, "incry", ele_h, var)
|
||||
repeaty = self._get_val(ele, "repeaty", self.fpga_arch.height, var)
|
||||
|
||||
for xstep in range(0, self.width, repeatx):
|
||||
for ystep in range(0, self.height, repeaty):
|
||||
for x in range(startx, endx, incrx):
|
||||
for y in range(starty, endy, incry):
|
||||
self._set_value(xstep+x, ystep+y,
|
||||
ele_type, ele_w, ele_h)
|
||||
|
||||
def enumerate_grid(self):
|
||||
'''
|
||||
Enumerates the FPGA grid
|
||||
|
||||
Returns:
|
||||
(list(list(str))): Returns 2D grid
|
||||
'''
|
||||
for element in sorted(self.layout, key=lambda x: int(x.attrib["priority"])):
|
||||
tag = element.tag.lower()
|
||||
ele_type = element.attrib["type"].lower()
|
||||
if tag == "fill":
|
||||
logger.debug("Adding Fill")
|
||||
self.add_fill(element)
|
||||
elif tag == "corners":
|
||||
logger.debug("Adding Corners")
|
||||
self.add_corners(element)
|
||||
elif tag == "single":
|
||||
logger.debug("Adding Single")
|
||||
self.add_single(element)
|
||||
elif tag == "perimeter":
|
||||
logger.debug("Adding Perimeter")
|
||||
self.add_perimeter(element)
|
||||
elif tag == "row":
|
||||
logger.debug("Adding Row")
|
||||
self.add_row(element)
|
||||
elif tag == "col":
|
||||
logger.debug("Adding Col")
|
||||
self.add_col(element)
|
||||
elif tag == "region":
|
||||
logger.debug("Adding region")
|
||||
self.add_region(element)
|
||||
else:
|
||||
continue
|
||||
return self.grid
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,161 @@
|
|||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import yaml
|
||||
import logging
|
||||
import xml.etree.ElementTree as ET
|
||||
from itertools import chain
|
||||
from pprint import pprint
|
||||
|
||||
logger = logging.getLogger('Bitstream conversion')
|
||||
|
||||
#PROJ_NAME = os.environ.get('PROJ_NAME')
|
||||
|
||||
|
||||
def formatter(prog): return argparse.HelpFormatter(prog, max_help_position=60)
|
||||
|
||||
|
||||
def get_module_of_instance(mapping, instance):
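# Return the module whose instance list contains `instance`, or None when the
# instance is not part of the mapping (e.g. already merged into another module).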
|
||||
for module, instance_list in mapping.items():
|
||||
if instance in instance_list:
|
||||
return module
|
||||
return None
|
||||
|
||||
|
||||
def get_ccff_paths(module, ccff_path_directory):
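# Load the per-module CCFF path YAML file (ccff_path_directory is a format
# string with a placeholder for the module name) and return its contents.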
|
||||
filename = ccff_path_directory.format(module)
|
||||
with open(filename, "r") as stream:
|
||||
try:
|
||||
bitstream = yaml.safe_load(stream)
|
||||
except yaml.YAMLError as exc:
|
||||
print(exc)
|
||||
return bitstream
|
||||
|
||||
|
||||
def restructure_bitstream(original_bitstream, original_bitstream_distribution, instance_mapping, ccff_path_directory, output_bitstream_xml, fabric_name):
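# Remap every configuration bit of the original OpenFPGA bitstream onto the
# restructured fabric hierarchy: for each fabric-key region, walk its instances,
# look up their CCFF paths and annotate the corresponding bit elements with the
# new flattened path before writing the updated bitstream XML.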
|
||||
print(f"-> original_bitstream {original_bitstream}")
|
||||
print(f"-> instance_mapping {instance_mapping}")
|
||||
print(f"-> ccff_path_directory {ccff_path_directory}")
|
||||
|
||||
with open(instance_mapping, "r") as fp:
|
||||
instance_map = json.load(fp)
|
||||
|
||||
bitstream_tree = ET.parse(original_bitstream)
|
||||
bitstream = bitstream_tree.getroot()
|
||||
|
||||
tree = ET.parse(original_bitstream_distribution)
|
||||
bitstream_dist = tree.getroot()
|
||||
|
||||
with open(ccff_path_directory.format("fpga_top"), "r") as stream:
|
||||
top_ccff = yaml.safe_load(stream)
|
||||
|
||||
validate_bitstream_length(instance_map, bitstream_dist, ccff_path_directory, fabric_name)
|
||||
|
||||
# Iterate over each region in fabric key
|
||||
for each_region, instances in top_ccff.items():
|
||||
region_id = each_region.split("_")[-1]
|
||||
bitvalues = bitstream.findall(f'.//region[@id="{region_id}"]/*')
|
||||
bit_number = 0
|
||||
# Iterate over each module in fabric region
|
||||
for instance, head_port in instances:
|
||||
module = get_module_of_instance(instance_map, instance)
|
||||
or_bit = bitstream_dist.findall(f'.//*block[@name="{instance}"]')[0]
|
||||
if module is None:
|
||||
logger.debug(f"skipping {instance}")
|
||||
continue # Skip if module is merged already
|
||||
paths = get_ccff_paths(module, ccff_path_directory)[head_port]
|
||||
for line in paths:
|
||||
# replace slash in the line with dot
|
||||
line_dot = line.replace('/', '.')
|
||||
line_dot = line_dot[:-1] + "Q"
|
||||
# print(bit_number, f"fpga_top/fpga_core_inst/{instance}/{line}")
|
||||
bitvalues[-1*(bit_number+1)].attrib["new_path"] = f"fpga_top.fpga_core_inst.{instance}.{line_dot}"
|
||||
bitvalues[-1*(bit_number+1)].attrib["id"] = str(bit_number)
|
||||
bit_number += 1
|
||||
# print(f"module {module:10} bitno={bit_number:<10} paths={len(paths): 4}")
|
||||
print(f">>>>> Region {region_id:4} TotalBits {bit_number:5} /" +
|
||||
f" {len(bitvalues):5} = {(bit_number-len(bitvalues)):3}")
|
||||
bitstream_tree.write(output_bitstream_xml)
|
||||
|
||||
|
||||
def validate_bitstream_length(instance_map, bitstream_dist, ccff_path_directory, fabric_name):
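# Cross-check, for every merged module, the expected bit count from the
# bitstream distribution (including the blocks absorbed via `bitmap`) against
# the number of CCFF paths extracted for that module, and print the difference.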
|
||||
seen = []
|
||||
bitmap = {
|
||||
"cby_0__1_": ["grid_io_left_left_0__1_"],
|
||||
"cby_8__1_": ["grid_io_right_right_9__1_"],
|
||||
"cbx_1__0_": ["grid_io_bottom_bottom_1__0_"],
|
||||
"cbx_1__8_": ["grid_io_top_top_1__9_"],
|
||||
"cby_0__6_": ["sb_0__5_", "sb_0__6_", "grid_io_left_left_0__1_"],
|
||||
"cby_8__6_": ["sb_8__5_", "sb_8__6_", "grid_io_right_right_9__1_"],
|
||||
"cby_0__3_": ["sb_0__2_", "sb_0__3_", "grid_io_left_left_0__1_"],
|
||||
"cby_8__3_": ["sb_8__2_", "sb_8__3_", "grid_io_right_right_9__1_"],
|
||||
"cby_2__3_": ["sb_6__2_", "sb_6__3_"],
|
||||
"cby_2__6_": ["sb_6__5_", "sb_6__6_"],
|
||||
"grid_ram9k": ["cbx_1__2_", "cbx_1__3_",
|
||||
"sb_1__2_", "sb_1__3_",
|
||||
"cbx_2__2_", "cbx_2__3_"],
|
||||
"grid_dsp": ["cbx_1__5_", "cbx_1__6_",
|
||||
"sb_1__5_", "sb_1__6_",
|
||||
"cbx_2__5_", "cbx_2__6_"]
|
||||
}
|
||||
if fabric_name == "FPGA25x24_flex4k":
|
||||
bitmap = {
|
||||
"cby_0__1_": ["grid_io_left_left_0__1_"],
|
||||
"cby_8__1_": ["grid_io_right_right_25__1_"],
|
||||
"cbx_1__0_": ["grid_io_bottom_bottom_1__0_"],
|
||||
"cbx_1__8_": ["grid_io_top_top_1__26_"],
|
||||
"cby_0__6_": ["sb_0__12_", "sb_0__13_", "grid_io_left_left_0__1_"],
|
||||
"cby_8__6_": ["sb_24__12_", "sb_24__13_", "grid_io_right_right_25__1_"],
|
||||
"cby_0__3_": ["sb_0__2_", "sb_0__3_", "grid_io_left_left_0__1_"],
|
||||
"cby_8__3_": ["sb_24__2_", "sb_24__3_", "grid_io_right_right_25__1_"],
|
||||
"cby_2__3_": ["sb_6__2_", "sb_6__3_"],
|
||||
"cby_2__6_": ["sb_6__12_", "sb_6__13_"],
|
||||
"grid_ram9k": ["cbx_1__2_", "cbx_1__3_",
|
||||
"sb_1__2_", "sb_1__3_",
|
||||
"cbx_2__2_", "cbx_2__3_"],
|
||||
"grid_dsp": ["cbx_1__12_", "cbx_1__13_",
|
||||
"sb_1__12_", "sb_1__13_",
|
||||
"cbx_2__12_", "cbx_2__13_"]
|
||||
}
|
||||
|
||||
print("{:15} {:7} {:8} {:4}".format(
|
||||
"Modules", "expected", "obtained", "diff"))
|
||||
for module, instances in instance_map.items():
|
||||
bit_cnt = 0
|
||||
for m in bitmap.get(module, []) + [instances[0], ]:
|
||||
try:
|
||||
or_bit = bitstream_dist.findall(
|
||||
f'.//*block[@name="{m}"]')[0]
|
||||
except IndexError:
|
||||
print(f"{m} not found")
|
||||
bit_cnt += int(or_bit.attrib['number_of_bits'])
|
||||
|
||||
# CCFF Paths
|
||||
filename = ccff_path_directory.format(module)
|
||||
bitstream = yaml.safe_load(open(filename, "r"))
|
||||
path_cnt = sum([len(i) for i in bitstream.values()])
|
||||
|
||||
print(f"{module:15} {bit_cnt:8} {path_cnt:8} {(bit_cnt-path_cnt):4}")
|
||||
|
||||
|
||||
if __name__ == "__main__":

    parser = argparse.ArgumentParser(formatter_class=formatter)

    # Mandatory arguments
    parser.add_argument('--original_bitstream', type=str,
                        required=True)
    parser.add_argument('--instance_mapping', type=str,
                        required=True)
    parser.add_argument('--ccff_path_directory', type=str,
                        required=True)
    parser.add_argument('--original_bitstream_distribution', type=str,
                        required=True)
    parser.add_argument('--output_bitstream_xml', type=str,
                        required=True)
    # Optional arguments
    parser.add_argument('--fabric_name', type=str,
                        default=os.environ.get('PROJ_NAME'))
    args = parser.parse_args()

    restructure_bitstream(args.original_bitstream,
                          args.original_bitstream_distribution,
                          args.instance_mapping,
                          args.ccff_path_directory,
                          args.output_bitstream_xml,
                          args.fabric_name)
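
    # A hedged usage sketch (not from the original repository): the script and
    # file names below are placeholders; only --fabric_name reuses a value that
    # appears above. --ccff_path_directory is assumed to be a format string
    # with a "{}" placeholder, since it is consumed via
    # ccff_path_directory.format(module) in validate_bitstream_length():
    #
    #   python3 restructure_bitstream.py \
    #       --original_bitstream fabric_bitstream.xml \
    #       --original_bitstream_distribution bitstream_distribution.xml \
    #       --instance_mapping instance_map.yaml \
    #       --ccff_path_directory "ccff_paths/{}.yaml" \
    #       --output_bitstream_xml fabric_bitstream_restructured.xml \
    #       --fabric_name FPGA25x24_flex4k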
@ -0,0 +1,19 @@
from setuptools import setup, find_packages

if __name__ == "__main__":

    setup(
        name="openfpga-physical",
        version="0.0.1",
        maintainer="Ganesh Gore",
        maintainer_email="ganesh.gore@utah.edu",
        author="Ganesh Gore",
        author_email="ganesh.gore@utah.edu",
        packages=find_packages(),
        install_requires=['numpy >= 1.22.2'],
        python_requires='>=3.8',
        entry_points={
            'console_scripts': [],
        },
    )
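
# Illustrative only, not part of the original metadata: if this package ever
# exposed a command-line tool, the (currently empty) console_scripts list
# could register it roughly like this, assuming a hypothetical module
# openfpga_physical.cli providing a main() function:
#
#     entry_points={
#         'console_scripts': [
#             'openfpga-physical = openfpga_physical.cli:main',
#         ],
#     },
#
# For local development the package can be installed in editable mode with the
# standard pip invocation: python3 -m pip install -e .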
@ -0,0 +1,63 @@
import logging
import os
import pickle
import sys
from glob import glob
from pathlib import Path

import spydrnet as sdn
from spydrnet_physical.util import FPGAGridGen

logger = logging.getLogger("spydrnet_logs")

PROJ_NAME = os.environ["PROJ_NAME"]
RELEASE_DIR = os.environ["RELEASE_DIRECTORY"]
LAYOUT = os.environ["LAYOUT"]
TASK_DIR_NAME = os.environ["TASK_DIR_NAME"]
SVG_DIR = f"{RELEASE_DIR}/rpts/SVG"
PICKLE_DIR = f"{RELEASE_DIR}/rpts/pickle"


def main():
    """
    Main rendering script
    """
    try:
        VPR_ARCH_FILE = glob(f"{TASK_DIR_NAME}/arch/*vpr*")[0]
    except IndexError:
        logger.exception("Architecture file not found ['%s/arch/*vpr*']", TASK_DIR_NAME)
        sys.exit(1)
    # Demonstrates how to modify the structure
    fpga = FPGAGridGen(
        design_name=PROJ_NAME,
        arch_file=VPR_ARCH_FILE,
        release_root=RELEASE_DIR,
        layout=LAYOUT,
    )

    fpga.enumerate_grid()
    # fpga.default_parameters["cbx"][0] = 10  # uncomment to force square plan
    # fpga.default_parameters["cby"][1] = 10  # uncomment to force square plan
    Path(SVG_DIR).mkdir(parents=True, exist_ok=True)
    Path(PICKLE_DIR).mkdir(parents=True, exist_ok=True)

    dwg = fpga.render_layout(filename=f"{SVG_DIR}/{PROJ_NAME}_render.svg", grid_io=True)

    dwg.save(pretty=True, indent=4)
    pickle.dump(dwg, open(f"{PICKLE_DIR}/{PROJ_NAME}_render.pickle", "wb"))
    logger.info("Saving file %s/%s_render.svg", SVG_DIR, PROJ_NAME)

    # ============ Modify your floorplan here ============

    # ====================== END =========================

    dwg.saveas(
        filename=f"{SVG_DIR}/{PROJ_NAME}_restruct_render.svg", pretty=True, indent=4
    )
    pickle.dump(dwg, open(f"{PICKLE_DIR}/{PROJ_NAME}_restruct_render.pickle", "wb"))
    pickle.dump(fpga, open(f"{PICKLE_DIR}/{PROJ_NAME}_fpgagridgen.pickle", "wb"))
    logger.info("Saving file %s/%s_restruct_render.svg", SVG_DIR, PROJ_NAME)
    return dwg


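# --- Illustrative only: not part of the original flow -----------------------
# A minimal, hedged sketch of how the pickled artifacts written by main()
# could be reloaded later (e.g. to inspect or rework the floorplan offline).
# The helper is never called here; the file names simply reuse the
# PICKLE_DIR/PROJ_NAME convention from this script, everything else is an
# assumption.
def _load_pickled_artifacts():
    with open(f"{PICKLE_DIR}/{PROJ_NAME}_render.pickle", "rb") as fptr:
        dwg = pickle.load(fptr)
    with open(f"{PICKLE_DIR}/{PROJ_NAME}_fpgagridgen.pickle", "rb") as fptr:
        fpga = pickle.load(fptr)
    return dwg, fpga

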
if __name__ == "__main__":
    main()
@ -0,0 +1,7 @@
pandas
numpy
envyaml
matplotlib
svgwrite
spydrnet
git+https://github.com/ganeshgore/spydrnet-physical.git