Merge pull request #266 from antmicro/86-cell_cross_index
86 cell cross index
commit f77cbc2dff
@@ -31,6 +31,11 @@ conda:
submodules:
  include:
    - libraries/sky130_fd_io/latest
    - libraries/sky130_fd_sc_hd/latest
    - libraries/sky130_fd_sc_hdll/latest
    - libraries/sky130_fd_sc_hs/latest
    - libraries/sky130_fd_sc_ls/latest
    - libraries/sky130_fd_sc_ms/latest
  recursive: false

formats:
@@ -0,0 +1 @@
../../scripts/python-skywater-pdk/skywater_pdk/
@@ -31,9 +31,9 @@
import docutils
import os
import re
# import sys
import sys
# sys.path.insert(0, os.path.abspath('.'))

sys.path.insert(0, os.path.abspath('./_ext'))

# -- Project information -----------------------------------------------------
@@ -67,6 +67,8 @@ extensions = [
    'sphinx.ext.todo',
    'sphinxcontrib_hdl_diagrams',
    'sphinxcontrib.bibtex',
    'skywater_pdk.cells.cross_index',
    'skywater_pdk.cells.generate.readme',
]

bibtex_default_style = 'plain'
@@ -410,3 +412,6 @@ def setup(app):
    app.add_role('lib', lib_role)
    app.add_role('cell', cell_role)
    app.add_role('model', cell_role)

    # Trigger README generation for every cell directory; the event is
    # registered and handled by skywater_pdk.cells.generate.readme.
    app.emit("cells_generate_readme", 'contents/libraries/*/cells/*')
@@ -0,0 +1 @@
.. cross_index:: libraries/*
@@ -146,3 +146,8 @@ The SKY130 currently offers two :lib_type:`build space` libraries. Build space l

   libraries/sky130_ef_io/README

.. toctree::
   :maxdepth: 1
   :name: Cells in libraries cross-index

   cell-index
@@ -0,0 +1,307 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 SkyWater PDK Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0

import argparse
import json
import os
import pathlib
import pprint
import sys
import textwrap

from docutils import nodes
from docutils.parsers.rst import Directive
from docutils.statemachine import ViewList
from sphinx.util.nodes import nested_parse_with_titles

from typing import Tuple, List, Dict

verbose = False

# Using a list-table here to allow for easier line breaks in descriptions.
rst_header_line_char = '-'
rst_header = 'Cells in libraries cross-index'
rst_template = """\
{header_line}
{header_underline}

.. list-table::
   :header-rows: 1

   * - Cell name
     - {lib_suffixes}
     - Number of libraries
{cell_list}
"""

cell_template = """\
   * - {cell_name}
     - {lib_suffixes_match}
     - {lib_count}
"""

tab_entry = '\n     - '


def collect(library_dir) -> Tuple[str, List[str]]:
    """Collect the available definitions for cells in a library.

    Parameters
    ----------
    library_dir: str or pathlib.Path
        Path to a library.

    Returns
    -------
    lib : str
        Library name

    cells : list of pathlib.Path
        Definition files for cells in the library.
    """

    if not isinstance(library_dir, pathlib.Path):
        library_dir = pathlib.Path(library_dir)

    libname = None
    cells = set()

    for p in library_dir.rglob("definition.json"):
        if not p.is_file():
            continue
        define_data = json.load(open(p))
        if not define_data['type'] == 'cell':
            continue
        cells.add(p)
        if libname is None:
            libname = define_data['library']

    cells = list(sorted(cells))
    if not len(cells):
        raise FileNotFoundError("No cell definitions found")
    assert len(libname) > 0
    return libname, cells


def get_cell_names(cells):
    """Get cell names from definition files.

    Parameters
    ----------
    cells: list of pathlib.Path
        List of paths to JSON description files

    Returns
    -------
    cell_list: list of str
        List of cell names
    """

    cell_list = []

    for cell in cells:
        with open(str(cell), "r") as c:
            cell_json = json.load(c)
            cell_list.append(cell_json['name'])
    return cell_list


def generate_crosstable(cells_lib, link_template=''):
    """Generate the RST paragraph containing the cell cross-reference table.

    Parameters:
        cells_lib: dictionary with list of libraries per cell name [dict]
        link_template: cell README generic path (with {lib} and {cell} tags) [str]

    Returns:
        paragraph: generated paragraph [str]
    """

    assert isinstance(cells_lib, dict)

    paragraph = ""
    cell_list = ""

    lib_suffixes = set()
    for v in cells_lib.values():
        lib_suffixes.update([lib.rpartition('_')[2] for lib in v])
    lib_suffixes = list(lib_suffixes)
    lib_suffixes.sort()
    # print(lib_suffixes)

    for c in sorted(cells_lib):
        ls = {}  # dictionary of cell library shorts (suffixes)
        for lib in cells_lib[c]:
            ls[lib.rpartition('_')[2]] = lib
        mark = ' :doc:`x <' + link_template + '>`'  # lib match mark with link
        suff_match = [mark.format(cell=c, lib=ls[s]) if s in ls else ''
                      for s in lib_suffixes]
        cell_list += cell_template.format(
            cell_name=c,
            lib_suffixes_match=tab_entry.join(suff_match),
            lib_count=str(len(ls))
        )

    paragraph = rst_template.format(
        header_line=rst_header,
        header_underline=rst_header_line_char * len(rst_header),
        lib_suffixes=tab_entry.join(lib_suffixes),
        cell_list=cell_list
    )
    return paragraph


def cells_in_libs(libpaths):
    """Collect, per cell name, the libraries that contain that cell.

    Parameters:
        libpaths: list of cell library paths [list of pathlib.Path]

    Returns:
        cells_lib: dictionary with list of libraries containing each cell name [dict]
    """

    lib_dirs = [pathlib.Path(d) for d in libpaths]
    lib_dirs = [d for d in lib_dirs if d.is_dir()]
    libs_toc = dict()

    for lib in lib_dirs:
        try:
            libname, cells = collect(lib)
            if verbose:
                print(f"{lib} \tLibrary name: {libname}, found {len(cells)} cells")
            libs_toc[libname] = get_cell_names(cells)
        except FileNotFoundError:
            if verbose:
                print(f'{lib} \t- no cells found')

    all_cells = set()
    cells_lib = {}
    for lib, cells in libs_toc.items():
        all_cells.update(set(cells))
        for c in cells:
            cells_lib[c] = cells_lib.get(c, []) + [lib]

    return cells_lib


# --- Sphinx extension wrapper ---

class CellCrossIndex(Directive):

    required_arguments = 1
    optional_arguments = 1
    has_content = True

    def run(self):
        env = self.state.document.settings.env
        dirname = env.docname.rpartition('/')[0]
        arg = self.arguments[0]
        arg = dirname + '/' + arg
        # An optional second argument selects file output instead of in-place parsing.
        output = dirname + '/' + self.arguments[1] if len(self.arguments) > 1 else None

        path = pathlib.Path(arg).expanduser()
        parts = path.parts[1:] if path.is_absolute() else path.parts
        paths = pathlib.Path(path.root).glob(str(pathlib.Path("").joinpath(*parts)))
        paths = list(paths)
        paths = [d.resolve() for d in paths if d.is_dir()]

        cells_lib = cells_in_libs(list(paths))
        celllink = self.arguments[0].replace('*', '{lib}') + '/cells/{cell}/README'
        paragraph = generate_crosstable(cells_lib, celllink)

        if output is None:  # dynamic output
            # parse the rst string into docutils nodes
            rst = ViewList()
            for i, line in enumerate(paragraph.split('\n')):
                rst.append(line, "cell-index-tmp.rst", i + 1)
            node = nodes.section()
            node.document = self.state.document
            nested_parse_with_titles(self.state, rst, node)
            return node.children
        else:  # file output
            if not output.endswith('.rst'):
                output += '.rst'
            with open(str(output), 'w') as f:
                f.write(paragraph)
            paragraph_node = nodes.paragraph()
            return [paragraph_node]


def setup(app):
    app.add_directive("cross_index", CellCrossIndex)

    return {
        'version': '0.1',
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }


# --- stand alone, command line operation ---

def main():
    global verbose
    parser = argparse.ArgumentParser()
    alllibpath = '../../../libraries/*/latest'
    celllink = 'libraries/{lib}/cells/{cell}/README'

    parser.add_argument(
        "-v",
        "--verbose",
        help="increase verbosity",
        action="store_true")
    parser.add_argument(
        "--all_libs",
        help="process all libs in " + alllibpath,
        action="store_true")
    parser.add_argument(
        "libraries_dirs",
        help="Paths to the library directories. Eg. " + alllibpath,
        type=pathlib.Path,
        nargs="*")
    parser.add_argument(
        "-o",
        "--outfile",
        help="Output file name",
        type=pathlib.Path,
        default=pathlib.Path('./cell-index.rst'))
    parser.add_argument(
        "-c",
        "--celllink",
        help="Specify cell link template. Default: '" + celllink + "'",
        type=str,
        default=celllink)

    args = parser.parse_args()
    verbose = args.verbose

    if args.all_libs:
        path = pathlib.Path(alllibpath).expanduser()
        parts = path.parts[1:] if path.is_absolute() else path.parts
        paths = pathlib.Path(path.root).glob(str(pathlib.Path("").joinpath(*parts)))
        args.libraries_dirs = list(paths)

    cells_lib = cells_in_libs(args.libraries_dirs)
    par = generate_crosstable(cells_lib, args.celllink)

    with open(str(args.outfile), 'w') as f:
        f.write(par)


if __name__ == "__main__":
    sys.exit(main())
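The module above can also be exercised outside of Sphinx. Below is a minimal sketch of driving its helpers directly from Python; the library paths and output file name are examples only, and it assumes the skywater_pdk package is importable (e.g. via the docs symlink added above):

# Hypothetical standalone use of the cross-index helpers; paths are examples.
from skywater_pdk.cells import cross_index

cells_lib = cross_index.cells_in_libs([
    'libraries/sky130_fd_sc_hd/latest',
    'libraries/sky130_fd_sc_hs/latest',
])
rst = cross_index.generate_crosstable(cells_lib, 'libraries/{lib}/cells/{cell}/README')
with open('cell-index.rst', 'w') as f:
    f.write(rst)

The script's own main() covers the same flow from the command line via the --all_libs, -o and -c options.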
@@ -0,0 +1,235 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 The SkyWater PDK Authors.
#
# Use of this source code is governed by the Apache 2.0
# license that can be found in the LICENSE file or at
# https://www.apache.org/licenses/LICENSE-2.0
#
# SPDX-License-Identifier: Apache-2.0

''' This is a prototype of the cell documentation generation script.
'''

import csv
import json
import os
import sys
import argparse
import pathlib
import glob
import subprocess
import textwrap
from docutils import nodes
from docutils.parsers.rst import Directive
from docutils.statemachine import ViewList
from sphinx.util.nodes import nested_parse_with_titles

verbose = False

readme_template = """\
{header}

**{description}**

*This is a stub of a cell description file.*

- **Cell name**: {name}
- **Type**: {deftype}
- **Verilog name**: {verilog_name}
- **Library**: {library}
- **Inputs**: {inputs}
- **Outputs**: {outputs}

{subheader_sym}

.. list-table::

   * - .. figure:: {symbol1}
     -
     - .. figure:: {symbol2}

{subheader_sch}

.. figure:: {schematic}
   :align: center

{subheader_gds}

"""

figure_template = """

.. figure:: {fig}
   :align: center
   :width: 50%

   {name}
"""


def write_readme(cellpath, define_data):
    ''' Generates a README for the given cell.

    Args:
        cellpath - path to a cell [str or pathlib.Path]
        define_data - cell data from json [dict]
    '''
    outpath = os.path.join(cellpath, 'README.rst')
    prefix = define_data['file_prefix']
    header = f':cell:`{prefix}`'
    subheader_sym = header + ' symbols'
    subheader_sch = header + ' schematic'
    subheader_gds = header + ' GDSII layouts'

    header += '\n' + '=' * len(header)
    subheader_sym += '\n' + '-' * len(subheader_sym)
    subheader_sch += '\n' + '-' * len(subheader_sch)
    subheader_gds += '\n' + '-' * len(subheader_gds)

    symbol1 = prefix + '.symbol.svg'
    symbol2 = prefix + '.pp.symbol.svg'
    schematic = prefix + '.schematic.svg'
    inputs = []
    outputs = []
    for p in define_data['ports']:
        try:
            if p[0] == 'signal' and p[2] == 'input':
                inputs.append(p[1])
            if p[0] == 'signal' and p[2] == 'output':
                outputs.append(p[1])
        except:  # tolerate malformed port entries
            pass
    gdssvg = []
    svglist = list(pathlib.Path(cellpath).glob('*.svg'))
    for s in svglist:
        gdsfile = pathlib.Path(os.path.join(cellpath, s.stem + '.gds'))
        if gdsfile.is_file():
            gdssvg.append(s)

    with open(outpath, 'w') as f:
        f.write(readme_template.format(
            header=header,
            subheader_sym=subheader_sym,
            subheader_sch=subheader_sch,
            subheader_gds=subheader_gds,
            description=define_data['description'].rstrip('.'),
            name=':cell:`' + prefix + '`',
            deftype=define_data['type'],
            verilog_name=define_data['verilog_name'],
            library=define_data['library'],
            inputs=f'{len(inputs)} (' + ', '.join(inputs) + ')',
            outputs=f'{len(outputs)} (' + ', '.join(outputs) + ')',
            symbol1=symbol1,
            symbol2=symbol2,
            schematic=schematic,
        ))
        for gs in sorted(gdssvg):
            f.write(figure_template.format(
                fig=gs.name,
                name=gs.stem
            ))


def process(cellpath):
    ''' Processes the cell indicated by the path.
    Opens the cell definition and calls further processing.

    Args:
        cellpath - path to a cell [str or pathlib.Path]
    '''
    if verbose:
        print()
        print(cellpath)
    define_json = os.path.join(cellpath, 'definition.json')
    if not os.path.exists(define_json):
        print("No definition.json in", cellpath)
    assert os.path.exists(define_json), define_json
    define_data = json.load(open(define_json))

    if define_data['type'] == 'cell':
        write_readme(cellpath, define_data)

    return


# --- Sphinx extension wrapper ----------------

def GenerateCellReadme(app, cellpath):

    print(f'GenerateCellReadme: generating files for {cellpath}')
    path = pathlib.Path(cellpath).expanduser()
    parts = path.parts[1:] if path.is_absolute() else path.parts
    paths = pathlib.Path(path.root).glob(str(pathlib.Path("").joinpath(*parts)))
    paths = list(paths)
    cell_dirs = [d.resolve() for d in paths if d.is_dir()]

    errors = 0
    for d in cell_dirs:
        try:
            process(d)
        except (AssertionError, FileNotFoundError, ChildProcessError) as ex:
            print(f'GenerateCellReadme: {type(ex).__name__}')
            print(f'{ex.args}')
            errors += 1
    print(f'GenerateCellReadme: {len(cell_dirs)} files processed, {errors} errors.')


def setup(app):
    app.add_event("cells_generate_readme")
    app.connect('cells_generate_readme', GenerateCellReadme)

    return {
        'version': '0.1',
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }


# ----------------------------------------------

def main():
    ''' Generates README.rst files for cells.'''

    prereq_txt = ''
    output_txt = 'output:\n generates README.rst'
    allcellpath = '../../../libraries/*/latest/cells/*'

    parser = argparse.ArgumentParser(
        description=main.__doc__,
        epilog=prereq_txt + '\n\n' + output_txt,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        "--all_libs",
        help="process all cells in " + allcellpath,
        action="store_true")
    parser.add_argument(
        "cell_dir",
        help="path to the cell directory",
        type=pathlib.Path,
        nargs="*")

    args = parser.parse_args()

    if args.all_libs:
        path = pathlib.Path(allcellpath).expanduser()
        parts = path.parts[1:] if path.is_absolute() else path.parts
        paths = pathlib.Path(path.root).glob(str(pathlib.Path("").joinpath(*parts)))
        args.cell_dir = list(paths)

    cell_dirs = [d.resolve() for d in args.cell_dir if d.is_dir()]

    errors = 0
    for d in cell_dirs:
        try:
            process(d)
        except KeyboardInterrupt:
            sys.exit(1)
        except (AssertionError, FileNotFoundError, ChildProcessError) as ex:
            print(f'Error: {type(ex).__name__}')
            print(f'{ex.args}')
            errors += 1
    print(f'\n{len(cell_dirs)} files processed, {errors} errors.')
    # Return a non-zero exit code only when some cell failed to process.
    return 1 if errors else 0


if __name__ == "__main__":
    sys.exit(main())
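As with the cross-index module, the README generator can be exercised on its own. A minimal sketch for a single cell directory follows; the path is an example only, and process() expects a definition.json inside it:

# Hypothetical direct call; writes README.rst next to the cell's definition.json.
from skywater_pdk.cells.generate import readme

readme.process('libraries/sky130_fd_sc_hd/latest/cells/inv')

The command-line entry point does the same over many cells via the --all_libs flag or explicit cell_dir arguments.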