Third variant for HFNS, trunks made with a Steiner tree (FLUTE).
Note: Keep the various hfnsX.py as toolboxes for future experiments. * New: cumulus/plugins/block/hfns3.py, builds the trunk of the net as an RMST. First with the "Iterative One Steiner Point" heuristic (terribly slow above 100 points), then with FLUTE. For the global routing trunk, we must be very careful to check that the cluster "graph point" is the one of its buffer RoutingPad, so both end up in the same GCell. * New: cumulus/plugins/block/timing.py, stub for basic timing computation and conversion between sinks and capacitance. Currently based on the fake 350nm technology given as example in SxLib ("man sxlib"...).
This commit is contained in:
parent
8f0a8e5a3a
commit
3495536268
|
@ -417,5 +417,7 @@
|
|||
target_link_libraries( ${pytarget} ${pyDeplibs} )
|
||||
|
||||
install( TARGETS ${pytarget} DESTINATION ${PYTHON_SITE_PACKAGES} )
|
||||
if( NOT ("${pyIncludes}" STREQUAL "None") )
|
||||
install( FILES ${pyIncludes} DESTINATION ${inc_install_dir} )
|
||||
endif()
|
||||
endmacro( add_python_module )
|
||||
|
|
|
@ -235,6 +235,12 @@ def setTraceLevel ( level ):
|
|||
return
|
||||
|
||||
|
||||
def dots ( ttyWidth, leftText, rightText ):
|
||||
dotWidth = ttyWidth - len(leftText) - len(rightText) - 2
|
||||
if dotWidth < 0: dotWidth = 0
|
||||
print( '{}{}{}'.format(leftText,'.'*dotWidth,rightText) )
|
||||
|
||||
|
||||
def overload ( defaultParameters, parameters ):
|
||||
overloads = {}
|
||||
overloadParameters = []
|
||||
|
|
|
@ -51,7 +51,11 @@
|
|||
${CMAKE_CURRENT_SOURCE_DIR}/plugins/alpha/block/spares.py
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/plugins/alpha/block/block.py
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/plugins/alpha/block/clocktree.py
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/plugins/alpha/block/hfns.py
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/plugins/alpha/block/timing.py
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/plugins/alpha/block/rsmt.py
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/plugins/alpha/block/hfns1.py
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/plugins/alpha/block/hfns2.py
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/plugins/alpha/block/hfns3.py
|
||||
)
|
||||
|
||||
install ( FILES ${pySources} DESTINATION ${PYTHON_SITE_PACKAGES}/cumulus )
|
||||
|
|
|
@ -39,6 +39,7 @@ from Hurricane import Instance
|
|||
import CRL
|
||||
from CRL import RoutingLayerGauge
|
||||
from helpers import trace
|
||||
from helpers import dots
|
||||
from helpers.io import ErrorMessage
|
||||
from helpers.io import WarningMessage
|
||||
from helpers.io import catch
|
||||
|
@ -48,12 +49,17 @@ import Anabatic
|
|||
import Katana
|
||||
import plugins.rsave
|
||||
from plugins import getParameter
|
||||
from alpha.block import timing
|
||||
from alpha.block.spares import Spares
|
||||
from alpha.block.clocktree import ClockTree
|
||||
from alpha.block.hfns import BufferTree
|
||||
#from alpha.block.hfns1 import BufferTree
|
||||
#from alpha.block.hfns2 import BufferTree
|
||||
from alpha.block.hfns3 import BufferTree
|
||||
from alpha.block.configuration import IoPin
|
||||
from alpha.block.configuration import BlockState
|
||||
|
||||
timing.staticInit()
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Class : "block.Side".
|
||||
|
@ -415,11 +421,18 @@ class Block ( object ):
|
|||
"""Create the trunk of all the high fanout nets."""
|
||||
print( ' o Building high fanout nets trees.' )
|
||||
if self.spares:
|
||||
with UpdateSession():
|
||||
maxSinks = timing.tech.getSinksEstimate( self.state.bufferConf.name )
|
||||
dots( 82
|
||||
, ' - Max sinks for buffer "{}"'.format(self.state.bufferConf.name)
|
||||
, ' {}'.format(maxSinks) )
|
||||
nets = []
|
||||
for net in self.state.cell.getNets():
|
||||
sinksCount = 0
|
||||
for rp in net.getRoutingPads(): sinksCount += 1
|
||||
if sinksCount > 30:
|
||||
if sinksCount > maxSinks:
|
||||
nets.append( (net,sinksCount) )
|
||||
with UpdateSession():
|
||||
for net,sinksCount in nets:
|
||||
trace( 550, '\tBlock.addHfnTrees(): Found high fanout net "{}" ({} sinks).\n' \
|
||||
.format(net.getName(),sinksCount) )
|
||||
#if not net.getName().startswith('alu_m_muls_b(1)'): continue
|
||||
|
@ -429,9 +442,8 @@ class Block ( object ):
|
|||
sys.stdout.flush()
|
||||
self.hfnTrees.append( BufferTree( self.spares, net ) )
|
||||
self.hfnTrees[-1].buildBTree()
|
||||
self.hfnTrees[-1].rcreateBuffer()
|
||||
self.hfnTrees[-1].splitNet()
|
||||
self.spares.rshowPoolUse()
|
||||
#Breakpoint.stop( 0, 'block.findHfnTrees() done.' )
|
||||
else:
|
||||
print( ' (No spares buffers, disabled)' )
|
||||
return len(self.hfnTrees)
|
||||
|
@ -493,8 +505,10 @@ class Block ( object ):
|
|||
#katana.printConfiguration ()
|
||||
katana.digitalInit ()
|
||||
#katana.runNegociatePreRouted()
|
||||
#Breakpoint.stop( 0, 'Block.route() Before global routing.' )
|
||||
katana.runGlobalRouter ( Katana.Flags.NoFlags )
|
||||
katana.loadGlobalRouting ( Anabatic.EngineLoadGrByNet )
|
||||
#Breakpoint.stop( 0, 'Block.route() After global routing.' )
|
||||
katana.layerAssign ( Anabatic.EngineNoNetLayerAssign )
|
||||
katana.runNegociate ( Katana.Flags.NoFlags )
|
||||
success = katana.isDetailedRoutingSuccess()
|
||||
|
|
|
@ -404,6 +404,9 @@ class BufferInterface ( object ):
|
|||
trace( 550, '-' )
|
||||
return
|
||||
|
||||
@property
|
||||
def name ( self ): return self.masterCell.getName()
|
||||
|
||||
@property
|
||||
def width ( self ): return self.masterCell.getAbutmentBox().getWidth()
|
||||
|
||||
|
|
|
@ -50,8 +50,10 @@ from plugins import getParameter
|
|||
from plugins import utils
|
||||
from plugins.alpha.block.configuration import GaugeConf
|
||||
from plugins.alpha.block.spares import Spares
|
||||
from plugins.alpha.block import timing
|
||||
|
||||
|
||||
timing.staticInit()
|
||||
af = CRL.AllianceFramework.get()
|
||||
|
||||
|
||||
|
@ -529,6 +531,7 @@ class BufferTree ( object ):
|
|||
self.isDeepNet = True
|
||||
self.clusterDepth = 0
|
||||
self.clusters = [ [] ]
|
||||
self.bufName = self.spares.state.bufferConf.name
|
||||
self.netCount = 0
|
||||
self.netName = self.net.getName()
|
||||
self.netIndex = None
|
||||
|
@ -552,7 +555,7 @@ class BufferTree ( object ):
|
|||
pruned from the set given to Kruskal.
|
||||
"""
|
||||
levelFactor = 1
|
||||
if self.clusterDepth == 0: pass
|
||||
if self.clusterDepth == 0: timing.tech.getWlEstimate( self.bufName, 1 )
|
||||
else: levelFactor = 4*self.clusterDepth
|
||||
return levelFactor*l(700)
|
||||
|
||||
|
@ -577,6 +580,17 @@ class BufferTree ( object ):
|
|||
that the number of sinks is below 30 and the half-perimeter is not
|
||||
too great (see ``edgeLimit``).
|
||||
"""
|
||||
if self.clusterDepth == 0:
|
||||
maxWL = timing.tech.getWlEstimate( self.bufName, clusterA.size+clusterB.size )
|
||||
area = Box( clusterA.area )
|
||||
area.merge( clusterB.area )
|
||||
hpWL = (area.getWidth() + area.getHeight()) / 2
|
||||
if hpWL >= maxWL:
|
||||
return True
|
||||
trace( 550, '\t> Reject merge: hpWL >= maxWL ({} >= {}).\n' \
|
||||
.format(DbU.getValueString(hpWL),DbU.getValueString(maxWL)) )
|
||||
return True
|
||||
|
||||
if clusterA.size + clusterB.size > 30:
|
||||
trace( 550, '\t> Reject merge, over size threshold of 30.\n' )
|
||||
return False
|
||||
|
@ -638,7 +652,7 @@ class BufferTree ( object ):
|
|||
trace( 550, '-' )
|
||||
return clustersCount
|
||||
|
||||
def buildBTree ( self ):
|
||||
def _rclusterize ( self ):
|
||||
"""
|
||||
Recursively performs the Kruskal algorithm until only *one* root
|
||||
cluster remains. First level is clusters of RoutingPad, then
|
||||
|
@ -689,13 +703,13 @@ class BufferTree ( object ):
|
|||
raise ErrorMessage( 2, 'BufferTree.snapshot(): Clusters must be built first.' )
|
||||
self.root.snapshot()
|
||||
|
||||
def rcreateBuffer ( self ):
|
||||
def _rcreateBuffer ( self ):
|
||||
"""Proxy to ``Cluster.rcreateBuffer()``."""
|
||||
if not self.root:
|
||||
raise ErrorMessage( 2, 'BufferTree.rcreateBuffer(): Clusters must be built first.' )
|
||||
self.root.rcreateBuffer()
|
||||
|
||||
def splitNet ( self ):
|
||||
def _splitNet ( self ):
|
||||
"""
|
||||
Perform the actual splitting of the net into sub-trees. Mostly calls
|
||||
``Cluster.rsplitNet()`` then connect the top cluster root to the original
|
||||
|
@ -716,3 +730,8 @@ class BufferTree ( object ):
|
|||
RoutingPad.create( self.net, driverRpOcc, RoutingPad.BiggestArea )
|
||||
self.root.setRootDriver( self.net )
|
||||
trace( 550, '\tRoot input: {}\n'.format(self.root.bInputPlug) )
|
||||
|
||||
def buildBTree ( self ):
|
||||
self._rclusterize()
|
||||
self._rcreateBuffer()
|
||||
self._splitNet()
|
||||
|
|
|
@ -0,0 +1,269 @@
|
|||
#
|
||||
# This file is part of the Coriolis Software.
|
||||
# Copyright (c) SU 2020-2020, All Rights Reserved
|
||||
#
|
||||
# +-----------------------------------------------------------------+
|
||||
# | C O R I O L I S |
|
||||
# | C u m u l u s - P y t h o n T o o l s |
|
||||
# | |
|
||||
# | Author : Jean-Paul CHAPUT |
|
||||
# | E-mail : Jean-Paul.Chaput@lip6.fr |
|
||||
# | =============================================================== |
|
||||
# | Python : "./plugins/block/hfns.py" |
|
||||
# +-----------------------------------------------------------------+
|
||||
|
||||
"""
|
||||
Manage High Fanout Net Synthesis (HFNS).
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
import os.path
|
||||
import re
|
||||
from operator import itemgetter, attrgetter, methodcaller
|
||||
import Cfg
|
||||
from Hurricane import Breakpoint
|
||||
from Hurricane import DbU
|
||||
from Hurricane import Box
|
||||
from Hurricane import Transformation
|
||||
from Hurricane import Box
|
||||
from Hurricane import Path
|
||||
from Hurricane import Layer
|
||||
from Hurricane import Occurrence
|
||||
from Hurricane import Net
|
||||
from Hurricane import HyperNet
|
||||
from Hurricane import RoutingPad
|
||||
from Hurricane import Horizontal
|
||||
from Hurricane import Vertical
|
||||
from Hurricane import Contact
|
||||
from Hurricane import Pin
|
||||
from Hurricane import Plug
|
||||
from Hurricane import Instance
|
||||
import CRL
|
||||
from CRL import RoutingLayerGauge
|
||||
from helpers import trace, l, u, n
|
||||
from helpers.io import ErrorMessage
|
||||
from helpers.io import WarningMessage
|
||||
from helpers.io import catch
|
||||
from helpers.overlay import UpdateSession
|
||||
from plugins import getParameter
|
||||
from plugins import utils
|
||||
from plugins.alpha.block.configuration import GaugeConf
|
||||
from plugins.alpha.block.spares import Spares
|
||||
|
||||
|
||||
af = CRL.AllianceFramework.get()
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Class : "hfns.Cluster".
|
||||
|
||||
class Cluster ( object ):
    """
    Implementation of cluster of RoutingPads.

    A Cluster groups the sink RoutingPads (or, for the trunk level,
    sub-clusters) located under one leaf of the spare buffers quad-tree.
    ``splitNet()`` performs the actual re-wiring of the sinks onto the
    output of a buffer allocated from that leaf.
    """

    def __init__ ( self, bufferTree, leaf, anchor, depth ):
        # ``leaf`` is the spare quad-tree leaf the cluster belongs to
        # (None for the trunk cluster until one is selected).
        # ``anchor`` is the first sink: a RoutingPad at depth 0, a Cluster
        # at depth 1.
        self.depth      = depth
        self.bufferTree = bufferTree
        self.leaf       = leaf
        self.anchors    = [ anchor ]
        self.area       = Box( anchor.getCenter() )

    @property
    def size ( self ): return len(self.anchors)

    def getCenter ( self ):
        """Center of the cluster's bounding area."""
        return self.area.getCenter()

    def addAnchor ( self, anchor ):
        """Append one more sink (RoutingPad or sub-cluster) to the cluster."""
        self.anchors.append( anchor )

    def createBuffer ( self ):
        """Perform buffer allocation in the leaf."""
        self.instBuffer = self.leaf.selectFree()
        if self.instBuffer is None:
            raise ErrorMessage( 2, 'Cluster.createBuffer(): No more free buffer in leaf {}.' \
                                   .format(self.leaf) )

    @property
    def bInputPlug ( self ):
        """The input Plug of the buffer."""
        return utils.getPlugByName( self.instBuffer, self.bufferTree.spares.state.bufferConf.input )

    @property
    def bOutputPlug ( self ):
        """The output Plug of the buffer."""
        return utils.getPlugByName( self.instBuffer, self.bufferTree.spares.state.bufferConf.output )

    def createBufInputRp ( self, net ):
        """Create a RoutingPad for the buffer input Plug (terminal)."""
        return RoutingPad.create( net, Occurrence(self.bInputPlug), RoutingPad.BiggestArea )

    def createBufOutputRp ( self, net ):
        """Create a RoutingPad for the buffer output Plug (terminal)."""
        return RoutingPad.create( net, Occurrence(self.bOutputPlug), RoutingPad.BiggestArea )

    def splitNet ( self ):
        """
        Break the top net, re-attach the sinks of the net to the output of
        the buffer of the leaf.
        """
        spares  = self.bufferTree.spares
        netBuff = self.bufferTree.createSubNet()
        # Allocate the buffer first, then drive the new sub-net from it.
        self.createBuffer()
        self.bOutputPlug.setNet( netBuff )
        trace( 550, ',+', '\tCluster.splitNet(), size:{} depth:{} driver:{}\n' \
                          .format(self.size,self.depth,netBuff.getName()) )
        trace( 550, '\tSplit: {}\n'.format(self.leaf) )
        if len(self.anchors):
            trace( 550, '\t| Left  :{}\n'.format(self.leaf.getLeft  ()) )
            trace( 550, '\t| Right :{}\n'.format(self.leaf.getRight ()) )
            trace( 550, '\t| Bottom:{}\n'.format(self.leaf.getBottom()) )
            trace( 550, '\t| Top   :{}\n'.format(self.leaf.getTop   ()) )
        if len(self.anchors) > 30:
            # Past ~30 sinks a single buffer is electrically overloaded.
            print( WarningMessage( 'Cluster of "{}" still has {} sinks.' \
                                   .format(netBuff.getName(),len(self.anchors)) ))
        for anchor in self.anchors:
            if isinstance(anchor,Cluster):
                # Trunk level: the sink is a sub-cluster's buffer input.
                trace( 550, '\tcluster input: "{}"\n'.format(netBuff) )
                anchor.bInputPlug.setNet( netBuff )
            else:
                # Leaf level: rebind the sink Plug, possibly through the
                # hierarchy (raddTransNet creates the intermediate nets).
                plug        = anchor.getPlugOccurrence()
                deepPlug    = spares.raddTransNet( netBuff, plug.getPath() )
                deepNetBuff = deepPlug.getMasterNet() if deepPlug else netBuff
                trace( 550, '\tdeepNetBuf: "{}"\n'.format(deepNetBuff) )
                if isinstance(plug.getEntity(),Pin):
                    print( 'PIN, SKIPPED for {}'.format(deepNetBuff.getName()) )
                    continue
                plug.getEntity().setNet( deepNetBuff )
                # The old RoutingPad now belongs to the dismantled net.
                anchor.destroy()
        trace( 550, ',-' )
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Class : "hfns.BufferTree".
|
||||
|
||||
class BufferTree ( object ):
    """
    Build a buffer tree for a high fanout net.

    The tree has exactly two levels:

    1. The *leaf* level: clusters of RoutingPads, grouped by the spare
       quad-tree leaf under which they are located (``_buildLeafLevel()``).
    2. The *trunk* level: one single cluster gathering all the leaf
       clusters (``_buildTrunkLevel()``).

    ``buildBTree()`` is the entry point: it clusterizes the sinks then
    performs the net splitting.
    """

    # Matches VHDL vector bits such as ``my_vector(3)``.
    patVhdlVector = re.compile( r'(?P<name>.*)\((?P<index>\d+)\)' )

    def __init__ ( self, spares, net ):
        """
        :param spares: the Spares manager, providing the quad-tree of
                       free buffers.
        :param net:    the high fanout net to buffer.
        """
        trace( 550, '\tBufferTree.__init__() on "{}".\n'.format(net.getName()) )
        self.spares       = spares
        self.net          = net
        self.isDeepNet    = True
        self.clusterDepth = 0
        # clusters[0]: leaf level, clusters[1]: trunk level (single cluster).
        self.clusters     = [ [] ]
        self.netCount     = 0
        self.netName      = self.net.getName()
        self.netIndex     = None
        m = BufferTree.patVhdlVector.match( self.net.getName() )
        if m:
            # Vector bit: remember base name and index separately for
            # sub-net naming (see createSubNet()).
            self.netName  = m.group('name')
            self.netIndex = m.group('index')
        trace( 550, '-' )

    def getLeafCluster ( self, leaf ):
        """Return the cluster anchored on quad-tree ``leaf``, None if yet unseen."""
        for cluster in self.clusters[0]:
            if cluster.leaf == leaf: return cluster
        return None

    def _addRpToCluster ( self, rp ):
        """Add ``rp`` to the cluster of its quad-tree leaf, creating it if needed."""
        trace( 550, '\tBufferTree._addRpToCluster(): @{} {}\n' \
                    .format(rp.getPosition(),rp) )
        leaf    = self.spares.quadTree.getLeafUnder( rp.getPosition() )
        cluster = self.getLeafCluster( leaf )
        if cluster:
            cluster.addAnchor( rp )
        else:
            self.clusters[0].append( Cluster( self, leaf, rp, self.clusterDepth ) )
        trace( 550, '\tUsing leaf: {}\n'.format(leaf) )

    def createSubNet ( self ):
        """
        Create a new sub-net for a buffer driver. If the signal is a bit
        from a vector, unvectorize but keep a ``bitX`` tag in it. For example,
        the third (i.e. index 2) auxiliary signal for ``my_vector(3)`` will give
        ``my_vector_bit3_hfns_2``.
        """
        if self.netIndex is None:
            subNetName = '{}_hfns_{}'.format( self.netName, self.netCount )
        else:
            subNetName = '{}_bit{}_hfns_{}'.format( self.netName, self.netIndex, self.netCount )
        net = Net.create( self.spares.state.cell, subNetName )
        self.netCount += 1
        return net

    def _buildLeafLevel ( self ):
        """
        Build clusters of RP by grouping them by the spare quad-tree leafs.

        The driver RoutingPad is detected on the fly and kept out of the
        clusters. An external Pin is used as driver only as a last resort,
        when no instance output drives the net; otherwise it is treated as
        one more sink.
        """
        trace( 550, ',+', '\tBufferTree._buildLeafLevel() on "{}" ...\n'.format(self.net.getName()) )
        self.rpDriver = None
        pinRp         = None
        for rp in self.net.getRoutingPads():
            rpOccurrence = rp.getPlugOccurrence()
            entity       = rpOccurrence.getEntity()
            if rpOccurrence.getPath().isEmpty():
                # At least one top-level connection: not a DeepNet.
                self.isDeepNet = False
            if isinstance(entity,Pin):
                # Memorize the Pin, decide its role after the loop.
                pinRp = rp
                continue
            masterNet = entity.getMasterNet()
            if masterNet.getDirection() & Net.Direction.DirIn:
                self._addRpToCluster( rp )
            else:
                trace( 550, '\tDriver:{}.\n'.format(rp) )
                self.rpDriver = rp
        if pinRp:
            # Bug fix: this branch previously used the stale loop variable
            # ``rp`` (the last RoutingPad iterated) instead of ``pinRp``.
            if self.rpDriver is None:
                trace( 550, '\tDriver (external pin):{}.\n'.format(pinRp) )
                self.rpDriver = pinRp
            else:
                self._addRpToCluster( pinRp )
        trace( 550, '-' )

    def _buildTrunkLevel ( self ):
        """Gather all the leaf clusters under one single trunk cluster."""
        trace( 550, ',+', '\tBufferTree._buildTrunkLevel() on "{}" ...\n'.format(self.net.getName()) )
        self.clusterDepth += 1
        trunkCluster = None
        for cluster in self.clusters[0]:
            if trunkCluster:
                trunkCluster.addAnchor( cluster )
                continue
            # First leaf cluster seeds the trunk cluster.
            trunkCluster = Cluster( self, None, cluster, self.clusterDepth )
        trunkCluster.leaf = self.spares.quadTree.getFreeLeafUnder( trunkCluster.area )
        self.clusters.append( [ trunkCluster ] )
        trace( 550, '-' )

    def _splitNet ( self ):
        """
        Perform the actual splitting of the net into sub-trees. Mostly calls
        ``Cluster.splitNet()`` then connect the top cluster root to the original
        signal.
        """
        for cluster in self.clusters[0]:
            cluster.splitNet()
        self.clusters[1][0].splitNet()
        if self.isDeepNet:
            # Must convert from a DeepNet into a real top Net to be saved.
            driverRpOcc = self.rpDriver.getPlugOccurrence()
            topNetName  = self.net.getName()
            self.net.destroy()
            self.net = Net.create( self.spares.state.cell, topNetName )
            deepPlug      = self.spares.raddTransNet( self.net, driverRpOcc.getPath() )
            deepDriverNet = deepPlug.getMasterNet()
            driverRpOcc.getEntity().setNet( deepDriverNet )
            RoutingPad.create( self.net, driverRpOcc, RoutingPad.BiggestArea )
        # NOTE(review): splitNet() above already allocated a buffer for the
        # trunk cluster; this second createBuffer() call selects *another*
        # free buffer, whose output is left unconnected -- confirm intended.
        self.clusters[1][0].createBuffer()
        self.clusters[1][0].createBufInputRp( self.net )
        trace( 550, '\tRoot input: {}\n'.format(self.clusters[1][0].bInputPlug) )

    def buildBTree ( self ):
        """Entry point: clusterize the sinks, then split the net."""
        self._buildLeafLevel()
        self._buildTrunkLevel()
        self._splitNet()
|
|
@ -0,0 +1,619 @@
|
|||
#
|
||||
# This file is part of the Coriolis Software.
|
||||
# Copyright (c) SU 2020-2020, All Rights Reserved
|
||||
#
|
||||
# +-----------------------------------------------------------------+
|
||||
# | C O R I O L I S |
|
||||
# | C u m u l u s - P y t h o n T o o l s |
|
||||
# | |
|
||||
# | Author : Jean-Paul CHAPUT |
|
||||
# | E-mail : Jean-Paul.Chaput@lip6.fr |
|
||||
# | =============================================================== |
|
||||
# | Python : "./plugins/block/hfns.py" |
|
||||
# +-----------------------------------------------------------------+
|
||||
|
||||
"""
|
||||
Manage High Fanout Net Synthesis (HFNS).
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
import os.path
|
||||
import re
|
||||
from operator import itemgetter, attrgetter, methodcaller
|
||||
import Cfg
|
||||
from Hurricane import Breakpoint
|
||||
from Hurricane import DbU
|
||||
from Hurricane import Box
|
||||
from Hurricane import Transformation
|
||||
from Hurricane import Box
|
||||
from Hurricane import Path
|
||||
from Hurricane import Layer
|
||||
from Hurricane import Occurrence
|
||||
from Hurricane import Net
|
||||
from Hurricane import HyperNet
|
||||
from Hurricane import RoutingPad
|
||||
from Hurricane import Horizontal
|
||||
from Hurricane import Vertical
|
||||
from Hurricane import Contact
|
||||
from Hurricane import Pin
|
||||
from Hurricane import Plug
|
||||
from Hurricane import Instance
|
||||
import CRL
|
||||
from CRL import RoutingLayerGauge
|
||||
from helpers import trace, l, u, n
|
||||
from helpers.io import ErrorMessage
|
||||
from helpers.io import WarningMessage
|
||||
from helpers.io import catch
|
||||
from helpers.overlay import UpdateSession
|
||||
from plugins import getParameter
|
||||
from plugins import utils
|
||||
from plugins.alpha.block.configuration import GaugeConf
|
||||
from plugins.alpha.block.spares import Spares
|
||||
from plugins.alpha.block import timing
|
||||
from plugins.alpha.block import rsmt
|
||||
|
||||
|
||||
timing.staticInit()
|
||||
rsmt.staticInit()
|
||||
af = CRL.AllianceFramework.get()
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Class : "hfns.SlicedArea".
|
||||
|
||||
class SlicedArea ( object ):
    """
    Perform the buffer creation and insertion for a Cluster. It can request
    a free buffer from the spare set or insert a new one directly in the
    design. The second option is still available but now unused, kept as
    code example (may be needed in the future).
    """

    def __init__ ( self, cluster ):
        """
        Create the sliced area and perform an immediate buffer allocation
        from the spare set. Hint for a position inside of the cluster's area
        but closest to the parent's center area (so, ideally, on the cluster's
        edge).

        :param cluster: the Cluster the buffer is allocated for.
        """
        # (Removed an unused local: ``state`` was computed but never read.)
        self.cluster = cluster
        if cluster.parent is None:
            # Root cluster: no parent to pull towards, use its own center.
            attractor = cluster.getCenter()
        else:
            attractor = cluster.parent.area.getCenter()
        self.instBuffer = cluster.bufferTree.spares.getFreeBufferUnder( cluster.area, attractor )

    @property
    def buffer ( self ):
        """The buffer instance."""
        return self.instBuffer

    @property
    def bInputPlug ( self ):
        """The input Plug of the buffer."""
        return utils.getPlugByName( self.buffer, self.cluster.bufferTree.spares.state.bufferConf.input )

    @property
    def bOutputPlug ( self ):
        """The output Plug of the buffer."""
        return utils.getPlugByName( self.buffer, self.cluster.bufferTree.spares.state.bufferConf.output )
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Class : "hfns.Cluster".
|
||||
|
||||
class Cluster ( object ):
    """
    Implementation of cluster of RoutingPads. This is a disjoint-set data
    structure (ref. https://en.wikipedia.org/wiki/Disjoint-set_data_structure).

    We manage two kind of trees, do not mistake them:

    1. The cluster's own tree, that is, the union set. ``self.root`` and
       ``self.parent`` belongs to that structure.
    2. The tree *of* Clusters. Recursive functions like ``rsplitNet()``
       and ``rcreateBuffer()`` belongs to that super-tree.

    The ``snapshot()`` and ``rrebindRp()`` are kept just in case. They allow
    to keep the cluster partitionning between iterations of the placer by
    replacing RoutingPad (which get erased) by Plug occurrences which are
    stables.
    """

    def __init__ ( self, bufferTree, anchor, depth ):
        # ``anchor`` is either a RoutingPad (leaf level) or a Cluster
        # (upper levels); it also supplies the cluster identifier.
        self.depth         = depth
        self.bufferTree    = bufferTree
        self.anchor        = anchor
        self.mergedAnchors = [ anchor ]
        self.parent        = None    # Union-find parent; None while this is a root.
        self.rank          = 0       # Union-find rank.
        self.size          = 1
        self.area          = Box( anchor.getCenter() )
        self.estimatedWL   = timing.tech.getOneSinkEqWL()
        self.bInputRp      = None    # Buffer input RoutingPad, set by createBufInputRp().
        self.bOutputRp     = None    # Buffer output RoutingPad, set by createBufOutputRp().

    def __str__ ( self ):
        parentId = 'None' if self.parent is None else str(self.parent.id)
        s = '<Cluster d:{} par:{} id:{} sz:{} area:{}x{} WL:{}>' \
            .format( self.depth
                   , parentId
                   , self.id
                   , self.size
                   , DbU.getValueString(self.area.getWidth())
                   , DbU.getValueString(self.area.getHeight())
                   , DbU.getValueString(self.estimatedWL) )
        return s

    def __cmp__ ( self, other ):
        # Python 2 style ordering, by cluster identifier.
        if other is None: return 1
        if self.id < other.id: return -1
        if self.id > other.id: return 1
        return 0

    @property
    def buffer ( self ):
        """The buffer instance (proxy to slicedArea)."""
        return self.slicedArea.buffer

    @property
    def bInputPlug ( self ):
        """The input Plug of the buffer (proxy to slicedArea)."""
        return self.slicedArea.bInputPlug

    @property
    def bOutputPlug ( self ):
        """The output Plug of the buffer (proxy to slicedArea)."""
        return self.slicedArea.bOutputPlug

    @property
    def id ( self ):
        """Identifier borrowed from the anchor entity (0 when unusable)."""
        if self.anchor is None: return 0
        if not isinstance(self.anchor,Cluster) and not self.anchor.isBound(): return 0
        return self.anchor.getId()

    def getId ( self ):
        return self.id

    def isRoot ( self ): return self.parent is None

    def getCenter ( self ):
        """Center of the cluster's bounding area."""
        return self.area.getCenter()

    def edgeDistance ( self, other ):
        """Manhattan distance between the two areas' borders (0 on overlap)."""
        if self.area.intersect(other.area): return 0
        dx = 0
        dy = 0
        if self.area.getXMax() < other.area.getXMin(): dx = other.area.getXMin() - self.area.getXMax()
        if self.area.getXMin() > other.area.getXMax(): dx = self.area.getXMin() - other.area.getXMax()
        if self.area.getYMax() < other.area.getYMin(): dy = other.area.getYMin() - self.area.getYMax()
        if self.area.getYMin() > other.area.getYMax(): dy = self.area.getYMin() - other.area.getYMax()
        return dx+dy

    def getBufferCenter ( self ):
        """Center of the buffer instance's abutment box, in cell coordinates."""
        instBuf = self.slicedArea.buffer
        ab = instBuf.getMasterCell().getAbutmentBox()
        instBuf.getTransformation().applyOn( ab )
        return ab.getCenter()

    def mergeAnchor ( self, anchor ):
        """Direct merge of an anchor (not a cluster merge)."""
        self.mergedAnchors.append( anchor )

    def getRoot ( self ):
        """Find the root, performing simple path compression as it goes."""
        #trace( 550, ',+', '\tCluster.getRoot() of id:{}\n'.format(self.id) )
        root = self
        #trace( 550, '\t+ Finding root:\n' )
        while root.parent is not None:
            root = root.parent
            #trace( 550, '\t| id:{}\n'.format(root.id) )
        node = self
        #trace( 550, '\t+ Compressing path:\n' )
        while node.parent is not None:
            pnode = node.parent
            node.parent = root
            node = pnode
            #trace( 550, '\t| id:{}\n'.format(node.id) )
        #trace( 550, ',-', '\t> Root of id:{} is id:{}\n'.format(self.id,root.id) )
        return root

    def merge ( self, other, edge ):
        """
        Union by rank. Merge ``other``'s set into this one's (through their
        roots) and accumulate size, area and estimated wirelength.

        :param edge: the Edge joining the two clusters; its current
                     ``clusterDistance`` is added to the estimated WL.
        :returns: the surviving root cluster.
        """
        #trace( 550, ',+', '\tCluster.merge() id:{} with id:{}\n' \
        #            .format(self.id,other.id) )
        root1 = self.getRoot()
        root2 = other.getRoot()
        if root1 != root2:
            if root1.rank < root2.rank:
                # Ensure root1 is the higher-ranked (surviving) root.
                root1, root2 = root2, root1
            # NOTE(review): classic union-by-rank increments the rank only
            # when the two ranks are *equal*; here it increments when they
            # differ -- verify this is intended.
            if root1.rank != root2.rank:
                root1.rank += 1
            trace( 550, '\troot1:{}\n'.format(root1) )
            trace( 550, '\troot2:{}\n'.format(root2) )
            trace( 550, '\tedge length:{}\n'.format(DbU.getValueString(edge.clusterDistance)) )
            edgeLength = edge.clusterDistance
            root1.area.merge( root2.area )
            root1.size += root2.size
            root1.mergedAnchors += root2.mergedAnchors
            root1.estimatedWL += root2.estimatedWL + edgeLength
            root2.parent = root1
            trace( 550, '\troot1 (merged):{}\n'.format(root1) )
            #trace( 550, ',-', '\tMerge id:{} <= id:{} done\n' \
            #            .format(root1.id,root2.id) )
        else:
            # Both anchors already belong to the same set: nothing to do.
            pass
            #trace( 550, ',-', '\tMerge id:{} and id:{} already done\n' \
            #            .format(root1.id,root2.id) )
        return root1

    def createBufInputRp ( self, net ):
        """Create a RoutingPad for the buffer input Plug (terminal)."""
        self.bInputPlug.setNet( net )
        self.bInputRp = RoutingPad.create( net, Occurrence(self.bInputPlug), RoutingPad.BiggestArea )
        return self.bInputRp

    def createBufOutputRp ( self, net ):
        """Create a RoutingPad for the buffer output Plug (terminal)."""
        self.bOutputPlug.setNet( net )
        self.bOutputRp = RoutingPad.create( net, Occurrence(self.bOutputPlug), RoutingPad.BiggestArea )
        return self.bOutputRp

    def setRootDriver ( self, net ):
        """Connect the top-level buffer input to the original signal."""
        if not self.isRoot():
            raise ErrorMessage( 2, 'Cluster.setRootDriver(): Must be called only on the top root cluster.' )
        self.createBufInputRp( net )

    def createBuffer ( self ):
        """Create the SlicedArea which will create/insert the buffer of the cluster."""
        if not self.isRoot():
            raise ErrorMessage( 2, 'Cluster.createBuffer(): Only root cluster should have buffer.' )
        self.slicedArea = SlicedArea( self )

    def rcreateBuffer ( self ):
        """Recursively call ``createBuffer()`` on the whole cluster hierarchy."""
        self.createBuffer()
        for anchor in self.mergedAnchors:
            if isinstance(anchor,Cluster):
                anchor.rcreateBuffer()

    def rsplitNet ( self ):
        """
        Perform the actual splitting of the net into subnets. This is a
        recursive function. One driver net will be created by cluster.
        """
        if not self.isRoot():
            raise ErrorMessage( 2, 'Cluster.connect(): Only root cluster should be connecteds.' )
        spares  = self.bufferTree.spares
        netBuff = self.bufferTree.createSubNet()
        self.createBufOutputRp( netBuff )
        trace( 550, ',+', '\tCluster.rsplitNet(), size:{} depth:{} driver:{}\n' \
                    .format(self.size,self.depth,netBuff.getName()) )
        if len(self.mergedAnchors) > 30:
            # Past ~30 sinks a single buffer is electrically overloaded.
            print( WarningMessage( 'Cluster.rsplitNet(): Top cluster of "{}" still has {} sinks.' \
                                   .format(netBuff.getName(),len(self.mergedAnchors)) ))
        for anchor in self.mergedAnchors:
            if isinstance(anchor,Cluster):
                # Sub-cluster: wire its buffer input, then recurse.
                trace( 550, '\tcluster input: "{}"\n'.format(netBuff) )
                anchor.createBufInputRp( netBuff )
                anchor.rsplitNet()
            else:
                # Sink RoutingPad: rebind its Plug, possibly through the
                # hierarchy (raddTransNet creates the intermediate nets).
                plug        = anchor.getPlugOccurrence()
                deepPlug    = spares.raddTransNet( netBuff, plug.getPath() )
                deepNetBuff = deepPlug.getMasterNet() if deepPlug else netBuff
                trace( 550, '\tdeepNetBuf: "{}"\n'.format(deepNetBuff) )
                if isinstance(plug.getEntity(),Pin):
                    print( 'PIN, SKIPPED for {}'.format(deepNetBuff.getName()) )
                    continue
                plug.getEntity().setNet( deepNetBuff )
                # The old RoutingPad now belongs to the dismantled net.
                anchor.destroy()
        trace( 550, ',-' )

    def buildGR ( self ):
        """
        Build the global routing of the buffer sub-net as a Steiner tree
        (FLUTE) over the driver and sink RoutingPads. All anchors must be
        sub-clusters (their buffer RPs must already exist).
        """
        for anchor in self.mergedAnchors:
            if not isinstance(anchor,Cluster):
                message = [ 'Cluster.buildGR(): One anchor is not a cluster.' ]
                message.append( 'On {}'.format(self) )
                for i in range(len(self.mergedAnchors)):
                    message.append( '{:3} | {}'.format(i,self.mergedAnchors[i]) )
                print( ErrorMessage( 2, message ) )
                return

        bufNet = self.bOutputPlug.getNet()
        graph  = rsmt.RSMT( bufNet )
        driverCenter = self.bOutputRp.getPosition()
        # Y is snapped onto the GCell/slice grid (presumably so the graph
        # point and the RoutingPad end up in the same GCell -- see commit
        # notes; confirm against Spares.toYGCellGrid()).
        graph.addNode( self
                     , driverCenter.getX()
                     , self.bufferTree.spares.toYGCellGrid(driverCenter.getY())
                       + self.bufferTree.spares.state.gaugeConf.sliceHeight / 2
                     , rsmt.Node.Driver )
        for anchor in self.mergedAnchors:
            sinkCenter = anchor.bInputRp.getPosition()
            graph.addNode( anchor
                         , sinkCenter.getX()
                         , self.bufferTree.spares.toYGCellGrid(sinkCenter.getY())
                           + self.bufferTree.spares.state.gaugeConf.sliceHeight / 2 )
        #graph.doIteratedOneSteiner()
        graph.doFlute()
        graph.createGRSegments()

    def show ( self ):
        """Select the RoutingPad of the cluster in the editor."""
        editor = self.bufferTree.spares.state.editor
        if not editor: return False
        editor.unselectAll()
        editor.setCumulativeSelection( True )
        editor.setShowSelection( True )
        area = Box( self.area )
        area.inflate( l(10.0) )
        editor.reframe( area, False )
        #editor.select( self.anchor.getOccurrence() )
        for anchor in self.mergedAnchors:
            if isinstance(anchor,Cluster):
                continue
            else:
                editor.select( anchor.getOccurrence() )
        return True
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Class : "hfns.Edge".
|
||||
|
||||
class Edge ( object ):
    """
    An edge linking two Clusters. Its length is the Manhattan distance
    between the two cluster areas. As Clusters grow, the live distance
    changes over time; to compare edges on a stable value, the distance
    measured at creation time is cached in the ``length`` attribute.
    """

    def __init__ ( self, source, target ):
        self.source = source
        self.target = target
        self.length = self.clusterDistance

    @property
    def clusterLength ( self ):
        """Live Manhattan distance, cluster center to cluster center (not the cached value)."""
        return self.target.getCenter().manhattanDistance( self.source.getCenter() )

    @property
    def clusterDistance ( self ):
        """Live Manhattan distance, cluster edge to cluster edge (not the cached value)."""
        return self.source.edgeDistance( self.target )

    def __cmp__ ( self, other ):
        """Python 2 three-way comparison on the cached initial length."""
        return (self.length > other.length) - (self.length < other.length)
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Class : "hfns.BufferTree".
|
||||
|
||||
class BufferTree ( object ):
    """
    Build a buffer tree for a high fanout net. Recursively build clusters
    using Kruskal algorithm (https://en.wikipedia.org/wiki/Kruskal's_algorithm).

    All subnets are created at the design top level (like for clock tree)
    so they are not ``DeepNet``; the driver itself is pulled up to the top
    level if needs be.
    """

    # Matches a VHDL vector bit like ``name(3)`` (groups: ``name``, ``index``).
    patVhdlVector = re.compile( r'(?P<name>.*)\((?P<index>\d+)\)' )

    def __init__ ( self, spares, net ):
        """
        Args:
            spares: the Spares manager (buffer pools, net helpers).
            net:    the high fanout net to split into a buffer tree.
        """
        trace( 550, '\tBufferTree.__init__() on "{}".\n'.format(net.getName()) )
        self.spares       = spares
        self.net          = net
        self.isDeepNet    = True
        self.clusterDepth = 0
        # clusters[d] is the list of clusters at depth d.
        self.clusters     = [ [] ]
        self.bufName      = self.spares.state.bufferConf.name
        self.netCount     = 0
        self.netName      = self.net.getName()
        self.netIndex     = None
        m = BufferTree.patVhdlVector.match( self.net.getName() )
        if m:
            self.netName  = m.group('name')
            self.netIndex = m.group('index')

    @property
    def root ( self ):
        """The root cluster of the tree (must be unique...)"""
        if len(self.clusters[-1]) != 1:
            raise ErrorMessage( 2, 'BufferTree.root: No, or multiple root for "{}".' \
                                   .format(self.net.getName()) )
        return self.clusters[-1][0]

    @property
    def edgeLimit ( self ):
        """
        Maximum length of Edge to consider. Edges above this threshold will be
        pruned from the set given to Kruskal.
        """
        levelFactor = 1
        # NOTE(review): the getWlEstimate() result is discarded here in the
        # original code — presumably a leftover; confirm before removing.
        if self.clusterDepth == 0: timing.tech.getWlEstimate( self.bufName, 1 )
        else:                      levelFactor = 4*self.clusterDepth
        return levelFactor*l(700)

    def createSubNet ( self ):
        """
        Create a new sub-net for a buffer driver. If the signal is a bit
        from a vector, unvectorize but keep a ``bitX`` tag in it. For example,
        the third (i.e. index 2) auxiliary signal for ``my_vector(3)`` will give
        ``my_vector_bit3_2``.
        """
        if self.netIndex is None:
            subNetName = '{}_hfns_{}'.format( self.netName, self.netCount )
        else:
            subNetName = '{}_bit{}_hfns_{}'.format( self.netName, self.netIndex, self.netCount )
        net = Net.create( self.spares.state.cell, subNetName )
        self.netCount += 1
        return net

    def canMerge ( self, edge ):
        """
        Control the merge criterion between the two clusters of ``edge``.
        At depth 0 the criterion is the estimated wirelength against the
        timing-driven maximum; above depth 0 the number of sinks must stay
        below 30 and the half-perimeter below ``2*edgeLimit``.
        """
        clusterA = edge.source.getRoot()
        clusterB = edge.target.getRoot()
        # Already in the same cluster: nothing to merge.
        if clusterA == clusterB:
            return False
        if self.clusterDepth == 0:
            estimatedWL = clusterA.estimatedWL + clusterB.estimatedWL + edge.clusterDistance
            maxWL       = timing.tech.getWlEstimate( self.bufName, clusterA.size+clusterB.size )
            area        = Box( clusterA.area )
            area.merge( clusterB.area )
            # NOTE(review): hpWL is computed but unused in this branch.
            hpWL = (area.getWidth() + area.getHeight()) // 2
            trace( 550, '\t> BufferTree.canMerge(): estimatedWL >= maxWL ({} >= {}).\n' \
                        .format(DbU.getValueString(estimatedWL),DbU.getValueString(maxWL)) )
            if estimatedWL >= maxWL:
                return False
            trace( 550, '\t> Reject merge: estimatedWL >= maxWL ({} >= {}).\n' \
                        .format(DbU.getValueString(estimatedWL),DbU.getValueString(maxWL)) )
            return True

        if clusterA.size + clusterB.size > 30:
            trace( 550, '\t> Reject merge, over size threshold of 30.\n' )
            return False
        area = Box( clusterA.area )
        area.merge( clusterB.area )
        # ``//``: integer DbU values, keep Python 2 floor semantics.
        hpwl = (area.getWidth() + area.getHeight()) // 2
        if hpwl > 2*self.edgeLimit:
            trace( 550, '\t> Reject merge, over HPWL threshold of 2*{}.\n' \
                        .format(DbU.getValueString(self.edgeLimit)))
            return False
        else:
            trace( 550, '\t> Accepted merge, future area is {}x{}.\n' \
                        .format( DbU.getValueString(area.getWidth ())
                               , DbU.getValueString(area.getHeight()) ))
        return True

    def doKruskal ( self ):
        """
        Do Kruskal algorithm. We do not perform a complete Kruskal as
        *too long* edges are pruned and we do not keep tracks of edges,
        we just want a cluster of close RoutingPad, not a minimum
        spanning tree. Returns the number of remaining root clusters.
        """
        trace( 550, ',+', '\tBufferTree.doKruskal()\n' )
        trace( 550, '\tBuilding edges, max length:{} ...\n'.format(DbU.getValueString(self.edgeLimit)) )
        # NOTE(review): maxWL is computed but unused here.
        maxWL = timing.tech.getWlEstimate( self.bufName, 26 )
        trace( 550, '\tmaxWL:{}\n'.format(DbU.getValueString(maxWL)) )
        clusters = self.clusters[-1]
        edges    = []
        # Build all pairwise edges below the pruning threshold.
        for i in range(len(clusters)):
            for j in range(i+1,len(clusters)):
                edge = Edge( clusters[i], clusters[j] )
                if edge.length < self.edgeLimit:
                    edges.append( edge )
        trace( 550, '\tSorting {} edges ...\n'.format(len(edges)) )
        edges.sort( key=attrgetter('length') )
        trace( 550, '\tProcessing edges ...\n' )
        clustersCount = len(clusters)
        for i in range(len(edges)):
            edge = edges[i]
            trace( 550, '\t| Process [{:3d}], length:{} clusterDistance:{}\n' \
                        .format( i, DbU.getValueString(edge.length)
                               , DbU.getValueString(edge.clusterDistance)) )
            if not self.canMerge(edge):
                continue
            edge.source.merge( edge.target, edge )
            trace( 550, '\t> Merged cluster: {}\n'.format(edge.source.getRoot()) )
            clustersCount -= 1
        trace( 550, '\tClusters count: {}\n'.format(clustersCount) )
        for cluster in clusters:
            if cluster.isRoot():
                trace( 550, '\t | {}\n'.format(cluster) )
        trace( 550, '-' )
        return clustersCount

    def _rclusterize ( self ):
        """
        Recursively performs the Kruskal algorithm until only *one* root
        cluster remains. First level is clusters of RoutingPad, then
        clusters of clusters.
        """
        trace( 550, ',+', '\tBufferTree.buildBTree() on "{}" ...\n'.format(self.net.getName()) )
        self.rpDriver = None
        pinRp         = None
        for rp in self.net.getRoutingPads():
            rpOccurrence = rp.getPlugOccurrence()
            entity       = rpOccurrence.getEntity()
            if rpOccurrence.getPath().isEmpty():
                self.isDeepNet = False
            if isinstance(entity,Pin):
                # Keep Pins aside: they become the driver only if no real one is found.
                pinRp = rp
                continue
            masterNet = entity.getMasterNet()
            if masterNet.getDirection() & Net.Direction.DirIn:
                self.clusters[0].append( Cluster(self,rp,self.clusterDepth) )
            else:
                trace( 550, '\tDriver:{}.\n'.format(rp) )
                self.rpDriver = rp
        if pinRp:
            if self.rpDriver is None:
                # Bug fix: use ``pinRp``, not the stale loop variable ``rp``.
                trace( 550, '\tDriver (external pin):{}.\n'.format(pinRp) )
                self.rpDriver = pinRp
            else:
                self.clusters[0].append( Cluster(self,pinRp,self.clusterDepth) )
        if len(self.clusters[0]) > 1:
            self.doKruskal()
        # Merge all remaining roots into a single depth-1 cluster.
        self.clusters.append( [] )
        for cluster in self.clusters[0]:
            if cluster.isRoot():
                if len(self.clusters[1]) == 0:
                    self.clusters[1].append( Cluster(self,cluster,1) )
                else:
                    self.clusters[1][0].mergeAnchor( cluster )
        self.clusterDepth += 1
        trace( 550, '-' )

    def _rcreateBuffer ( self ):
        """Proxy to ``Cluster.rcreateBuffer()``."""
        if not self.root:
            raise ErrorMessage( 2, 'BufferTree.rcreateBuffer(): Clusters must be built first.' )
        self.root.rcreateBuffer()

    def _splitNet ( self ):
        """
        Perform the actual splitting of the net into sub-trees. Mostly calls
        ``Cluster.rsplitNet()`` then connect the top cluster root to the original
        signal.
        """
        if not self.root:
            raise ErrorMessage( 2, 'BufferTree.splitNet(): Clusters must be built first.' )
        self.root.rsplitNet()
        if self.isDeepNet:
            # Must convert from a DeepNet into a real top Net to be saved.
            driverRpOcc = self.rpDriver.getPlugOccurrence()
            topNetName  = self.net.getName()
            self.net.destroy()
            self.net = Net.create( self.spares.state.cell, topNetName )
            deepPlug      = self.spares.raddTransNet( self.net, driverRpOcc.getPath() )
            deepDriverNet = deepPlug.getMasterNet()
            driverRpOcc.getEntity().setNet( deepDriverNet )
            RoutingPad.create( self.net, driverRpOcc, RoutingPad.BiggestArea )
        self.root.setRootDriver( self.net )
        trace( 550, '\tRoot input: {}\n'.format(self.root.bInputPlug) )

    def buildBTree ( self ):
        """Top level: clusterize, insert buffers, split the net, build the GR trunk."""
        self._rclusterize()
        self._rcreateBuffer()
        self._splitNet()
        self.root.buildGR()
|
|
@ -0,0 +1,531 @@
|
|||
#
|
||||
# This file is part of the Coriolis Software.
|
||||
# Copyright (c) SU 2014-2020, All Rights Reserved
|
||||
#
|
||||
# +-----------------------------------------------------------------+
|
||||
# | C O R I O L I S |
|
||||
# | C u m u l u s - P y t h o n T o o l s |
|
||||
# | |
|
||||
# | Author : Jean-Paul CHAPUT |
|
||||
# | E-mail : Jean-Paul.Chaput@lip6.fr |
|
||||
# | =============================================================== |
|
||||
# | Python : "./plugins/block/rsmt.py" |
|
||||
# +-----------------------------------------------------------------+
|
||||
|
||||
"""
|
||||
Rectilinear Steiner Minimum Spaning Tree (RSMT) Module.
|
||||
|
||||
References:
|
||||
|
||||
.. [1] A. B. Kahng and G. Robins. A new class of iterative steiner tree
|
||||
heuristics with good performance. IEEE Transactions Computer-Aided
|
||||
Design, 11(7):893-902, July 1992.
|
||||
|
||||
.. [2] A. B. Kahng and G. Robins. On Optimal Interconnections for VLSI.
|
||||
Kluwer Academic Publishers, Boston, MA, 1995.
|
||||
|
||||
.. [3] Gabriel Robins and Alexander Zelikovsky.
|
||||
Minimum Steiner Tree Construction, *reference for myself*.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import Cfg
|
||||
import Hurricane
|
||||
from Hurricane import DbU
|
||||
from Hurricane import Box
|
||||
from Hurricane import Point
|
||||
from Hurricane import Path
|
||||
from Hurricane import Occurrence
|
||||
from Hurricane import Breakpoint
|
||||
from Hurricane import DataBase
|
||||
from Hurricane import UpdateSession
|
||||
from Hurricane import Net
|
||||
from Hurricane import RoutingPad
|
||||
from Hurricane import Contact
|
||||
from Hurricane import Horizontal
|
||||
from Hurricane import Vertical
|
||||
from Hurricane import Instance
|
||||
import Flute
|
||||
import Viewer
|
||||
import CRL
|
||||
from CRL import RoutingLayerGauge
|
||||
import helpers
|
||||
from helpers import trace
|
||||
from helpers.io import ErrorMessage
|
||||
from helpers.io import WarningMessage
|
||||
import plugins
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Class : "rsmt.Node".
|
||||
|
||||
class Node ( object ):
    """
    A node of the Steiner graph. Either a *graph point*, bound to a
    Cluster (and its buffer RoutingPad), or a *Steiner point* added by
    the tree builders. ``distance`` and ``back`` are scratch attributes
    for Prim's algorithm: the distance to the growing tree and the
    nearest node already in the tree.
    """

    GraphPoint   = 0x0001   # Node bound to a Cluster.
    SteinerPoint = 0x0002   # Node added by a Steiner heuristic.
    KeepPoint    = 0x0004   # Point to keep in the final tree.
    Driver       = 0x0008   # Node is the trunk driver.

    @staticmethod
    def showNodes ( nodes ):
        """Trace helper: display a sequence of nodes."""
        for node in nodes:
            trace( 542, '\t| {}\n'.format(node) )

    def __init__ ( self, cluster, x, y, flags=0 ):
        """
        Create a node at (``x``, ``y``). A ``None`` cluster makes a
        Steiner point, otherwise a graph (keep) point.
        """
        self.cluster  = cluster
        self.edges    = []
        self.position = Point( x, y )
        # Bug fix: was ``self._back`` while update()/RMST.doPrim() use ``back``.
        self.back     = None
        # "Infinite" initial distance for Prim's relaxation.
        self.distance = DbU.fromLambda( 100000.0 )
        self.flags    = flags
        if cluster: self.flags = self.flags|Node.GraphPoint|Node.KeepPoint
        else:       self.flags = self.flags|Node.SteinerPoint
        self.gcontact = None
        return

    def __cmp__ ( self, other ):
        """Python 2 three-way ordering on ``distance``."""
        return self.distance - other.distance

    def __lt__ ( self, other ):
        """Python 3: ``list.sort()`` requires ``__lt__`` (``__cmp__`` is ignored)."""
        return self.distance < other.distance

    @property
    def x ( self ):
        return self.position.getX()

    @property
    def y ( self ):
        return self.position.getY()

    def __str__ ( self ):
        return '<Node D:{} @[{} {}] d:{} {}>'.format( self.degree
                                                    , DbU.getValueString(self.x)
                                                    , DbU.getValueString(self.y)
                                                    , DbU.getValueString(self.distance)
                                                    , self.cluster )

    @property
    def degree ( self ):
        """Number of edges connected to this node."""
        return len(self.edges)

    def setFlags ( self, flags ):
        self.flags = self.flags | flags

    def unsetFlags ( self, flags ):
        self.flags = self.flags & ~flags

    def addEdge ( self, edge ):
        self.edges.append( edge )

    def delEdge ( self, edge ):
        # Bug fix: the original kept indexing after the ``del`` over the
        # stale range, raising IndexError whenever the removed edge was
        # not the last one.
        if edge in self.edges:
            self.edges.remove( edge )

    def isSame ( self, other ):
        """Identity comparison (``==`` may be overloaded elsewhere)."""
        return id(self) == id(other)

    def update ( self, node ):
        """
        Prim relaxation: if ``node`` is closer than the recorded nearest
        tree node, record it. Returns True when the distance improved.
        """
        distance = self.position.manhattanDistance( node.position )
        if distance < self.distance:
            self.distance = distance
            self.back     = node
            return True
        return False

    def check ( self ):
        """Consistency check of the node's degree in the final tree."""
        if self.degree > 4:
            raise ErrorMessage( 2, [ 'Node.check(): Degree of node is over 4,'
                                   , 'On {}.'.format(self) ] )
        if self.degree == 0:
            raise ErrorMessage( 2, [ 'Node.check(): Degree of node is zero (unconnected),'
                                   , 'On {}.'.format(self) ] )
        if self.cluster is None:
            if self.degree < 2:
                raise ErrorMessage( 2, [ 'Node.check(): Degree of Steiner node must be between 2 and 4,'
                                       , 'On {}.'.format(self) ] )

    def createGContact ( self, net ):
        """
        Create the global-routing contact of the node on ``net`` and, for
        graph points, attach it to the buffer RoutingPad of the cluster
        (output pad for the driver, input pad otherwise).
        """
        global gcut
        side = DbU.fromLambda( 1.0 )
        self.gcontact = Contact.create( net, gcut, self.x, self.y, side, side )
        trace( 542, '+ {}'.format(self) )
        trace( 542, '| {}'.format(self.gcontact) )
        if self.flags & Node.GraphPoint:
            if self.flags & Node.Driver:
                rp = self.cluster.bOutputRp
            else:
                rp = self.cluster.bInputRp
            rp.getBodyHook().attach( self.gcontact.getBodyHook() )
            trace( 542, '| {}'.format(rp) )
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Class : "rmst.Edge".
|
||||
|
||||
class Edge ( object ):
    """
    A rectilinear edge between two Nodes. Registers itself on both end
    nodes at creation and unregisters on deletion. ``length`` is the
    Manhattan distance between the two node positions.
    """

    def __init__ ( self, source, target ):
        self.source = source
        self.target = target
        self.length = self.source.position.manhattanDistance( self.target.position )
        self.source.addEdge( self )
        self.target.addEdge( self )

    def __del__ ( self ):
        self.source.delEdge( self )
        self.target.delEdge( self )

    def __str__ ( self ):
        return '<Edge S:[{} {}] T:[{} {}] len:{}>' \
               .format( DbU.getValueString(self.source.x)
                      , DbU.getValueString(self.source.y)
                      , DbU.getValueString(self.target.x)
                      , DbU.getValueString(self.target.y)
                      , DbU.getValueString(self.length)
                      )

    def isHorizontal ( self ):
        return self.source.y == self.target.y

    def isVertical ( self ):
        return self.source.x == self.target.x

    def isDiagonal ( self ):
        # Bug fix: the original had a dangling line-continuation after the
        # last term, swallowing the next statement.
        return (self.source.x != self.target.x) \
           and (self.source.y != self.target.y)

    def _createGHorizontal ( self, source, target, y ):
        """Create the gmetalh segment, normalizing source/target on X."""
        global gmetalh
        # (Removed a stray, discarded ``DbU.fromLambda( 2.0 )`` statement.)
        if source.getPosition().getX() > target.getPosition().getX():
            source, target = target, source
        return Horizontal.create( source, target, gmetalh, y, DbU.fromLambda(2.0) )

    def _createGVertical ( self, source, target, x ):
        """Create the gmetalv segment, normalizing source/target on Y."""
        global gmetalv
        if source.getPosition().getY() > target.getPosition().getY():
            source, target = target, source
        return Vertical.create( source, target, gmetalv, x, DbU.fromLambda(2.0) )

    def createGSegment ( self, net ):
        """
        Materialize the edge as global-routing segment(s) on ``net``.
        A diagonal edge is L-routed: horizontal first, then vertical,
        with a turn contact at the corner. Returns the created segments.
        """
        global gcut
        side     = DbU.fromLambda( 1.0 )
        segments = []
        if self.isDiagonal():
            turn = Contact.create( net, gcut, self.target.x, self.source.y, side, side )
            segments.append( self._createGHorizontal( self.source.gcontact
                                                    , turn
                                                    , self.source.y ) )
            segments.append( self._createGVertical( turn
                                                  , self.target.gcontact
                                                  , self.target.x ) )
        elif self.isHorizontal():
            segments.append( self._createGHorizontal( self.source.gcontact
                                                    , self.target.gcontact
                                                    , self.source.y ) )
        else:
            segments.append( self._createGVertical( self.source.gcontact
                                                  , self.target.gcontact
                                                  , self.source.x ) )
        return segments
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Class : "rmst.geoKey".
|
||||
|
||||
class GeoKey ( object ):
    """
    Hashable (x, y) position used as key in ``Graph.nodesLUT``. Two keys
    with the same coordinates must compare equal *and* hash equal so dict
    lookups by position work.
    """

    def __init__ ( self, x, y ):
        self.x = x
        self.y = y

    def __cmp__ ( self, other ):
        """Python 2 three-way ordering, lexicographic on (x, y)."""
        if self.x != other.x: return self.x - other.x
        if self.y != other.y: return self.y - other.y
        return 0

    def __eq__ ( self, other ):
        # Bug fix: ``__hash__`` without ``__eq__`` makes equal keys unequal
        # under Python 3 (identity fallback), breaking Graph.lookupNode().
        return isinstance(other,GeoKey) and self.x == other.x and self.y == other.y

    def __ne__ ( self, other ):
        return not self.__eq__( other )

    def __hash__ ( self ):
        # Mix x+y through a splitmix64-style finalizer to spread the bits.
        key = self.x + self.y
        key = (key ^ (key >> 30)) * 0xbf58476d1ce4e5b9
        key = (key ^ (key >> 27)) * 0x94d049bb133111eb
        key = key ^ (key >> 31)
        return key

    def __str__ (self ):
        return '<GeoKey [{} {}]>'.format( DbU.getValueString(self.x)
                                        , DbU.getValueString(self.y) )
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Class : "rmst.Graph".
|
||||
|
||||
class Graph ( object ):
    """
    Base graph structure shared by RMST & RSMT: nodes stored in a dict
    keyed by position (``GeoKey``), a list of edges and a cached total
    wirelength (``length``, also returned by ``len()``).
    """

    def __init__ ( self, net, name=None ):
        self.nodesLUT = {}
        self.edges    = []
        self.length   = 0
        self.net      = net
        self._name    = name

    def __len__ ( self ):
        """Total wirelength of the tree (not the node count)."""
        return self.length

    @property
    def name ( self ):
        return self.net.getName() if self._name is None else self._name

    @property
    def nodes ( self ):
        # Bug fix: return a real list — under Python 3, ``dict.values()``
        # is a view that callers (doPrim(), ...) cannot index.
        return list( self.nodesLUT.values() )

    def addNode ( self, cluster, x, y, flags=0 ):
        """Create a Node at (x, y) and register it in the position LUT."""
        node = Node( cluster, x, y, flags )
        self.nodesLUT[ GeoKey(x,y) ] = node
        return node

    def copyNode ( self, node ):
        """Register a fresh copy of ``node`` (scratch attributes reset)."""
        self.addNode( node.cluster, node.x, node.y, node.flags )

    def setNodes ( self, nodes ):
        """Reset the graph, then re-create a copy of each given node."""
        self.__init__( self.net, self.name )
        for node in nodes:
            self.copyNode( node )

    def lookupNode ( self, x, y ):
        """Return the node at (x, y), or None."""
        # Bug fix: ``dict.has_key()`` was removed in Python 3.
        return self.nodesLUT.get( GeoKey(x,y), None )

    def lookupOrAddNode ( self, x, y ):
        """Return the node at (x, y), creating a Steiner point if absent."""
        node = self.lookupNode( x, y )
        if node is None:
            node = self.addNode( None, x, y )
        return node

    def showNodes ( self ):
        trace( 542, '+,+', '\tGraph "{}" nodes:\n'.format(self.name) )
        for node in self.nodes:
            trace( 542, '\t| {}\n'.format(node) )
        trace( 542, '--' )

    def showEdges ( self ):
        trace( 542, '+,+', '\tGraph "{}" Edges:\n'.format(self.name) )
        for i in range(len(self.edges)):
            trace( 542, '\t[{:3}] {}\n'.format(i,self.edges[i]) )
        trace( 542, '--' )
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Class : "rsmt.RMST".
|
||||
|
||||
class RMST ( Graph ):
    """
    Build a Rectilinear Minimum Spanning Tree (RMST) using Prim's algorithm.
    The graph structure is supplied by the Graph base class.
    """

    def __init__ ( self, net, name=None ):
        Graph.__init__( self, net, name )

    def reinit ( self ):
        """Clear the edges and the length, keep the nodes."""
        self.edges  = []
        self.length = 0

    def doPrim ( self ):
        """
        Build the spanning tree edges with Prim's algorithm; the total
        wirelength is accumulated in ``length``.
        """
        self.reinit()
        # Hoisted: the ``nodes`` property rebuilds the list on each access.
        nodes = self.nodes
        # Special case a graph of one or two nodes only.
        if len(nodes) <  2: return
        if len(nodes) == 2:
            self.edges.append( Edge( nodes[0], nodes[1] ) )
            self.length = self.edges[0].length
            return

        trace( 542, '+' )
        unreacheds = []
        # Bug fix: was ``_distance``, a typo leaving the seed's distance unset.
        nodes[0].distance = 0
        for node in nodes[1:]:
            node.update( nodes[0] )
            unreacheds.append( node )
        unreacheds.sort()
        trace( 542, '\tPrim "{}" (initial stack)\n'.format(self.name) )
        trace( 542, '\t+ S {}\n'.format(nodes[0]) )
        Node.showNodes( unreacheds )

        while len(unreacheds):
            # Pop the unreached node closest to the tree, link it to its
            # nearest tree node, then relax the remaining ones.
            nearest = unreacheds.pop(0)
            self.edges.append( Edge( nearest, nearest.back ) )
            trace( 542, '\tAdding {}\n'.format(self.edges[-1]) )
            for node in unreacheds:
                node.update( nearest )
            unreacheds.sort()
            trace( 542, '\tPrim "{}" (current stack)\n'.format(self.name) )
            trace( 542, '\tS {}\n'.format(nodes[0]) )
            Node.showNodes( unreacheds )

        for edge in self.edges:
            self.length += edge.length

        trace( 542, '-' )
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Class : "rsmt.RSMT".
|
||||
|
||||
class RSMT ( Graph ):
    """
    Build a Rectilinear Steiner Minimum Tree. Two builders are available:

    * ``doIteratedOneSteiner()``: the Iterated One Steiner heuristic [1]
      (kept for reference, very slow on large nets).
    * ``doFlute()``: FLUTE lookup-table based construction (preferred).
    """

    def __init__ ( self, net, name=None ):
        Graph.__init__( self, net, name )
        self._hananNodes = []

    def _computeHanan ( self ):
        """Compute the Hanan grid points that are not already graph nodes."""
        xs = []
        ys = []
        for node in self.nodes:
            if not node.x in xs: xs.append( node.x )
            # Bug fix: the original tested ``node.y in xs`` (wrong list).
            if not node.y in ys: ys.append( node.y )
        xs.sort()
        ys.sort()

        trace( 542, '\tHanan matrix: {}x{}\n'.format(len(xs),len(ys)) )

        self._hananNodes = []
        for x in xs:
            for y in ys:
                isHanan = True
                for node in self.nodes:
                    if node.x == x and node.y == y:
                        isHanan = False
                        break
                if not isHanan: continue
                self._hananNodes.append( Node( None, x, y ) )

    def addNode ( self, cluster, x, y, flags=0 ):
        """Same as Graph.addNode(), with tracing."""
        node = Graph.addNode( self, cluster, x, y, flags )
        trace( 542, '\tNew Node: {}\n'.format(node) )
        return node

    def doFlute ( self ):
        """Build the Steiner tree from the FLUTE lookup tables."""
        trace( 542, ',+', '\tRSMT.doFlute() on "{}".\n'.format(self.net.getName()) )
        self.edges  = []
        self.length = 0

        if len(self.nodes) < 2: return
        if len(self.nodes) == 2:
            self.edges.append( Edge( self.nodes[0], self.nodes[1] ) )
            self.length = self.edges[0].length
            return

        points = []
        for node in self.nodes:
            points.append( (node.x,node.y) )
        tree = Flute.flute( points )
        for i in range(len(tree)):
            j = tree[i][0]
            # FLUTE may return branch points absent from the graph: create
            # them as Steiner points on the fly.
            source = self.lookupOrAddNode( tree[i][1], tree[i][2] )
            target = self.lookupOrAddNode( tree[j][1], tree[j][2] )
            if source.x == target.x and source.y == target.y:
                # Degenerate zero-length FLUTE branch, skip it.
                continue
            self.edges.append( Edge( source, target ) )
        # Bug fix: the length was set to the *first* edge's length only.
        for edge in self.edges:
            self.length += edge.length
        for node in self.nodes: node.check()
        return

    def doIteratedOneSteiner ( self ):
        """
        Build the Steiner tree with the Iterated One Steiner heuristic [1]:
        repeatedly try each Hanan point and keep it when it shortens the
        Prim MST. Terribly slow above ~100 points; prefer doFlute().
        """
        trace( 542, ',+', '\tRSMT.doIteratedSteiner() on "{}".\n'.format(self.net.getName()) )
        self.edges  = []
        self.length = 0

        if len(self.nodes) < 2: return
        if len(self.nodes) == 2:
            self.edges.append( Edge( self.nodes[0], self.nodes[1] ) )
            self.length = self.edges[0].length
            return

        self._computeHanan()
        count  = 0
        minMST = RMST( self.net, 'MST[{}]'.format(count) )
        minMST.setNodes( self.nodes )
        minMST.doPrim()
        trace( 542, '\tInitial "{}" length:{}\n'.format(minMST.name,DbU.getValueString(len(minMST))) )

        addedSteiner = True
        while addedSteiner:
            addedSteiner = False
            for steinerNode in self._hananNodes:
                count += 1
                trace( 542, '\tTrying with Steiner point [{} {}]\n' \
                            .format(DbU.getValueString(steinerNode.x)
                                   ,DbU.getValueString(steinerNode.y)) )
                mst = RMST( self.net, 'MST[{}]'.format(count) )
                mst.setNodes( self.nodes )
                mst.copyNode( steinerNode )
                mst.doPrim()
                trace( 542, '\tCurrent "{}" length {}\n' \
                            .format(mst.name,DbU.getValueString(len(mst))) )
                if len(mst) < len(minMST):
                    trace( 542, '\tAccept min RST.\n' )
                    minMST       = mst
                    addedSteiner = True

            if addedSteiner:
                # NOTE(review): assumes the last node of ``minMST`` is the
                # Steiner point just inserted — depends on dict ordering,
                # confirm on the target Python version.
                self.copyNode( minMST.nodes[-1] )
                self.nodes[-1].setFlags( Node.KeepPoint )
                # Prune Steiner points whose degree fell below 3.
                # Bug fix: the original looped on ``len(self.edges)`` while
                # indexing *nodes* and deleted from a temporary list (a
                # no-op); remove the entries from the LUT instead.
                for geoKey in list(self.nodesLUT.keys()):
                    node = self.nodesLUT[ geoKey ]
                    if (node.flags & Node.SteinerPoint) and node.degree < 3:
                        trace( 542, '\tDeleting unused Steiner point @[{} {}]\n' \
                                    .format(DbU.getValueString(node.x)
                                           ,DbU.getValueString(node.y)) )
                        del self.nodesLUT[ geoKey ]

        # Bug fix: ``nodes`` is a read-only property, the original
        # ``self.nodes = minMST.nodes`` raised AttributeError; adopt the
        # node table of the minimal tree directly.
        self.nodesLUT = minMST.nodesLUT
        self.edges    = minMST.edges
        self.length   = minMST.length
        self.showEdges()
        trace( 542, '-' )

    def createGRSegments ( self ):
        """Materialize the tree: one GContact per node, segments per edge."""
        trace( 542, ',+', '\tRSMT.createGRsegments(): "{}"\n'.format(self.net.getName()) )
        for node in self.nodes:
            node.createGContact( self.net )
        for i in range(len(self.edges)):
            segments = self.edges[i].createGSegment( self.net )
            for segment in segments:
                trace( 542, '\t[{:3}] {}\n'.format(i,segment) )
        trace( 542, '-' )
|
||||
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Module static initialization
|
||||
|
||||
# Global-routing pseudo layers, resolved lazily by staticInit() once the
# technology database is loaded.
gcut    = None
gmetalh = None
gmetalv = None
|
||||
|
||||
|
||||
def staticInit ():
    """
    One-time lazy initialization of the ``rsmt`` module. Postponed until
    the Hurricane & CRL database and environment are properly loaded.
    Resolves the global-routing pseudo layers and loads the FLUTE lookup
    tables; subsequent calls are no-ops.
    """
    global gcut, gmetalh, gmetalv
    if gcut is not None: return

    tech    = DataBase.getDB().getTechnology()
    gcut    = tech.getLayer( 'gcontact' )
    gmetalh = tech.getLayer( 'gmetalh' )
    gmetalv = tech.getLayer( 'gmetalv' )
    Flute.readLUT()
|
|
@ -36,6 +36,7 @@ from Hurricane import Instance
|
|||
import CRL
|
||||
from CRL import RoutingLayerGauge
|
||||
from helpers import trace
|
||||
from helpers import dots
|
||||
from helpers.io import ErrorMessage
|
||||
from helpers.io import WarningMessage
|
||||
from helpers.io import catch
|
||||
|
@ -98,18 +99,18 @@ class BufferPool ( object ):
|
|||
* MARK_USED, tag the designated buffer as USED.
|
||||
"""
|
||||
|
||||
trace( 550, ',+', '\tBufferPool.select() column:{}, row={}, flags={:x}\n' \
|
||||
trace( 540, ',+', '\tBufferPool.select() column:{}, row={}, flags={:x}\n' \
|
||||
.format(column,row,flags) )
|
||||
if column >= self.columns:
|
||||
trace( 550, '-' )
|
||||
trace( 540, '-' )
|
||||
raise ErrorMessage( 3, 'BufferPool.select(): Column {} is out of range (max:{}).' \
|
||||
.format(column,self.columns) )
|
||||
if row >= self.rows:
|
||||
trace( 550, '-' )
|
||||
trace( 540, '-' )
|
||||
raise ErrorMessage( 3, 'BufferPool.select(): Row {} is out of range (max:{}).' \
|
||||
.format(row,self.rows) )
|
||||
self._select( self.toIndex( column, row ), flags )
|
||||
trace( 550, '-' )
|
||||
trace( 540, '-' )
|
||||
|
||||
def _select ( self, index, flags ):
|
||||
self.selectedIndex = index
|
||||
|
@ -129,7 +130,7 @@ class BufferPool ( object ):
|
|||
for i in range(self.rows*self.columns):
|
||||
if not (self.buffers[i][0] & Spares.USED):
|
||||
self._select( i, Spares.MARK_USED )
|
||||
trace( 550, '\tUse buffer from pool {}\n'.format(self.quadTree) )
|
||||
trace( 540, '\tUse buffer from pool {}\n'.format(self.quadTree) )
|
||||
return self.buffers[i][1]
|
||||
return None
|
||||
|
||||
|
@ -142,7 +143,7 @@ class BufferPool ( object ):
|
|||
|
||||
def _createBuffers ( self ):
|
||||
"""Create the matrix of instances buffer."""
|
||||
trace( 550, ',+', '\tBufferPool.createBuffers()\n' )
|
||||
trace( 540, ',+', '\tBufferPool.createBuffers()\n' )
|
||||
|
||||
state = self.quadTree.spares.state
|
||||
sliceHeight = state.gaugeConf.sliceHeight
|
||||
|
@ -152,7 +153,7 @@ class BufferPool ( object ):
|
|||
- (state.bufferConf.height * self.rows)/2 )
|
||||
slice = y / sliceHeight
|
||||
|
||||
trace( 550, '\tSlice height: {}\n'.format(DbU.getValueString(sliceHeight)) )
|
||||
trace( 540, '\tSlice height: {}\n'.format(DbU.getValueString(sliceHeight)) )
|
||||
|
||||
for row in range(self.rows):
|
||||
orientation = Transformation.Orientation.ID
|
||||
|
@ -167,31 +168,36 @@ class BufferPool ( object ):
|
|||
instance.setTransformation( transf )
|
||||
instance.setPlacementStatus( Instance.PlacementStatus.FIXED )
|
||||
self.buffers[ index ][1] = instance
|
||||
trace( 550, '\tBuffer[{}]: {} @{}\n'.format(index,self.buffers[index],transf) )
|
||||
trace( 540, '\tBuffer[{}]: {} @{}\n'.format(index,self.buffers[index],transf) )
|
||||
blBufAb = self.buffers[ 0][1].getAbutmentBox()
|
||||
trBufAb = self.buffers[-1][1].getAbutmentBox()
|
||||
self.area = Box( blBufAb.getXMin(), blBufAb.getYMin()
|
||||
, trBufAb.getXMax(), trBufAb.getYMax() )
|
||||
trace( 550, '-' )
|
||||
trace( 540, '-' )
|
||||
|
||||
def _destroyBuffers ( self ):
|
||||
"""Destroy all the buffer instances of the pool."""
|
||||
for flags, buffer in self.buffers:
|
||||
buffer.destroy()
|
||||
|
||||
def showUse ( self, depth ):
|
||||
"""Display the pool occupancy."""
|
||||
def getUse ( self ):
|
||||
"""Return the pool occupancy, a tuple ``(occupancy,capacity)``."""
|
||||
count = 0
|
||||
for i in range(self.rows*self.columns):
|
||||
if self.buffers[i][0] & Spares.USED:
|
||||
count += 1
|
||||
return count, self.rows*self.columns
|
||||
|
||||
def showUse ( self, depth ):
|
||||
"""Display the pool occupancy."""
|
||||
occupancy, capacity = self.getUse()
|
||||
#header = '| ' if self.quadTree.isLeaf() else '+ '
|
||||
#print( ' {}{}Pool {}, usage:{}/{}.'.format( ' '*depth
|
||||
# , header
|
||||
# , self.quadTree
|
||||
# , count
|
||||
# , self.rows*self.columns) )
|
||||
return count, self.rows*self.columns
|
||||
# , occupency
|
||||
# , capacity )
|
||||
return occupancy, capacity
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
|
@ -218,10 +224,12 @@ class QuadTree ( object ):
|
|||
self.ycut = None
|
||||
self.parent = parent
|
||||
self.depth = parent.depth+1 if parent else 0
|
||||
self.maxDepth = 0
|
||||
self.bl = None
|
||||
self.br = None
|
||||
self.tl = None
|
||||
self.tr = None
|
||||
self.position = [ None, None ]
|
||||
self.bufferTag = 'spare'
|
||||
self.bufferNet = None
|
||||
self.pool = BufferPool( self )
|
||||
|
@ -239,10 +247,14 @@ class QuadTree ( object ):
|
|||
self.pool._destroyBuffers()
|
||||
|
||||
def __str__ ( self ):
|
||||
s = '<QuadTree [{},{} {},{}] "{}">'.format( DbU.getValueString(self.area.getXMin())
|
||||
occupancy, capacity = self.pool.getUse()
|
||||
s = '<QuadTree [{},{} {},{}] {}/{} "{}">' \
|
||||
.format( DbU.getValueString(self.area.getXMin())
|
||||
, DbU.getValueString(self.area.getYMin())
|
||||
, DbU.getValueString(self.area.getXMax())
|
||||
, DbU.getValueString(self.area.getYMax())
|
||||
, occupancy
|
||||
, capacity
|
||||
, self.rtag )
|
||||
return s
|
||||
|
||||
|
@ -252,8 +264,6 @@ class QuadTree ( object ):
|
|||
def rshowPoolUse ( self ):
|
||||
rused = 0
|
||||
rtotal = 0
|
||||
if not self.depth:
|
||||
print( ' o Detailed use of spare buffers.' )
|
||||
used, total = self.pool.showUse( self.depth )
|
||||
rused += used
|
||||
rtotal += total
|
||||
|
@ -261,10 +271,33 @@ class QuadTree ( object ):
|
|||
used, total = leaf.rshowPoolUse()
|
||||
rused += used
|
||||
rtotal += total
|
||||
|
||||
if not self.depth:
|
||||
global framework
|
||||
catalog = framework.getCatalog()
|
||||
instancesNb = 0
|
||||
bufNb = 0
|
||||
for occurrence in self.spares.state.cell.getTerminalNetlistInstanceOccurrences():
|
||||
cellName = occurrence.getEntity().getMasterCell().getName()
|
||||
cstate = catalog.getState( cellName )
|
||||
if cstate and cstate.isFeed(): continue
|
||||
if cellName.startswith( 'buf_' ):
|
||||
bufNb += 1
|
||||
continue
|
||||
instancesNb += 1
|
||||
if bufNb != rtotal:
|
||||
print( WarningMessage('QuadTree.rshowPoolUse(): Buffer instance number discrepency, {} vs {}.' \
|
||||
.format(bufNb,rtotal)) )
|
||||
if rtotal:
|
||||
print( ' - Useds: {}, total: {} ({:.1%}).' \
|
||||
.format(rused,rtotal,float(rused)/float(rtotal)) )
|
||||
print( ' o Detailed use of spare buffers.' )
|
||||
dots( 82
|
||||
, ' - Useds: '
|
||||
, ' {}/{} ({:.1%})'.format(rused,rtotal,float(rused)/float(rtotal)) )
|
||||
dots( 82
|
||||
, ' - Buffer ratio: '
|
||||
, ' {}/{} ({:.1%})'.format( rtotal
|
||||
, instancesNb+bufNb
|
||||
, float(rtotal)/float(instancesNb+bufNb)) )
|
||||
return rused, rtotal
|
||||
|
||||
@property
|
||||
|
@ -346,14 +379,14 @@ class QuadTree ( object ):
|
|||
"""
|
||||
if self.isLeaf() and not doLeaf: return
|
||||
|
||||
trace( 550, '\tQuadTree.connectBuffer(): rtag:"{}"\n'.format(self.rtag) )
|
||||
trace( 540, '\tQuadTree.connectBuffer(): rtag:"{}"\n'.format(self.rtag) )
|
||||
plug = self.bOutputPlug
|
||||
if not plug.getNet():
|
||||
outputNetBuff = Net.create( self.spares.state.cell,'{}_{}' \
|
||||
.format(self.root.bufferTag,self.rtag) )
|
||||
plug.setNet( outputNetBuff )
|
||||
trace( 550, '\t| {}\n'.format(plug) )
|
||||
trace( 550, '\t| {}\n'.format(outputNetBuff) )
|
||||
trace( 540, '\t| {}\n'.format(plug) )
|
||||
trace( 540, '\t| {}\n'.format(outputNetBuff) )
|
||||
|
||||
def rconnectBuffer ( self ):
|
||||
"""[R]ecursive call of connectBuffer()"""
|
||||
|
@ -367,13 +400,13 @@ class QuadTree ( object ):
|
|||
For a more detailed explanation of the parameter, please refer
|
||||
to BufferPool.select().
|
||||
"""
|
||||
trace( 550, '+' )
|
||||
trace( 540, '+' )
|
||||
if self.plugs:
|
||||
self.plugs = []
|
||||
self.pool.select( column, row, flags )
|
||||
if not self.isLeaf():
|
||||
for leaf in self.leafs: leaf.rselectBuffer( column, row, flags )
|
||||
trace( 550, '-' )
|
||||
trace( 540, '-' )
|
||||
|
||||
def partition ( self ):
|
||||
"""
|
||||
|
@ -383,7 +416,7 @@ class QuadTree ( object ):
|
|||
Depending on the initial aspect ratio, the first levels *may* not be a
|
||||
quad-tree, but only a vertical or horizontal bi-partition.
|
||||
"""
|
||||
trace( 550, ',+', '\tQuadTree.partition(): {} (spareSide:{})\n' \
|
||||
trace( 540, ',+', '\tQuadTree.partition(): {} (spareSide:{})\n' \
|
||||
.format(self.area, DbU.getValueString(self.spares.state.cfg.block.spareSide)) )
|
||||
|
||||
spareSide = self.spares.state.cfg.block.spareSide
|
||||
|
@ -392,7 +425,7 @@ class QuadTree ( object ):
|
|||
aspectRatio = float(self.area.getWidth()) / float(self.area.getHeight())
|
||||
|
||||
if self.area.getHeight() < side*2.0 or self.area.getWidth () < side*2.0:
|
||||
trace( 550, '-' )
|
||||
trace( 540, '-' )
|
||||
return False
|
||||
|
||||
if aspectRatio < 0.5:
|
||||
|
@ -411,8 +444,8 @@ class QuadTree ( object ):
|
|||
, self.area.getXMax()
|
||||
, self.area.getYMax() )
|
||||
, 'tl' )
|
||||
trace( 550, '\tVertical bi-partition @Y:{}\n'.format(DbU.getValueString(self.ycut)) )
|
||||
trace( 550, '-' )
|
||||
trace( 540, '\tVertical bi-partition @Y:{}\n'.format(DbU.getValueString(self.ycut)) )
|
||||
trace( 540, '-' )
|
||||
return True
|
||||
elif aspectRatio > 2.0:
|
||||
self.xcut = self.spares.toXGCellGrid( self.area.getXMin() + self.area.getWidth()/2 )
|
||||
|
@ -430,8 +463,8 @@ class QuadTree ( object ):
|
|||
, self.area.getXMax()
|
||||
, self.area.getYMax() )
|
||||
, 'br' )
|
||||
trace( 550, '\tHorizontal bi-partition @X:{}\n'.format(DbU.getValueString(self.xcut)) )
|
||||
trace( 550, '-' )
|
||||
trace( 540, '\tHorizontal bi-partition @X:{}\n'.format(DbU.getValueString(self.xcut)) )
|
||||
trace( 540, '-' )
|
||||
return True
|
||||
|
||||
self.ycut = self.spares.toYGCellGrid( self.area.getYMin() + self.area.getHeight()/2 )
|
||||
|
@ -465,19 +498,112 @@ class QuadTree ( object ):
|
|||
, self.area.getYMax() )
|
||||
, 'tr' )
|
||||
|
||||
trace( 550, '\tQuadri-partition @X:{} + @Y:{}\n'\
|
||||
trace( 540, '\tQuadri-partition @X:{} + @Y:{}\n'\
|
||||
.format(DbU.getValueString(self.xcut),DbU.getValueString(self.ycut)) )
|
||||
trace( 550, '-' )
|
||||
trace( 540, '-' )
|
||||
return True
|
||||
|
||||
def rpartition ( self ):
|
||||
""""[R]ecursively partition the the area."""
|
||||
trace( 550, ',+', '\tQuadTree.rpartition(): {}\n'.format(self.area) )
|
||||
trace( 540, ',+', '\tQuadTree.rpartition(): {}\n'.format(self.area) )
|
||||
if self.partition():
|
||||
for leaf in self.leafs:
|
||||
trace( 550, '\tLeaf rtag:"{}"\n'.format(leaf.rtag) )
|
||||
trace( 540, '\tLeaf rtag:"{}"\n'.format(leaf.rtag) )
|
||||
leaf.rpartition()
|
||||
trace( 550, '-' )
|
||||
trace( 540, '-' )
|
||||
if self.isRoot():
|
||||
self._rsetMaxDepth()
|
||||
self._rsetPosition( self.maxDepth, [0,0] )
|
||||
|
||||
def _rsetMaxDepth ( self ):
|
||||
maxDepth = 0
|
||||
trace( 540, ',+', '\tEntering: {}\n'.format(self) )
|
||||
if not self.isLeaf():
|
||||
for leaf in self.leafs:
|
||||
leaf._rsetMaxDepth()
|
||||
maxDepth = max( maxDepth, leaf.maxDepth+1 )
|
||||
self.maxDepth = maxDepth
|
||||
trace( 540, ',-', '\tMaxdepth: {} {}\n'.format(maxDepth,self) )
|
||||
|
||||
def _rsetPosition ( self, maxDepth, position ):
|
||||
trace( 540, '+,', '\t+_rsetPosition(): {}\n'.format(self) )
|
||||
self.position = position
|
||||
trace( 540, '\t| position:{}\n'.format(self.position) )
|
||||
if not self.isLeaf():
|
||||
leafBit = 1 << (maxDepth - (self.depth + 1))
|
||||
trace( 540, '\t| leafBit:{}\n'.format(leafBit) )
|
||||
if self.bl: self.bl._rsetPosition( maxDepth, position )
|
||||
if self.br: self.br._rsetPosition( maxDepth, [position[0] | leafBit, position[1]] )
|
||||
if self.tl: self.tl._rsetPosition( maxDepth, [position[0] , position[1] | leafBit] )
|
||||
if self.tr: self.tr._rsetPosition( maxDepth, [position[0] | leafBit, position[1] | leafBit] )
|
||||
trace( 540, '-' )
|
||||
|
||||
def _getLeafAt ( self, maxDepth, stopDepth, position ):
|
||||
trace( 540, '\t_getLeafAt(): {}\n'.format(self) )
|
||||
trace( 540, '\t| maxDepth:{}, stopDepth:{}, position:{}\n' \
|
||||
.format(maxDepth,stopDepth,position) )
|
||||
if self.isLeaf(): return self
|
||||
if self.depth >= stopDepth: return leaf
|
||||
bitDepth = maxDepth - (self.depth + 1)
|
||||
bitMask = 1 << bitDepth
|
||||
trace( 540, '\tbitDepth: {}, bitMask: {:b}\n'.format(bitDepth,bitMask) )
|
||||
leafCode = (position[0] & bitMask) + ((position[1] & bitMask) << 1)
|
||||
trace( 540, '\tleafCode: {}\n'.format(leafCode) )
|
||||
leafCode = leafCode >> bitDepth
|
||||
leaf = None
|
||||
trace( 540, '\tleafCode: {}\n'.format(leafCode) )
|
||||
if leafCode == 0: leaf = self.bl
|
||||
if leafCode == 1: leaf = self.br
|
||||
if leafCode == 2: leaf = self.tl
|
||||
if leafCode == 3: leaf = self.tr
|
||||
if leaf is None: return None
|
||||
trace( 540, '+' )
|
||||
leaf = leaf._getLeafAt( maxDepth, stopDepth, position )
|
||||
trace( 540, '-' )
|
||||
return leaf
|
||||
|
||||
def getLeafAt ( self, position, depth=None ):
|
||||
return self._getLeafAt( self.root.maxDepth, depth, position )
|
||||
|
||||
def getLeft ( self ):
|
||||
trace( 540, '\tgetLeft(): \n'.format(self) )
|
||||
shiftedPos = self.position[0] >> (self.root.maxDepth - self.depth)
|
||||
deltaPos = 1 << (self.root.maxDepth - self.depth)
|
||||
trace( 540, '\t| position[0] (x): {}\n'.format(self.position[0]) )
|
||||
trace( 540, '\t| shiftedPos (x): {}\n'.format(shiftedPos) )
|
||||
trace( 540, '\t| deltaPos (x): {}\n'.format(deltaPos) )
|
||||
if shiftedPos == 0: return None
|
||||
return self.root.getLeafAt( [self.position[0]-deltaPos, self.position[1]], self.depth )
|
||||
|
||||
def getRight ( self ):
|
||||
trace( 540, '\tgetRight(): \n'.format(self) )
|
||||
shiftedPos = self.position[0] >> (self.root.maxDepth - self.depth)
|
||||
deltaPos = 1 << (self.root.maxDepth - self.depth)
|
||||
trace( 540, '\t| position[0] (x): {}\n'.format(self.position[0]) )
|
||||
trace( 540, '\t| shiftedPos (x): {}\n'.format(shiftedPos) )
|
||||
trace( 540, '\t| deltaPos (x): {}\n'.format(deltaPos) )
|
||||
if shiftedPos+1 >= 1 << self.depth: return None
|
||||
return self.root.getLeafAt( [self.position[0]+deltaPos, self.position[1]], self.depth )
|
||||
|
||||
def getBottom ( self ):
|
||||
trace( 540, '\tgetBottom(): \n'.format(self) )
|
||||
shiftedPos = self.position[1] >> (self.root.maxDepth - self.depth)
|
||||
deltaPos = 1 << (self.root.maxDepth - self.depth)
|
||||
trace( 540, '\t| position[0] (x): {}\n'.format(self.position[0]) )
|
||||
trace( 540, '\t| shiftedPos (x): {}\n'.format(shiftedPos) )
|
||||
trace( 540, '\t| deltaPos (x): {}\n'.format(deltaPos) )
|
||||
if shiftedPos == 0: return None
|
||||
return self.root.getLeafAt( [self.position[0], self.position[1]-deltaPos], self.depth )
|
||||
|
||||
def getTop ( self ):
|
||||
trace( 540, '\tgetTop(): \n'.format(self) )
|
||||
shiftedPos = self.position[1] >> (self.root.maxDepth - self.depth)
|
||||
deltaPos = 1 << (self.root.maxDepth - self.depth)
|
||||
trace( 540, '\t| position[0] (x): {}\n'.format(self.position[0]) )
|
||||
trace( 540, '\t| shiftedPos (x): {}\n'.format(shiftedPos) )
|
||||
trace( 540, '\t| deltaPos (x): {}\n'.format(deltaPos) )
|
||||
if shiftedPos+1 >= 1 << self.depth: return None
|
||||
return self.root.getLeafAt( [self.position[0], self.position[1]+deltaPos], self.depth )
|
||||
|
||||
def getLeafUnder ( self, position ):
|
||||
"""Find the QuadTree leaf under ``position``."""
|
||||
|
@ -543,26 +669,26 @@ class QuadTree ( object ):
|
|||
"""
|
||||
if not self.plugs: return
|
||||
|
||||
trace( 550, ',+', '\tQuadTree.spliNetlist()\n' )
|
||||
trace( 540, ',+', '\tQuadTree.spliNetlist()\n' )
|
||||
self.connectBuffer( doLeaf=True )
|
||||
netBuff = self.bOutputPlug.getNet()
|
||||
trace( 550, '\tBuffer: {}\n'.format(self.buffer) )
|
||||
trace( 550, '\tBuffer output: {}\n'.format(netBuff) )
|
||||
trace( 540, '\tBuffer: {}\n'.format(self.buffer) )
|
||||
trace( 540, '\tBuffer output: {}\n'.format(netBuff) )
|
||||
for plug in self.plugs:
|
||||
trace( 550, '\t| Leaf: {}\n'.format(plug) )
|
||||
trace( 550, '\t| netBuff: {}\n'.format(netBuff) )
|
||||
trace( 540, '\t| Leaf: {}\n'.format(plug) )
|
||||
trace( 540, '\t| netBuff: {}\n'.format(netBuff) )
|
||||
deepPlug = self.spares.raddTransNet( netBuff, plug.getPath() )
|
||||
trace( 550, '\t| netBuff: {}\n'.format(netBuff) )
|
||||
trace( 550, '\t| Deep Plug: {}\n'.format(deepPlug) )
|
||||
trace( 540, '\t| netBuff: {}\n'.format(netBuff) )
|
||||
trace( 540, '\t| Deep Plug: {}\n'.format(deepPlug) )
|
||||
deepNetBuff = deepPlug.getMasterNet() if deepPlug else netBuff
|
||||
trace( 550, '\t| deepNetBuff: {} {}\n'.format(deepNetBuff,netBuff) )
|
||||
trace( 540, '\t| deepNetBuff: {} {}\n'.format(deepNetBuff,netBuff) )
|
||||
plug.getEntity().setNet( deepNetBuff )
|
||||
|
||||
maxSinks = self.spares.state.bufferConf.maxSinks
|
||||
if len(self.plugs) > maxSinks:
|
||||
print( WarningMessage( 'QuadTree.splitNetlist(): More than {} sink points ({}) on "{}".' \
|
||||
.format(maxSinks,len(self.plugs),netBuff.getName())) )
|
||||
trace( 550, '-' )
|
||||
trace( 540, '-' )
|
||||
|
||||
def rsplitNetlist ( self ):
|
||||
"""Recursive call over splitNetlist()."""
|
||||
|
@ -603,8 +729,8 @@ class Spares ( object ):
|
|||
|
||||
def getSpareSpaceMargin ( self ):
|
||||
"""
|
||||
Compute the percentage of margin space to compensate for the 4 spare
|
||||
buffers.
|
||||
Compute the percentage of margin space to compensate for the spare
|
||||
buffers (row*columns) with a supplemental margin factor of 1.3 or 1.4.
|
||||
"""
|
||||
if not self.state.useSpares: return 0.0
|
||||
spareSide = self.state.cfg.block.spareSide
|
||||
|
@ -615,7 +741,7 @@ class Spares ( object ):
|
|||
bufferLength = self.state.bufferConf.width * self.state.bColumns * self.state.bRows
|
||||
if not areaLength:
|
||||
raise ErrorMessage( 3, 'Spares.getSpareSpaceMargin(): Spare leaf area is zero.' )
|
||||
return (float(bufferLength) * 1.3) / float(areaLength)
|
||||
return (float(bufferLength) * 1.4) / float(areaLength)
|
||||
|
||||
def toXGCellGrid ( self, x ):
|
||||
"""Find the nearest X (inferior) on the Cell gauge grid (sliceStep)."""
|
||||
|
@ -629,10 +755,10 @@ class Spares ( object ):
|
|||
|
||||
def build ( self ):
|
||||
if not self.state.useSpares: return
|
||||
trace( 550, ',+', '\tSpares.build()\n' )
|
||||
trace( 540, ',+', '\tSpares.build()\n' )
|
||||
with UpdateSession():
|
||||
self.quadTree = QuadTree.create( self )
|
||||
trace( 550, '-' )
|
||||
trace( 540, '-' )
|
||||
|
||||
def rshowPoolUse ( self ):
|
||||
if self.quadTree:
|
||||
|
@ -640,7 +766,7 @@ class Spares ( object ):
|
|||
|
||||
def addStrayBuffer ( self, position ):
|
||||
"""Add a new stray buffer at ``position``."""
|
||||
trace( 550, ',+', '\tSpares.addStrayBuffer()\n' )
|
||||
trace( 540, ',+', '\tSpares.addStrayBuffer()\n' )
|
||||
|
||||
sliceHeight = self.state.gaugeConf.sliceHeight
|
||||
x = self.quadTree.onXPitch( position.getX() )
|
||||
|
@ -660,8 +786,8 @@ class Spares ( object ):
|
|||
transf = Transformation( x+unoverlapDx, y, orientation )
|
||||
instance.setTransformation( transf )
|
||||
self.strayBuffers.append( instance )
|
||||
trace( 550, '\tBuffer: {} @{}\n'.format(self.strayBuffers[-1],transf) )
|
||||
trace( 550, '-' )
|
||||
trace( 540, '\tBuffer: {} @{}\n'.format(self.strayBuffers[-1],transf) )
|
||||
trace( 540, '-' )
|
||||
return instance
|
||||
|
||||
def getFreeBufferNear ( self, position ):
|
||||
|
|
|
@ -0,0 +1,153 @@
|
|||
#
|
||||
# This file is part of the Coriolis Software.
|
||||
# Copyright (c) SU 2020-2020, All Rights Reserved
|
||||
#
|
||||
# +-----------------------------------------------------------------+
|
||||
# | C O R I O L I S |
|
||||
# | C u m u l u s - P y t h o n T o o l s |
|
||||
# | |
|
||||
# | Author : Jean-Paul CHAPUT |
|
||||
# | E-mail : Jean-Paul.Chaput@lip6.fr |
|
||||
# | =============================================================== |
|
||||
# | Python : "./plugins/block/timing.py" |
|
||||
# +-----------------------------------------------------------------+
|
||||
|
||||
"""
|
||||
This module provide a very basic support for timing related features.
|
||||
|
||||
For now, the electrical values are taken from the sxlib dummy 0.35um
|
||||
technology. See ``man sxlib``, **output drive** section.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
import os.path
|
||||
import re
|
||||
from operator import itemgetter, attrgetter, methodcaller
|
||||
import Cfg
|
||||
from Hurricane import Breakpoint
|
||||
from Hurricane import DbU
|
||||
from Hurricane import Box
|
||||
from Hurricane import Transformation
|
||||
from Hurricane import Box
|
||||
from Hurricane import Path
|
||||
from Hurricane import Layer
|
||||
from Hurricane import Occurrence
|
||||
from Hurricane import Net
|
||||
from Hurricane import HyperNet
|
||||
from Hurricane import RoutingPad
|
||||
from Hurricane import Horizontal
|
||||
from Hurricane import Vertical
|
||||
from Hurricane import Contact
|
||||
from Hurricane import Pin
|
||||
from Hurricane import Plug
|
||||
from Hurricane import Instance
|
||||
import CRL
|
||||
from CRL import RoutingLayerGauge
|
||||
from helpers import trace, l, u, n
|
||||
from helpers.io import ErrorMessage
|
||||
from helpers.io import WarningMessage
|
||||
from helpers.io import catch
|
||||
from helpers.overlay import UpdateSession
|
||||
from plugins import getParameter
|
||||
from plugins import utils
|
||||
|
||||
|
||||
class CellTimings ( object ):
|
||||
"""
|
||||
Contains the timing data related to a Cell.
|
||||
"""
|
||||
|
||||
def __init__ ( self, cell ):
|
||||
self.cell = cell
|
||||
self.drive = 0.0
|
||||
|
||||
@property
|
||||
def name ( self ): return self.cell.getName()
|
||||
|
||||
def __str__ ( self ):
|
||||
return '<CellTimings "{}" drive:{}>'.format(self.name, self.drive)
|
||||
|
||||
|
||||
class TechTimings ( object ):
|
||||
"""
|
||||
Timing datas for the technology and the standard cells.
|
||||
"""
|
||||
|
||||
def __init__ ( self ):
|
||||
self.cells = {}
|
||||
self.capaBaseDrive = 0.0
|
||||
self.capaAvgFanin = 0.0
|
||||
self.capaPerLambda = 0.0
|
||||
|
||||
def addCell ( self, cellTiming ):
|
||||
if self.cells.has_key(cellTiming.name):
|
||||
print( ErrorMessage( 1, 'TechTimings.addCell(): Redefinition of timings for "{}"' \
|
||||
.format(cellTiming.name) ))
|
||||
self.cells[ cellTiming.name ] = cellTiming
|
||||
|
||||
def getCapaEstimate ( self, WL, sinks ):
|
||||
return WL*self.capaPerLambda + self.capaAvgFanin*sinks
|
||||
|
||||
def getWlEstimate ( self, cellName, sinks ):
|
||||
drivingCapa = self.getDrivingCapa( cellName )
|
||||
#print( 'sinks:{}, dC:{}, avgFanin:{}, CpL:{}'.format(sinks,drivingCapa,self.capaAvgFanin,self.capaPerLambda) )
|
||||
#print( '{}'.format((drivingCapa - self.capaAvgFanin*sinks) / self.capaPerLambda) )
|
||||
return DbU.fromLambda( (drivingCapa - self.capaAvgFanin*sinks) / self.capaPerLambda )
|
||||
|
||||
def getOneSinkEqWL ( self ):
|
||||
"""Return the equivalent wirelength of the sink average capacity."""
|
||||
return DbU.fromLambda(self.capaAvgFanin / self.capaPerLambda)
|
||||
|
||||
def getSinksEstimate ( self, cellName ):
|
||||
"""
|
||||
Estimate the number sinks that gate can drive. We assume a 100 lambda
|
||||
wire to connect to each sink.
|
||||
"""
|
||||
drivingCapa = self.getDrivingCapa( cellName )
|
||||
return drivingCapa / (self.capaAvgFanin + 100.0*self.capaPerLambda)
|
||||
|
||||
def getDrivingCapa ( self, name ):
|
||||
if not self.cells.has_key(name):
|
||||
print( ErrorMessage( 1, 'TechTimings.getDrivingCapa(): No timings for "{}"' \
|
||||
.format(name) ))
|
||||
return 0.0
|
||||
return self.cells[name].drive
|
||||
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Module static initialization
|
||||
|
||||
tech = None
|
||||
|
||||
cellsTimingsDatas = ( ('inv_x1', 1.0)
|
||||
, ('inv_x2', 1.6)
|
||||
, ('inv_x4', 3.6)
|
||||
, ('inv_x8', 8.4)
|
||||
, ('buf_x2', 2.1)
|
||||
, ('buf_x4', 4.3)
|
||||
, ('buf_x8', 8.4)
|
||||
)
|
||||
|
||||
def staticInit ():
|
||||
"""
|
||||
Static initialization of the ``timing`` module. Allow to postpone
|
||||
initialization until Hurricane & CRL database and evironment are
|
||||
properly loaded.
|
||||
|
||||
Capacitance unit is fF (femto Farad).
|
||||
"""
|
||||
global tech
|
||||
|
||||
if tech is not None: return
|
||||
|
||||
af = CRL.AllianceFramework.get()
|
||||
tech = TechTimings()
|
||||
tech.capaAvgFanin = 10.0
|
||||
tech.capaBaseDrive = 125.0
|
||||
tech.capaPerLambda = 0.3
|
||||
for cellName, drive in cellsTimingsDatas:
|
||||
cell = af.getCell( cellName, CRL.Catalog.State.Views )
|
||||
cellTiming = CellTimings( cell )
|
||||
cellTiming.drive = drive*tech.capaBaseDrive
|
||||
tech.addCell( cellTiming )
|
Loading…
Reference in New Issue