mirror of https://github.com/YosysHQ/yosys.git
Merge branch 'origin/master' into krys/docs
commit d2bf5a83af
@@ -34,6 +34,7 @@ body:
- macOS
- Windows
- BSD
- WebAssembly
multiple: true
validations:
required: true
@@ -16,7 +16,7 @@ jobs:
steps:
- name: Install Dependencies
run: |
brew install bison flex gawk libffi pkg-config bash
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1 brew install bison flex gawk libffi pkg-config bash

- name: Runtime environment
shell: bash
CHANGELOG (23 changed lines)
@@ -2,9 +2,30 @@
List of major changes and improvements between releases
=======================================================

Yosys 0.38 .. Yosys 0.39-dev
Yosys 0.39 .. Yosys 0.40-dev
--------------------------

Yosys 0.38 .. Yosys 0.39
--------------------------
* New commands and options
- Added option "-extra-map" to "synth" pass.
- Added option "-dont_use" to "dfflibmap" pass.
- Added option "-href" to "show" command.
- Added option "-noscopeinfo" to "flatten" pass.
- Added option "-scopename" to "flatten" pass.

* SystemVerilog
- Added support for packed multidimensional arrays.

* Various
- Added "$scopeinfo" cells to preserve information about
the hierarchy during flattening.
- Added sequential area output to "stat -liberty".
- Added ability to record/replay diagnostics in cxxrtl backend.

* Verific support
- Added attributes to module instantiation.

Yosys 0.37 .. Yosys 0.38
--------------------------
* New commands and options
Makefile (6 changed lines)
@@ -141,7 +141,7 @@ LIBS += -lrt
endif
endif

YOSYS_VER := 0.38+113
YOSYS_VER := 0.39+4

# Note: We arrange for .gitcommit to contain the (short) commit hash in
# tarballs generated with git-archive(1) using .gitattributes. The git repo
@@ -157,7 +157,7 @@ endif
OBJS = kernel/version_$(GIT_REV).o

bumpversion:
sed -i "/^YOSYS_VER := / s/+[0-9][0-9]*$$/+`git log --oneline 543faed.. | wc -l`/;" Makefile
sed -i "/^YOSYS_VER := / s/+[0-9][0-9]*$$/+`git log --oneline 0033808.. | wc -l`/;" Makefile

# set 'ABCREV = default' to use abc/ as it is
#
@@ -165,7 +165,7 @@ bumpversion:
# is just a symlink to your actual ABC working directory, as 'make mrproper'
# will remove the 'abc' directory and you do not want to accidentally
# delete your work on ABC..
ABCREV = 896e5e7
ABCREV = 0cd90d0
ABCPULL = 1
ABCURL ?= https://github.com/YosysHQ/abc
ABCMKARGS = CC="$(CXX)" CXX="$(CXX)" ABC_USE_LIBSTDCXX=1 ABC_USE_NAMESPACE=abc VERBOSE=$(Q)
@@ -67,6 +67,8 @@ struct AigerWriter
int initstate_ff = 0;

dict<SigBit, int> ywmap_clocks;
vector<Cell *> ywmap_asserts;
vector<Cell *> ywmap_assumes;

int mkgate(int a0, int a1)
{
@@ -269,6 +271,7 @@ struct AigerWriter
unused_bits.erase(A);
unused_bits.erase(EN);
asserts.push_back(make_pair(A, EN));
ywmap_asserts.push_back(cell);
continue;
}

@@ -279,6 +282,7 @@ struct AigerWriter
unused_bits.erase(A);
unused_bits.erase(EN);
assumes.push_back(make_pair(A, EN));
ywmap_assumes.push_back(cell);
continue;
}

@@ -852,6 +856,19 @@ struct AigerWriter
for (auto &it : init_lines)
json.value(it.second);
json.end_array();

json.name("asserts");
json.begin_array();
for (Cell *cell : ywmap_asserts)
json.value(witness_path(cell));
json.end_array();

json.name("assumes");
json.begin_array();
for (Cell *cell : ywmap_assumes)
json.value(witness_path(cell));
json.end_array();

json.end_object();
}
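For illustration only (not part of the commit): the tail of the map-file JSON object written above gains two new arrays next to the existing init data, one path string per tracked $assert/$assume cell. The values are whatever witness_path() returns; the paths below are placeholders, not actual output:

    "asserts": ["<witness path of an $assert cell>", ...],
    "assumes": ["<witness path of an $assume cell>", ...]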
@@ -49,11 +49,16 @@
// <file> ::= <file-header> <definitions> <sample>+
// <file-header> ::= 0x52585843 0x00004c54
// <definitions> ::= <packet-define>* <packet-end>
// <sample> ::= <packet-sample> <packet-change>* <packet-end>
// <sample> ::= <packet-sample> (<packet-change> | <packet-diag>)* <packet-end>
// <packet-define> ::= 0xc0000000 ...
// <packet-sample> ::= 0xc0000001 ...
// <packet-change> ::= 0x0??????? <chunk>+ | 0x1??????? <index> <chunk>+ | 0x2??????? | 0x3???????
// <chunk>, <index> ::= 0x????????
// <packet-diag> ::= <packet-break> | <packet-print> | <packet-assert> | <packet-assume>
// <packet-break> ::= 0xc0000010 <message> <source-location>
// <packet-print> ::= 0xc0000011 <message> <source-location>
// <packet-assert> ::= 0xc0000012 <message> <source-location>
// <packet-assume> ::= 0xc0000013 <message> <source-location>
// <packet-end> ::= 0xFFFFFFFF
//
// The replay log contains sample data, however, it does not cover the entire design. Rather, it only contains sample
@@ -61,6 +66,10 @@
// a minimum, and recording speed to a maximum. The player samples any missing data by setting the design state items
// to the same values they had during recording, and re-evaluating the design.
//
// Packets for diagnostics (prints, breakpoints, assertions, and assumptions) are used solely for diagnostics emitted
// by the C++ testbench driving the simulation, and are not recorded while evaluating the design. (Diagnostics emitted
// by the RTL can be reconstructed at replay time, so recording them would be a waste of space.)
//
// Limits
// ------
//
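Illustration (not part of the CXXRTL runtime): the four diagnostic packet headers in the grammar above are a single base word, PACKET_DIAGNOSTIC (0xc0000010, defined in a later hunk), with the diagnostic flavor OR'd into the low bits covered by DIAGNOSTIC_MASK. A minimal standalone Python sketch that reproduces those header words:

# Standalone sketch: reproduce <packet-break>..<packet-assume> header words
# from the documented constants (PACKET_DIAGNOSTIC | flavor).
PACKET_DIAGNOSTIC = 0xc0000010
FLAVORS = {"BREAK": 0, "PRINT": 1, "ASSERT": 2, "ASSUME": 3}

for name, flavor in FLAVORS.items():
    print(f"{name:<6} -> 0x{PACKET_DIAGNOSTIC | flavor:08x}")
# BREAK -> 0xc0000010, PRINT -> 0xc0000011, ASSERT -> 0xc0000012, ASSUME -> 0xc0000013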
@@ -109,6 +118,33 @@

namespace cxxrtl {

// A single diagnostic that can be manipulated as an object (including being written to and read from a file).
// This differs from the base CXXRTL interface, where diagnostics can only be emitted via a procedure call, and are
// not materialized as objects.
struct diagnostic {
// The `BREAK` flavor corresponds to a breakpoint, which is a diagnostic type that can currently only be emitted
// by the C++ testbench code.
enum flavor {
BREAK = 0,
PRINT = 1,
ASSERT = 2,
ASSUME = 3,
};

flavor type;
std::string message;
std::string location; // same format as the `src` attribute of `$print` or `$check` cell

diagnostic()
: type(BREAK) {}

diagnostic(flavor type, const std::string &message, const std::string &location)
: type(type), message(message), location(location) {}

diagnostic(flavor type, const std::string &message, const char *file, unsigned line)
: type(type), message(message), location(std::string(file) + ':' + std::to_string(line)) {}
};

// A spool stores CXXRTL design state changes in a file.
class spool {
public:
@@ -139,6 +175,9 @@ public:
static constexpr uint32_t PACKET_CHANGEL = 0x20000000/* | ident */;
static constexpr uint32_t PACKET_CHANGEH = 0x30000000/* | ident */;

static constexpr uint32_t PACKET_DIAGNOSTIC = 0xc0000010/* | diagnostic::flavor */;
static constexpr uint32_t DIAGNOSTIC_MASK = 0x0000000f;

static constexpr uint32_t PACKET_END = 0xffffffff;

// Writing spools.
@@ -281,6 +320,12 @@ public:
emit_word(data[offset]);
}

void write_diagnostic(const diagnostic &diagnostic) {
emit_word(PACKET_DIAGNOSTIC | diagnostic.type);
emit_string(diagnostic.message);
emit_string(diagnostic.location);
}

void write_end() {
emit_word(PACKET_END);
}
@@ -397,11 +442,16 @@ public:
return true;
}

bool read_change_header(uint32_t &header, ident_t &ident) {
bool read_header(uint32_t &header) {
header = absorb_word();
if (header == PACKET_END)
return false;
assert((header & ~(CHANGE_MASK | MAXIMUM_IDENT)) == 0);
return header != PACKET_END;
}

// This method must be separate from `read_change_data` because `chunks` and `depth` can only be looked up
// if `ident` is known.
bool read_change_ident(uint32_t header, ident_t &ident) {
if ((header & ~(CHANGE_MASK | MAXIMUM_IDENT)) != 0)
return false; // some other packet
ident = header & MAXIMUM_IDENT;
return true;
}
@@ -427,6 +477,18 @@ public:
for (size_t offset = 0; offset < chunks; offset++)
data[chunks * index + offset] = absorb_word();
}

bool read_diagnostic(uint32_t header, diagnostic &diagnostic) {
if ((header & ~DIAGNOSTIC_MASK) != PACKET_DIAGNOSTIC)
return false; // some other packet
uint32_t type = header & DIAGNOSTIC_MASK;
assert(type == diagnostic::BREAK || type == diagnostic::PRINT ||
type == diagnostic::ASSERT || type == diagnostic::ASSUME);
diagnostic.type = (diagnostic::flavor)type;
diagnostic.message = absorb_string();
diagnostic.location = absorb_string();
return true;
}
};

// Opening spools. For certain uses of the record/replay mechanism, two distinct open files (two open files, i.e.
@@ -584,6 +646,18 @@ public:
return changed;
}

void record_diagnostic(const diagnostic &diagnostic) {
assert(streaming);

// Emit an incremental delta cycle per diagnostic to simplify the logic of the recorder. This is inefficient, but
// diagnostics should be rare enough that this inefficiency does not matter. If it turns out to be an issue, this
// code should be changed to accumulate diagnostics to a buffer that is flushed in `record_{complete,incremental}`
// and also in `advance_time` before the timestamp is changed. (Right now `advance_time` never writes to the spool.)
writer.write_sample(/*incremental=*/true, pointer++, timestamp);
writer.write_diagnostic(diagnostic);
writer.write_end();
}

void flush() {
writer.flush();
}
@@ -657,8 +731,9 @@ public:
streaming = true;

// Establish the initial state of the design.
initialized = replay();
assert(initialized);
std::vector<diagnostic> diagnostics;
initialized = replay(&diagnostics);
assert(initialized && diagnostics.empty());
}

// Returns the pointer of the current sample.
@@ -690,8 +765,8 @@ public:
// If this function returns `true`, then `current_pointer() == at_pointer`, and the module contains values that
// correspond to this pointer in the replay log. To obtain a valid pointer, call `current_pointer()`; while pointers
// are monotonically increasing for each consecutive sample, using arithmetic operations to create a new pointer is
// not allowed.
bool rewind_to(spool::pointer_t at_pointer) {
// not allowed. The `diagnostics` argument, if not `nullptr`, receives the diagnostics recorded in this sample.
bool rewind_to(spool::pointer_t at_pointer, std::vector<diagnostic> *diagnostics) {
assert(initialized);

// The pointers in the replay log start from one that is greater than `at_pointer`. In this case the pointer will
@@ -707,9 +782,12 @@ public:
reader.rewind(position_it->second);

// Replay samples until eventually arriving to `at_pointer` or encountering end of file.
while(replay()) {
while(replay(diagnostics)) {
if (pointer == at_pointer)
return true;

if (diagnostics)
diagnostics->clear();
}
return false;
}
@@ -717,8 +795,8 @@ public:
// If this function returns `true`, then `current_time() <= at_or_before_timestamp`, and the module contains values
// that correspond to `current_time()` in the replay log. If `current_time() == at_or_before_timestamp` and there
// are several consecutive samples with the same time, the module contains values that correspond to the first of
// these samples.
bool rewind_to_or_before(const time &at_or_before_timestamp) {
// these samples. The `diagnostics` argument, if not `nullptr`, receives the diagnostics recorded in this sample.
bool rewind_to_or_before(const time &at_or_before_timestamp, std::vector<diagnostic> *diagnostics) {
assert(initialized);

// The timestamps in the replay log start from one that is greater than `at_or_before_timestamp`. In this case
@@ -734,7 +812,7 @@ public:
reader.rewind(position_it->second);

// Replay samples until eventually arriving to or past `at_or_before_timestamp` or encountering end of file.
while (replay()) {
while (replay(diagnostics)) {
if (timestamp == at_or_before_timestamp)
break;

@@ -743,14 +821,17 @@ public:
break;
if (next_timestamp > at_or_before_timestamp)
break;

if (diagnostics)
diagnostics->clear();
}
return true;
}

// If this function returns `true`, then `current_pointer()` and `current_time()` are updated for the next sample
// and the module now contains values that correspond to that sample. If it returns `false`, there was no next sample
// to read.
bool replay() {
// to read. The `diagnostics` argument, if not `nullptr`, receives the diagnostics recorded in the next sample.
bool replay(std::vector<diagnostic> *diagnostics) {
assert(streaming);

bool incremental;
@@ -771,11 +852,16 @@ public:
}

uint32_t header;
while (reader.read_header(header)) {
spool::ident_t ident;
variable var;
while (reader.read_change_header(header, ident)) {
diagnostic diag;
if (reader.read_change_ident(header, ident)) {
variable &var = variables.at(ident);
reader.read_change_data(header, var.chunks, var.depth, var.curr);
} else if (reader.read_diagnostic(header, diag)) {
if (diagnostics)
diagnostics->push_back(diag);
} else assert(false && "Unrecognized packet header");
}
return true;
}
@@ -1117,7 +1117,18 @@ struct Smt2Worker

string name_a = get_bool(cell->getPort(ID::A));
string name_en = get_bool(cell->getPort(ID::EN));
if (cell->name[0] == '$' && cell->attributes.count(ID::src))
bool private_name = cell->name[0] == '$';

if (!private_name && cell->has_attribute(ID::hdlname)) {
for (auto const &part : cell->get_hdlname_attribute()) {
if (part == "_witness_") {
private_name = true;
break;
}
}
}

if (private_name && cell->attributes.count(ID::src))
decls.push_back(stringf("; yosys-smt2-%s %d %s %s\n", cell->type.c_str() + 1, id, get_id(cell), cell->attributes.at(ID::src).decode_string().c_str()));
else
decls.push_back(stringf("; yosys-smt2-%s %d %s\n", cell->type.c_str() + 1, id, get_id(cell)));
@@ -57,6 +57,8 @@ keep_going = False
check_witness = False
detect_loops = False
incremental = None
track_assumes = False
minimize_assumes = False
so = SmtOpts()

@@ -189,6 +191,15 @@ def help():
--incremental
run in incremental mode (experimental)

--track-assumes
track individual assumptions and report a subset of used
assumptions that are sufficient for the reported outcome. This
can be used to debug PREUNSAT failures as well as to find a
smaller set of sufficient assumptions.

--minimize-assumes
when using --track-assumes, solve for a minimal set of sufficient assumptions.

""" + so.helpmsg())

def usage():
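As a usage illustration (the SMT-LIB file name is hypothetical, the other flags are existing yosys-smtbmc options), the new switches combine with an ordinary BMC invocation:

    yosys-smtbmc -s yices -t 25 --track-assumes --minimize-assumes top.smt2

When a run ends in an unsatisfiable state (for example PREUNSAT), the subset of tracked assumptions that was actually used is reported by report_tracked_assumptions, defined further down.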
@@ -200,7 +211,8 @@ try:
opts, args = getopt.getopt(sys.argv[1:], so.shortopts + "t:higcm:", so.longopts +
["help", "final-only", "assume-skipped=", "smtc=", "cex=", "aig=", "aig-noheader", "yw=", "btorwit=", "presat",
"dump-vcd=", "dump-yw=", "dump-vlogtb=", "vlogtb-top=", "dump-smtc=", "dump-all", "noinfo", "append=",
"smtc-init", "smtc-top=", "noinit", "binary", "keep-going", "check-witness", "detect-loops", "incremental"])
"smtc-init", "smtc-top=", "noinit", "binary", "keep-going", "check-witness", "detect-loops", "incremental",
"track-assumes", "minimize-assumes"])
except:
usage()

@@ -289,6 +301,10 @@ for o, a in opts:
elif o == "--incremental":
from smtbmc_incremental import Incremental
incremental = Incremental()
elif o == "--track-assumes":
track_assumes = True
elif o == "--minimize-assumes":
minimize_assumes = True
elif so.handle(o, a):
pass
else:
@@ -447,6 +463,9 @@ def get_constr_expr(db, state, final=False, getvalues=False, individual=False):

smt = SmtIo(opts=so)

if track_assumes:
smt.smt2_options[':produce-unsat-assumptions'] = 'true'

if noinfo and vcdfile is None and vlogtbfile is None and outconstr is None:
smt.produce_models = False

@@ -649,14 +668,20 @@ if aimfile is not None:
num_steps = max(num_steps, step+2)
step += 1

ywfile_hierwitness_cache = None

def ywfile_constraints(inywfile, constr_assumes, map_steps=None, skip_x=False):
global ywfile_hierwitness_cache
if map_steps is None:
map_steps = {}

with open(inywfile, "r") as f:
inyw = ReadWitness(f)

inits, seqs, clocks, mems = smt.hierwitness(topmod, allregs=True, blackbox=True)
if ywfile_hierwitness_cache is None:
ywfile_hierwitness_cache = smt.hierwitness(topmod, allregs=True, blackbox=True)

inits, seqs, clocks, mems = ywfile_hierwitness_cache

smt_wires = defaultdict(list)
smt_mems = defaultdict(list)
@@ -1491,6 +1516,44 @@ def get_active_assert_map(step, active):

return assert_map

assume_enables = {}

def declare_assume_enables():
def recurse(mod, path, key_base=()):
for expr, desc in smt.modinfo[mod].assumes.items():
enable = f"|assume_enable {len(assume_enables)}|"
smt.smt2_assumptions[(expr, key_base)] = enable
smt.write(f"(declare-const {enable} Bool)")
assume_enables[(expr, key_base)] = (enable, path, desc)

for cell, submod in smt.modinfo[mod].cells.items():
recurse(submod, f"{path}.{cell}", (mod, cell, key_base))

recurse(topmod, topmod)

if track_assumes:
declare_assume_enables()

def smt_assert_design_assumes(step):
if not track_assumes:
smt_assert_consequent("(|%s_u| s%d)" % (topmod, step))
return

if not assume_enables:
return

def expr_for_assume(assume_key, base=None):
expr, key_base = assume_key
expr_prefix = f"(|{expr}| "
expr_suffix = ")"
while key_base:
mod, cell, key_base = key_base
expr_prefix += f"(|{mod}_h {cell}| "
expr_suffix += ")"
return f"{expr_prefix} s{step}{expr_suffix}"

for assume_key, (enable, path, desc) in assume_enables.items():
smt_assert_consequent(f"(=> {enable} {expr_for_assume(assume_key)})")

states = list()
asserts_antecedent_cache = [list()]
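Worked example (illustration only): the string that expr_for_assume() above builds wraps the per-module assumption function in one hierarchy accessor per level of the key. The sketch below reimplements the same construction standalone; the module, cell and assumption names are invented, and step is passed explicitly here instead of being taken from the enclosing scope as in the code above.

# Reimplementation of the string construction from smt_assert_design_assumes()
# above, for a made-up assumption "sub_u 42" inside cell "u0" of module "top".
def expr_for_assume(assume_key, step):
    expr, key_base = assume_key
    expr_prefix = f"(|{expr}| "
    expr_suffix = ")"
    while key_base:
        mod, cell, key_base = key_base
        expr_prefix += f"(|{mod}_h {cell}| "
        expr_suffix += ")"
    return f"{expr_prefix} s{step}{expr_suffix}"

print(expr_for_assume(("sub_u 42", ("top", "u0", ())), 7))
# -> (|sub_u 42| (|top_h u0|  s7))   (the double space matches the code above)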
@@ -1645,6 +1708,13 @@ def smt_check_sat(expected=["sat", "unsat"]):
smt_forall_assert()
return smt.check_sat(expected=expected)

def report_tracked_assumptions(msg):
if track_assumes:
print_msg(msg)
for key in smt.get_unsat_assumptions(minimize=minimize_assumes):
enable, path, descr = assume_enables[key]
print_msg(f" In {path}: {descr}")


if incremental:
incremental.mainloop()
@@ -1658,7 +1728,7 @@ elif tempind:
break

smt_state(step)
smt_assert_consequent("(|%s_u| s%d)" % (topmod, step))
smt_assert_design_assumes(step)
smt_assert_antecedent("(|%s_h| s%d)" % (topmod, step))
smt_assert_antecedent("(not (|%s_is| s%d))" % (topmod, step))
smt_assert_consequent(get_constr_expr(constr_assumes, step))
@@ -1701,6 +1771,7 @@ elif tempind:

else:
print_msg("Temporal induction successful.")
report_tracked_assumptions("Used assumptions:")
retstatus = "PASSED"
break

@@ -1726,7 +1797,7 @@ elif covermode:

while step < num_steps:
smt_state(step)
smt_assert_consequent("(|%s_u| s%d)" % (topmod, step))
smt_assert_design_assumes(step)
smt_assert_antecedent("(|%s_h| s%d)" % (topmod, step))
smt_assert_consequent(get_constr_expr(constr_assumes, step))

@@ -1747,6 +1818,7 @@ elif covermode:
smt_assert("(distinct (covers_%d s%d) #b%s)" % (coveridx, step, "0" * len(cover_desc)))

if smt_check_sat() == "unsat":
report_tracked_assumptions("Used assumptions:")
smt_pop()
break

@@ -1755,13 +1827,14 @@ elif covermode:
print_msg("Appending additional step %d." % i)
smt_state(i)
smt_assert_antecedent("(not (|%s_is| s%d))" % (topmod, i))
smt_assert_consequent("(|%s_u| s%d)" % (topmod, i))
smt_assert_design_assumes(i)
smt_assert_antecedent("(|%s_h| s%d)" % (topmod, i))
smt_assert_antecedent("(|%s_t| s%d s%d)" % (topmod, i-1, i))
smt_assert_consequent(get_constr_expr(constr_assumes, i))
print_msg("Re-solving with appended steps..")
if smt_check_sat() == "unsat":
print("%s Cannot appended steps without violating assumptions!" % smt.timestamp())
report_tracked_assumptions("Conflicting assumptions:")
found_failed_assert = True
retstatus = "FAILED"
break
@@ -1817,7 +1890,7 @@ else: # not tempind, covermode
retstatus = "PASSED"
while step < num_steps:
smt_state(step)
smt_assert_consequent("(|%s_u| s%d)" % (topmod, step))
smt_assert_design_assumes(step)
smt_assert_antecedent("(|%s_h| s%d)" % (topmod, step))
smt_assert_consequent(get_constr_expr(constr_assumes, step))

@@ -1847,7 +1920,7 @@ else: # not tempind, covermode
if step+i < num_steps:
smt_state(step+i)
smt_assert_antecedent("(not (|%s_is| s%d))" % (topmod, step+i))
smt_assert_consequent("(|%s_u| s%d)" % (topmod, step+i))
smt_assert_design_assumes(step + i)
smt_assert_antecedent("(|%s_h| s%d)" % (topmod, step+i))
smt_assert_antecedent("(|%s_t| s%d s%d)" % (topmod, step+i-1, step+i))
smt_assert_consequent(get_constr_expr(constr_assumes, step+i))
@@ -1861,7 +1934,8 @@ else: # not tempind, covermode
print_msg("Checking assumptions in steps %d to %d.." % (step, last_check_step))

if smt_check_sat() == "unsat":
print("%s Assumptions are unsatisfiable!" % smt.timestamp())
print_msg("Assumptions are unsatisfiable!")
report_tracked_assumptions("Conficting assumptions:")
retstatus = "PREUNSAT"
break

@@ -1914,13 +1988,14 @@ else: # not tempind, covermode
print_msg("Appending additional step %d." % i)
smt_state(i)
smt_assert_antecedent("(not (|%s_is| s%d))" % (topmod, i))
smt_assert_consequent("(|%s_u| s%d)" % (topmod, i))
smt_assert_design_assumes(i)
smt_assert_antecedent("(|%s_h| s%d)" % (topmod, i))
smt_assert_antecedent("(|%s_t| s%d s%d)" % (topmod, i-1, i))
smt_assert_consequent(get_constr_expr(constr_assumes, i))
print_msg("Re-solving with appended steps..")
if smt_check_sat() == "unsat":
print("%s Cannot append steps without violating assumptions!" % smt.timestamp())
print_msg("Cannot append steps without violating assumptions!")
report_tracked_assumptions("Conflicting assumptions:")
retstatus = "FAILED"
break
print_anyconsts(step)
@@ -15,6 +15,14 @@ class InteractiveError(Exception):
pass


def mkkey(data):
if isinstance(data, list):
return tuple(map(mkkey, data))
elif isinstance(data, dict):
raise InteractiveError(f"JSON objects found in assumption key: {data!r}")
return data


class Incremental:
def __init__(self):
self.traceidx = 0
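Illustration of mkkey() above (it reuses the function from the hunk; the key contents are invented): JSON arrays become nested tuples so they can serve as dictionary keys for assume_enables and smt2_assumptions, scalars pass through unchanged, and JSON objects are rejected.

print(mkkey(["sub_u 42", ["top", "u0", []]]))  # -> ('sub_u 42', ('top', 'u0', ()))
print(mkkey("plain-string"))                   # -> 'plain-string'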
@@ -73,17 +81,17 @@ class Incremental:

if min_len is not None and arg_len < min_len:
if min_len == max_len:
raise (
raise InteractiveError(
f"{json.dumps(expr[0])} expression must have "
f"{min_len} argument{'s' if min_len != 1 else ''}"
)
else:
raise (
raise InteractiveError(
f"{json.dumps(expr[0])} expression must have at least "
f"{min_len} argument{'s' if min_len != 1 else ''}"
)
if max_len is not None and arg_len > max_len:
raise (
raise InteractiveError(
f"{json.dumps(expr[0])} expression can have at most "
f"{min_len} argument{'s' if max_len != 1 else ''}"
)
@@ -96,13 +104,30 @@ class Incremental:
smt_out.append(f"s{step}")
return "module", smtbmc.topmod

def expr_mod_constraint(self, expr, smt_out):
self.expr_arg_len(expr, 1)
def expr_cell(self, expr, smt_out):
self.expr_arg_len(expr, 2)
position = len(smt_out)
smt_out.append(None)
arg_sort = self.expr(expr[1], smt_out, required_sort=["module", None])
arg_sort = self.expr(expr[2], smt_out, required_sort=["module", None])
smt_out.append(")")
module = arg_sort[1]
cell = expr[1]
submod = smtbmc.smt.modinfo[module].cells.get(cell)
if submod is None:
raise InteractiveError(f"module {module!r} has no cell {cell!r}")
smt_out[position] = f"(|{module}_h {cell}| "
return ("module", submod)

def expr_mod_constraint(self, expr, smt_out):
suffix = expr[0][3:]
self.expr_arg_len(expr, 1, 2 if suffix in ["_a", "_u", "_c"] else 1)
position = len(smt_out)
smt_out.append(None)
arg_sort = self.expr(expr[-1], smt_out, required_sort=["module", None])
module = arg_sort[1]
if len(expr) == 3:
smt_out[position] = f"(|{module}{suffix} {expr[1]}| "
else:
smt_out[position] = f"(|{module}{suffix}| "
smt_out.append(")")
return "Bool"
@@ -223,20 +248,19 @@ class Incremental:
subexpr = expr[2]

if not isinstance(label, str):
raise InteractiveError(f"expression label has to be a string")
raise InteractiveError("expression label has to be a string")

smt_out.append("(! ")
smt_out.appedd(label)
smt_out.append(" ")

sort = self.expr(subexpr, smt_out)

smt_out.append(" :named ")
smt_out.append(label)
smt_out.append(")")

return sort

expr_handlers = {
"step": expr_step,
"cell": expr_cell,
"mod_h": expr_mod_constraint,
"mod_is": expr_mod_constraint,
"mod_i": expr_mod_constraint,
@@ -302,6 +326,30 @@ class Incremental:

assert_fn(self.expr_smt(cmd.get("expr"), "Bool"))

def cmd_assert_design_assumes(self, cmd):
step = self.arg_step(cmd)
smtbmc.smt_assert_design_assumes(step)

def cmd_get_design_assume(self, cmd):
key = mkkey(cmd.get("key"))
return smtbmc.assume_enables.get(key)

def cmd_update_assumptions(self, cmd):
expr = cmd.get("expr")
key = cmd.get("key")

key = mkkey(key)

result = smtbmc.smt.smt2_assumptions.pop(key, None)
if expr is not None:
expr = self.expr_smt(expr, "Bool")
smtbmc.smt.smt2_assumptions[key] = expr
return result

def cmd_get_unsat_assumptions(self, cmd):
return smtbmc.smt.get_unsat_assumptions(minimize=bool(cmd.get('minimize')))

def cmd_push(self, cmd):
smtbmc.smt_push()
@@ -313,11 +361,14 @@ class Incremental:

def cmd_smtlib(self, cmd):
command = cmd.get("command")
response = cmd.get("response", False)
if not isinstance(command, str):
raise InteractiveError(
f"raw SMT-LIB command must be a string, found {json.dumps(command)}"
)
smtbmc.smt.write(command)
if response:
return smtbmc.smt.read()

def cmd_design_hierwitness(self, cmd=None):
allregs = (cmd is None) or bool(cmd.get("allreges", False))
@@ -369,6 +420,21 @@ class Incremental:

return dict(last_step=last_step)

def cmd_modinfo(self, cmd):
fields = cmd.get("fields", [])

mod = cmd.get("mod")
if mod is None:
mod = smtbmc.topmod
modinfo = smtbmc.smt.modinfo.get(mod)
if modinfo is None:
return None

result = dict(name=mod)
for field in fields:
result[field] = getattr(modinfo, field, None)
return result

def cmd_ping(self, cmd):
return cmd
@@ -377,6 +443,10 @@ class Incremental:
"assert": cmd_assert,
"assert_antecedent": cmd_assert,
"assert_consequent": cmd_assert,
"assert_design_assumes": cmd_assert_design_assumes,
"get_design_assume": cmd_get_design_assume,
"update_assumptions": cmd_update_assumptions,
"get_unsat_assumptions": cmd_get_unsat_assumptions,
"push": cmd_push,
"pop": cmd_pop,
"check": cmd_check,
@@ -384,6 +454,7 @@ class Incremental:
"design_hierwitness": cmd_design_hierwitness,
"write_yw_trace": cmd_write_yw_trace,
"read_yw_trace": cmd_read_yw_trace,
"modinfo": cmd_modinfo,
"ping": cmd_ping,
}
@@ -79,6 +79,20 @@ def except_hook(exctype, value, traceback):
sys.excepthook = except_hook


def recursion_helper(iteration, *request):
stack = [iteration(*request)]

while stack:
top = stack.pop()
try:
request = next(top)
except StopIteration:
continue

stack.append(top)
stack.append(iteration(*request))


hex_dict = {
"0": "0000", "1": "0001", "2": "0010", "3": "0011",
"4": "0100", "5": "0101", "6": "0110", "7": "0111",
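A note on the pattern: recursion_helper is a generator trampoline. The "iteration" is a generator that, instead of recursing, yields the argument tuple of each sub-call; results travel back through a mutable output argument, and the helper drives everything with an explicit stack so that deeply nested SMT-LIB expressions do not overflow Python's call stack. A standalone sketch (recursion_helper is copied verbatim from the hunk above; flatten_into is an invented toy iteration):

def recursion_helper(iteration, *request):
    stack = [iteration(*request)]
    while stack:
        top = stack.pop()
        try:
            request = next(top)
        except StopIteration:
            continue
        stack.append(top)
        stack.append(iteration(*request))

def flatten_into(node, out):
    # Toy "iteration": flatten arbitrarily nested lists. Non-list nodes are
    # appended directly; list nodes yield (child, out) requests, mirroring how
    # _unroll_stmt_into (further down) passes results through its output list.
    if not isinstance(node, list):
        out.append(node)
        return
    for child in node:
        yield child, out

result = []
recursion_helper(flatten_into, [1, [2, [3, [4]]], 5], result)
print(result)  # [1, 2, 3, 4, 5]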
@@ -100,6 +114,7 @@ class SmtModInfo:
self.clocks = dict()
self.cells = dict()
self.asserts = dict()
self.assumes = dict()
self.covers = dict()
self.maximize = set()
self.minimize = set()
@@ -127,6 +142,7 @@ class SmtIo:
self.recheck = False
self.smt2cache = [list()]
self.smt2_options = dict()
self.smt2_assumptions = dict()
self.p = None
self.p_index = solvers_index
solvers_index += 1
@@ -298,10 +314,22 @@ class SmtIo:
return stmt

def unroll_stmt(self, stmt):
if not isinstance(stmt, list):
return stmt
result = []
recursion_helper(self._unroll_stmt_into, stmt, result)
return result.pop()

stmt = [self.unroll_stmt(s) for s in stmt]
def _unroll_stmt_into(self, stmt, output, depth=128):
if not isinstance(stmt, list):
output.append(stmt)
return

new_stmt = []
for s in stmt:
if depth:
yield from self._unroll_stmt_into(s, new_stmt, depth - 1)
else:
yield s, new_stmt
stmt = new_stmt

if len(stmt) >= 2 and not isinstance(stmt[0], list) and stmt[0] in self.unroll_decls:
assert stmt[1] in self.unroll_objs
@@ -330,12 +358,19 @@ class SmtIo:
decl[2] = list()

if len(decl) > 0:
decl = self.unroll_stmt(decl)
tmp = []
if depth:
yield from self._unroll_stmt_into(decl, tmp, depth - 1)
else:
yield decl, tmp

decl = tmp.pop()
self.write(self.unparse(decl), unroll=False)

return self.unroll_cache[key]
output.append(self.unroll_cache[key])
return

return stmt
output.append(stmt)

def p_thread_main(self):
while True:
@@ -569,6 +604,12 @@ class SmtIo:
else:
self.modinfo[self.curmod].covers["%s_c %s" % (self.curmod, fields[2])] = fields[3]

if fields[1] == "yosys-smt2-assume":
if len(fields) > 4:
self.modinfo[self.curmod].assumes["%s_u %s" % (self.curmod, fields[2])] = f'{fields[4]} ({fields[3]})'
else:
self.modinfo[self.curmod].assumes["%s_u %s" % (self.curmod, fields[2])] = fields[3]

if fields[1] == "yosys-smt2-maximize":
self.modinfo[self.curmod].maximize.add(fields[2])

@@ -752,8 +793,13 @@ class SmtIo:
return stmt

def check_sat(self, expected=["sat", "unsat", "unknown", "timeout", "interrupted"]):
if self.smt2_assumptions:
assume_exprs = " ".join(self.smt2_assumptions.values())
check_stmt = f"(check-sat-assuming ({assume_exprs}))"
else:
check_stmt = "(check-sat)"
if self.debug_print:
print("> (check-sat)")
print(f"> {check_stmt}")
if self.debug_file and not self.nocomments:
print("; running check-sat..", file=self.debug_file)
self.debug_file.flush()
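Illustration of the check_sat() change above, using invented keys and enable names of the shape declare_assume_enables() produces: with tracked assumptions registered, the solver now receives a single check-sat-assuming command instead of a bare check-sat.

smt2_assumptions = {
    ("top_u 1", ()): "|assume_enable 0|",
    ("top_u 2", ()): "|assume_enable 1|",
}
assume_exprs = " ".join(smt2_assumptions.values())
print(f"(check-sat-assuming ({assume_exprs}))")
# -> (check-sat-assuming (|assume_enable 0| |assume_enable 1|))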
@@ -767,7 +813,7 @@ class SmtIo:
for cache_stmt in cache_ctx:
self.p_write(cache_stmt + "\n", False)

self.p_write("(check-sat)\n", True)
self.p_write(f"{check_stmt}\n", True)

if self.timeinfo:
i = 0
@@ -835,7 +881,7 @@ class SmtIo:

if self.debug_file:
print("(set-info :status %s)" % result, file=self.debug_file)
print("(check-sat)", file=self.debug_file)
print(check_stmt, file=self.debug_file)
self.debug_file.flush()

if result not in expected:
@@ -912,6 +958,48 @@ class SmtIo:
def bv2int(self, v):
return int(self.bv2bin(v), 2)

def get_raw_unsat_assumptions(self):
self.write("(get-unsat-assumptions)")
exprs = set(self.unparse(part) for part in self.parse(self.read()))
unsat_assumptions = []
for key, value in self.smt2_assumptions.items():
# normalize expression
value = self.unparse(self.parse(value))
if value in exprs:
exprs.remove(value)
unsat_assumptions.append(key)
return unsat_assumptions

def get_unsat_assumptions(self, minimize=False):
if not minimize:
return self.get_raw_unsat_assumptions()
required_assumptions = {}

while True:
candidate_assumptions = {}
for key in self.get_raw_unsat_assumptions():
if key not in required_assumptions:
candidate_assumptions[key] = self.smt2_assumptions[key]

while candidate_assumptions:

candidate_key, candidate_assume = candidate_assumptions.popitem()

self.smt2_assumptions = {}
for key, assume in candidate_assumptions.items():
self.smt2_assumptions[key] = assume
for key, assume in required_assumptions.items():
self.smt2_assumptions[key] = assume
result = self.check_sat()

if result == 'unsat':
candidate_assumptions = None
else:
required_assumptions[candidate_key] = candidate_assume

if candidate_assumptions is not None:
return list(required_assumptions)

def get(self, expr):
self.write("(get-value (%s))" % (expr))
return self.parse(self.read())[0][1]
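The get_unsat_assumptions(minimize=True) loop above is a deletion-based core minimization: drop one assumption at a time; if the rest is still unsat the dropped one was redundant, otherwise it is required. A simplified standalone sketch of the same idea against an abstract is_unsat() oracle (oracle and assumption names are invented; unlike the real code, this sketch does not refresh the candidate set from the solver's reported core after each unsat check):

def minimize_core(core, is_unsat):
    required = []
    candidates = list(core)
    while candidates:
        dropped = candidates[-1]
        trial = candidates[:-1]
        if is_unsat(trial + required):
            candidates = trial            # still unsat: dropped one was redundant
        else:
            required.append(dropped)      # became sat: dropped one is needed
            candidates = trial
    return required

# Toy oracle: "unsat" iff both a and c are assumed.
is_unsat = lambda assumptions: {"a", "c"} <= set(assumptions)
print(minimize_core(["a", "b", "c", "d"], is_unsat))  # -> ['c', 'a']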
@@ -172,6 +172,141 @@ void demux_op(AbstractCellEdgesDatabase *db, RTLIL::Cell *cell)
}
}

void shift_op(AbstractCellEdgesDatabase *db, RTLIL::Cell *cell)
{
bool is_signed = cell->getParam(ID::A_SIGNED).as_bool();
bool is_b_signed = cell->getParam(ID::B_SIGNED).as_bool();
int a_width = GetSize(cell->getPort(ID::A));
int b_width = GetSize(cell->getPort(ID::B));
int y_width = GetSize(cell->getPort(ID::Y));

// Behavior of the different shift cells:
//
// $shl, $sshl -- shifts left by the amount on B port, B always unsigned
// $shr, $sshr -- ditto right
// $shift, $shiftx -- shifts right by the amount on B port, B optionally signed
//
// Sign extension (if A signed):
//
// $shl, $shr, $shift -- only sign-extends up to size of Y, then shifts in zeroes
// $sshl, $sshr -- fully sign-extends
// $shiftx -- no sign extension
//
// Because $shl, $sshl only shift left, and $shl sign-extens up to size of Y, they
// are effectively the same.

// the cap below makes sure we don't overflow in the arithmetic further down, though
// it makes the edge data invalid once a_width approaches the order of 2**30
// (that ever happening is considered improbable)
int b_width_capped = min(b_width, 30);

int b_high, b_low;
if (!is_b_signed) {
b_high = (1 << b_width_capped) - 1;
b_low = 0;
} else {
b_high = (1 << (b_width_capped - 1)) - 1;
b_low = -(1 << (b_width_capped - 1));
}

for (int i = 0; i < y_width; i++){
// highest position of Y that can change with the value of B
int b_range_upper = 0;
// 1 + highest position of A that can be moved to Y[i]
int a_range_upper;
// lowest position of A that can be moved to Y[i]
int a_range_lower;

if (cell->type.in(ID($shl), ID($sshl))) {
b_range_upper = a_width + b_high;
if (is_signed) b_range_upper -= 1;
a_range_lower = max(0, i - b_high);
a_range_upper = min(i+1, a_width);
} else if (cell->type.in(ID($shr), ID($sshr)) || (cell->type.in(ID($shift), ID($shiftx)) && !is_b_signed)) {
b_range_upper = a_width;
a_range_lower = min(i, a_width - 1);
a_range_upper = min(i+1 + b_high, a_width);
} else if (cell->type.in(ID($shift), ID($shiftx)) && is_b_signed) {
// can go both ways depending on sign of B
// 2's complement range is different depending on direction
b_range_upper = a_width - b_low;
a_range_lower = max(0, i + b_low);
if (is_signed)
a_range_lower = min(a_range_lower, a_width - 1);
a_range_upper = min(i+1 + b_high, a_width);
} else {
log_assert(false && "unreachable");
}

if (i < b_range_upper) {
for (int k = a_range_lower; k < a_range_upper; k++)
db->add_edge(cell, ID::A, k, ID::Y, i, -1);
} else {
// only influence is through sign extension
if (is_signed)
db->add_edge(cell, ID::A, a_width - 1, ID::Y, i, -1);
}

for (int k = 0; k < b_width; k++) {
// left shifts
if (cell->type.in(ID($shl), ID($sshl))) {
if (a_width == 1 && is_signed) {
int skip = 1 << (k + 1);
int base = skip -1;
if (i % skip != base && i - a_width + 2 < 1 << b_width)
db->add_edge(cell, ID::B, k, ID::Y, i, -1);
} else if (is_signed) {
if (i - a_width + 2 < 1 << b_width)
db->add_edge(cell, ID::B, k, ID::Y, i, -1);
} else {
if (i - a_width + 1 < 1 << b_width)
db->add_edge(cell, ID::B, k, ID::Y, i, -1);
}
// right shifts
} else if (cell->type.in(ID($shr), ID($sshr)) || (cell->type.in(ID($shift), ID($shiftx)) && !is_b_signed)) {
if (is_signed) {
bool shift_in_bulk = i < a_width - 1;
// can we jump into the zero-padding by toggling B[k]?
bool zpad_jump = (((y_width - i) & ((1 << (k + 1)) - 1)) != 0 \
&& (((y_width - i) & ~(1 << k)) < (1 << b_width)));

if (shift_in_bulk || (cell->type.in(ID($shr), ID($shift), ID($shiftx)) && zpad_jump))
db->add_edge(cell, ID::B, k, ID::Y, i, -1);
} else {
if (i < a_width)
db->add_edge(cell, ID::B, k, ID::Y, i, -1);
}
// bidirectional shifts (positive B shifts right, negative left)
} else if (cell->type.in(ID($shift), ID($shiftx)) && is_b_signed) {
if (is_signed) {
if (k != b_width - 1) {
bool r_shift_in_bulk = i < a_width - 1;
// assuming B is positive, can we jump into the upper zero-padding by toggling B[k]?
bool r_zpad_jump = (((y_width - i) & ((1 << (k + 1)) - 1)) != 0 \
&& (((y_width - i) & ~(1 << k)) <= b_high));
// assuming B is negative, can we influence Y[i] by toggling B[k]?
bool l = a_width - 2 - i >= b_low;
if (a_width == 1) {
// in case of a_width==1 we go into more detailed reasoning
l = l && (~(i - a_width) & ((1 << (k + 1)) - 1)) != 0;
}
if (r_shift_in_bulk || r_zpad_jump || l)
db->add_edge(cell, ID::B, k, ID::Y, i, -1);
} else {
if (y_width - i <= b_high || a_width - 2 - i >= b_low)
db->add_edge(cell, ID::B, k, ID::Y, i, -1);
}
} else {
if (a_width - 1 - i >= b_low)
db->add_edge(cell, ID::B, k, ID::Y, i, -1);
}
} else {
log_assert(false && "unreachable");
}
}
}
}

PRIVATE_NAMESPACE_END

bool YOSYS_NAMESPACE_PREFIX AbstractCellEdgesDatabase::add_edges_from_cell(RTLIL::Cell *cell)
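Worked example (illustration only, widths chosen arbitrarily): for an unsigned $shr/$sshr with a_width = 8, b_width = 2 and y_width = 8, the formulas above give b_high = 3, so Y[i] depends on A bits from min(i, 7) up to (but excluding) min(i + 4, 8), and on every bit of B while i < a_width. The sketch just evaluates those expressions:

a_width, b_width, y_width = 8, 2, 8
b_high = (1 << min(b_width, 30)) - 1          # B unsigned: values 0..3

for i in range(y_width):
    a_lo = min(i, a_width - 1)                # a_range_lower
    a_hi = min(i + 1 + b_high, a_width)       # a_range_upper
    a_deps = list(range(a_lo, a_hi)) if i < a_width else []
    b_deps = list(range(b_width)) if i < a_width else []
    print(f"Y[{i}] <- A{a_deps} B{b_deps}")
# e.g. Y[0] <- A[0, 1, 2, 3] B[0, 1]  ...  Y[7] <- A[7] B[0, 1]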
@@ -201,11 +336,10 @@ bool YOSYS_NAMESPACE_PREFIX AbstractCellEdgesDatabase::add_edges_from_cell(RTLIL
return true;
}

// FIXME:
// if (cell->type.in(ID($shl), ID($shr), ID($sshl), ID($sshr), ID($shift), ID($shiftx))) {
// shift_op(this, cell);
// return true;
// }
if (cell->type.in(ID($shl), ID($shr), ID($sshl), ID($sshr), ID($shift), ID($shiftx))) {
shift_op(this, cell);
return true;
}

if (cell->type.in(ID($lt), ID($le), ID($eq), ID($ne), ID($eqx), ID($nex), ID($ge), ID($gt))) {
compare_op(this, cell);
@@ -227,8 +361,17 @@ bool YOSYS_NAMESPACE_PREFIX AbstractCellEdgesDatabase::add_edges_from_cell(RTLIL
return true;
}

// FIXME: $mul $div $mod $divfloor $modfloor $slice $concat
// FIXME: $lut $sop $alu $lcu $macc $fa
// FIXME: $mul $div $mod $divfloor $modfloor $pow $slice $concat $bweqx
// FIXME: $lut $sop $alu $lcu $macc $fa $logic_and $logic_or $bwmux

// FIXME: $_BUF_ $_NOT_ $_AND_ $_NAND_ $_OR_ $_NOR_ $_XOR_ $_XNOR_ $_ANDNOT_ $_ORNOT_
// FIXME: $_MUX_ $_NMUX_ $_MUX4_ $_MUX8_ $_MUX16_ $_AOI3_ $_OAI3_ $_AOI4_ $_OAI4_

// FIXME: $specify2 $specify3 $specrule ???
// FIXME: $equiv $set_tag $get_tag $overwrite_tag $original_tag

if (cell->type.in(ID($assert), ID($assume), ID($live), ID($fair), ID($cover), ID($initstate), ID($anyconst), ID($anyseq), ID($allconst), ID($allseq)))
return true; // no-op: these have either no inputs or no outputs

return false;
}
@@ -1001,7 +1001,7 @@ struct LicensePass : public Pass {
log("This command produces the following notice.\n");
notice();
}
void execute(std::vector<std::string> args, RTLIL::Design*) override
void execute(std::vector<std::string>, RTLIL::Design*) override
{
notice();
}
@@ -134,7 +134,6 @@ static bool rename_witness(RTLIL::Design *design, dict<RTLIL::Module *, int> &ca
cell->set_hdlname_attribute({ "_witness_", strstr(new_id.c_str(), ".") + 1 });
renames.emplace_back(cell, new_id);
}
break;
}

if (cell->type.in(ID($anyconst), ID($anyseq), ID($anyinit), ID($allconst), ID($allseq))) {
@@ -165,6 +164,20 @@ static bool rename_witness(RTLIL::Design *design, dict<RTLIL::Module *, int> &ca
}
}
}

if (cell->type.in(ID($assert), ID($assume), ID($cover), ID($live), ID($fair), ID($check))) {
has_witness_signals = true;
if (cell->name.isPublic())
continue;
std::string name = stringf("%s_%s", cell->type.c_str() + 1, cell->name.c_str() + 1);
for (auto &c : name)
if ((c < 'a' || c > 'z') && (c < 'A' || c > 'Z') && (c < '0' || c > '9') && c != '_')
c = '_';
auto new_id = module->uniquify("\\_witness_." + name);
renames.emplace_back(cell, new_id);
cell->set_hdlname_attribute({ "_witness_", strstr(new_id.c_str(), ".") + 1 });
}
}
for (auto rename : renames) {
module->rename(rename.first, rename.second);
@@ -543,13 +543,13 @@ struct EvalPass : public Pass {

int pos = 0;
for (auto &c : tabsigs.chunks()) {
tab_line.push_back(log_signal(RTLIL::SigSpec(tabvals).extract(pos, c.width)));
tab_line.push_back(log_signal(RTLIL::SigSpec(tabvals).extract(pos, c.width), false));
pos += c.width;
}

pos = 0;
for (auto &c : signal.chunks()) {
tab_line.push_back(log_signal(value.extract(pos, c.width)));
tab_line.push_back(log_signal(value.extract(pos, c.width), false));
pos += c.width;
}

@@ -1041,8 +1041,8 @@ struct TechmapPass : public Pass {
log("\n");
log("When a port on a module in the map file has the 'techmap_autopurge' attribute\n");
log("set, and that port is not connected in the instantiation that is mapped, then\n");
log("then a cell port connected only to such wires will be omitted in the mapped\n");
log("version of the circuit.\n");
log("a cell port connected only to such wires will be omitted in the mapped version\n");
log("of the circuit.\n");
log("\n");
log("All wires in the modules from the map file matching the pattern _TECHMAP_*\n");
log("or *._TECHMAP_* are special wires that are used to pass instructions from\n");
@@ -88,6 +88,10 @@ struct SynthPass : public ScriptPass {
log(" read/write collision\" (same result as setting the no_rw_check\n");
log(" attribute on all memories).\n");
log("\n");
log(" -extra-map filename\n");
log(" source extra rules from the given file to complement the default\n");
log(" mapping library in the `techmap` step. this option can be\n");
log(" repeated.\n");
log("\n");
log("The following commands are executed by this synthesis command:\n");
help_script();
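For illustration (the map file name is hypothetical), the new option is invoked as

    synth -top top -extra-map my_extra_rules.v

and, as the script change further down shows, the techmap step then runs "techmap -map +/techmap.v -map my_extra_rules.v" instead of a plain "techmap".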
@@ -96,8 +100,8 @@ struct SynthPass : public ScriptPass {

string top_module, fsm_opts, memory_opts, abc;
bool autotop, flatten, noalumacc, nofsm, noabc, noshare, flowmap, booth;

int lut;
std::vector<std::string> techmap_maps;

void clear_flags() override
{
@@ -115,6 +119,7 @@ struct SynthPass : public ScriptPass {
flowmap = false;
booth = false;
abc = "abc";
techmap_maps.clear();
}

void execute(std::vector<std::string> args, RTLIL::Design *design) override
@@ -151,7 +156,7 @@ struct SynthPass : public ScriptPass {
flatten = true;
continue;
}
if (args[argidx] == "-lut") {
if (args[argidx] == "-lut" && argidx + 1 < args.size()) {
lut = atoi(args[++argidx].c_str());
continue;
}
@@ -192,6 +197,10 @@ struct SynthPass : public ScriptPass {
memory_opts += " -no-rw-check";
continue;
}
if (args[argidx] == "-extra-map" && argidx + 1 < args.size()) {
techmap_maps.push_back(args[++argidx]);
continue;
}
break;
}
extra_args(args, argidx, design);
@@ -261,7 +270,17 @@ struct SynthPass : public ScriptPass {
run("opt -fast -full");
run("memory_map");
run("opt -full");
run("techmap");
if (help_mode) {
run("techmap", " (unless -extra-map)");
run("techmap -map +/techmap.v -map <inject>", " (if -extra-map)");
} else {
std::string techmap_opts;
if (!techmap_maps.empty())
techmap_opts += " -map +/techmap.v";
for (auto fn : techmap_maps)
techmap_opts += stringf(" -map %s", fn.c_str());
run("techmap" + techmap_opts);
}
if (help_mode) {
run("techmap -map +/gate2lut.v", "(if -noabc and -lut)");
run("clean; opt_lut", " (if -noabc and -lut)");
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

set -ex
set -e

prefix=${1%.ok}
prefix=${prefix%.sv}
@@ -58,16 +58,33 @@ generate_sby() {
}

if [ -f $prefix.ys ]; then
set -x
$PWD/../../yosys -q -e "Assert .* failed." -s $prefix.ys
elif [ -f $prefix.sv ]; then
generate_sby pass > ${prefix}_pass.sby
generate_sby fail > ${prefix}_fail.sby

# Check that SBY is up to date enough for this yosys version
if sby --help | grep -q -e '--status'; then
set -x
sby --yosys $PWD/../../yosys -f ${prefix}_pass.sby
sby --yosys $PWD/../../yosys -f ${prefix}_fail.sby
else
echo "sva test '${prefix}' requires an up to date SBY, skipping"
fi
else
generate_sby pass > ${prefix}.sby

# Check that SBY is up to date enough for this yosys version
if sby --help | grep -q -e '--status'; then
set -x
sby --yosys $PWD/../../yosys -f ${prefix}.sby
else
echo "sva test '${prefix}' requires an up to date SBY, skipping"
fi
fi

{ set +x; } &>/dev/null

touch $prefix.ok
@@ -0,0 +1 @@
test_cell -s 1705659041 -n 100 -edges $shift $shiftx $shl $shr $sshl $sshr