2018-12-10 20:22:40 +01:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
|
'''
Check:

-Individual files are valid
-No overlap between any tile

TODO:
Can we use prjxray?
Relies on 074, which is too far into the process
'''
|
|
|
|
|
|
|
|
|
|
from prjxray import util
|
2018-12-10 23:42:53 +01:00
|
|
|
from prjxray import db as prjxraydb
|
|
|
|
|
import os
|
2018-12-10 20:22:40 +01:00
|
|
|
import parsedb
|
|
|
|
|
#from prjxray import db as prjxraydb
|
|
|
|
|
import glob
|
2019-02-11 14:18:27 +01:00
|
|
|
import json
|
2018-12-10 20:22:40 +01:00
|
|
|
|
2019-02-11 14:18:27 +01:00
|
|
|
def make_tile_mask(db_file, tile_name, tilej, strict=False, verbose=False):
    '''
    Build a bitmask for a single tile.

    Returns a dict:
    key: (address, bit index)
    val: a sample "<tile_name>.<tag>" description of where the bit came from
         (a bit may have multiple sources; only the first one seen is kept)
    '''
    # FIXME: fix mask files https://github.com/SymbiFlow/prjxray/issues/301
    # in the meantime build them on the fly
    # We may want this to build them anyway
    mask = {}
    bits = util.gen_tile_bits(db_file, tilej, strict=strict, verbose=verbose)
    for absaddr, bitaddr, tag in bits:
        key = (absaddr, bitaddr)
        # Keep the first description seen for each bit address
        if key not in mask:
            mask[key] = "%s.%s" % (tile_name, tag)
    return mask
|
|
|
|
|
|
|
|
|
|
|
2018-12-10 20:22:40 +01:00
|
|
|
def parsedb_all(db_root, verbose=False):
    '''Verify .db files are individually valid.

    Strict-parses every segbits_*.db and mask_*.db file under db_root and
    prints a per-pattern summary.  parsedb.run() raises on the first
    invalid file.
    '''

    def check_glob(pattern):
        # Strict-parse every file matching pattern; return how many passed.
        files = 0
        for bit_fn in glob.glob('%s/%s' % (db_root, pattern)):
            verbose and print("Checking %s" % bit_fn)
            parsedb.run(bit_fn, fnout=None, strict=True, verbose=verbose)
            files += 1
        return files

    # Previously these two loops were copy-pasted; factored into one helper.
    print("segbits_*.db: %d okay" % check_glob('segbits_*.db'))
    print("mask_*.db: %d okay" % check_glob('mask_*.db'))
|
2018-12-10 20:22:40 +01:00
|
|
|
|
|
|
|
|
|
2019-02-11 14:18:27 +01:00
|
|
|
def parsedb_file(db_root, db_files, tile, strict=False, verbose=False):
    '''
    Load the segbits .db entries for every block type of one tile.

    Populates db_files[tile_type][block_type] with the parsed lines from the
    matching segbits file and returns the (mutated) db_files dict.  Missing
    files are skipped, unless strict is set, in which case they assert.
    '''
    tile_type = tile["type"]
    per_block = {}
    db_files[tile_type] = per_block

    for block_type in tile["bits"]:
        entries = []
        per_block[block_type] = entries

        # CLB_IO_CLK segbits live in the un-suffixed file; every other
        # block type gets a ".<block_type>" suffix.
        if block_type == "CLB_IO_CLK":
            fn = "%s/segbits_%s.db" % (db_root, tile_type.lower())
        else:
            fn = "%s/segbits_%s.%s.db" % (
                db_root, tile_type.lower(), block_type.lower())

        present = os.path.exists(fn)
        # tilegrid runs a lot earlier than fuzzers
        # may not have been created yet
        verbose and print("Check %s: %s" % (fn, present))

        # FIXME: some segbits files are not present and the strict check
        # produces assertion errors
        # e.g. segbits_cmt_top_r_lower_b.db
        if strict:
            assert os.path.exists(fn)
        elif not present:
            continue

        entries.extend(util.parse_db_lines(fn))

    return db_files
|
|
|
|
|
|
|
|
|
|
|
2018-12-10 23:42:53 +01:00
|
|
|
def check_tile_overlap(db, db_root, strict=False, verbose=False):
    '''
    Verifies that no two tiles use the same bit

    Assume .db files are individually valid
    Create a mask for all the bits the tile type uses
    For each tile, create bitmasks over the entire bitstream for current part
    Throw an exception if two tiles share an address

    NOTE(review): masks are accumulated per tile type, so only tiles of the
    SAME type are checked against each other here -- confirm that cross-type
    overlap is intentionally out of scope.
    '''
    # tile_type -> {(absaddr, bitaddr): "tile.tag"}, accumulated over all tiles
    mall = dict()
    # Tile types whose segbits .db files have already been loaded
    tiles_type_done = set()
    # tile_type -> block_type -> parsed segbits lines (filled by parsedb_file)
    db_files = dict()

    tiles_checked = 0

    for tile_name, tilej in db.tilegrid.items():
        tile_type = tilej["type"]

        # Load the segbits files for this tile type only once
        if tile_type not in tiles_type_done:
            db_files = parsedb_file(db_root, db_files, tilej)
            tiles_type_done.add(tile_type)

        if tile_type not in mall:
            verbose and print("Adding tile type: %s" % tile_type)
            mall[tile_type] = {}

        mtile = make_tile_mask(
            db_files[tile_type], tile_name, tilej, strict=strict, verbose=verbose)
        verbose and print(
            "Checking %s, type %s, bits: %s" %
            (tile_name, tilej["type"], len(mtile)))
        # A tile with no bits has nothing to collide with
        if len(mtile) == 0:
            continue

        collisions = set(mall[tile_type].keys()).intersection(set(mtile.keys()))
        if collisions:
            print("ERROR: %s collisions" % len(collisions))
            for ck in sorted(collisions):
                addr, bitaddr = ck
                word, bit = util.addr_bit2word(bitaddr)
                print(
                    " %s: had %s, got %s" %
                    (util.addr2str(addr, word, bit), mall[tile_type][ck], mtile[ck]))
            raise ValueError("%s collisions" % len(collisions))
        mall[tile_type].update(mtile)
        tiles_checked += 1
    # BUGFIX: previously reported len(mall) -- the number of tile types --
    # as the bit count; report the total number of distinct bits instead.
    total_bits = sum(len(m) for m in mall.values())
    print("Checked %s tiles, %s bits" % (tiles_checked, total_bits))
|
2018-12-10 20:22:40 +01:00
|
|
|
|
|
|
|
|
|
2018-12-10 23:42:53 +01:00
|
|
|
def run(db_root, strict=False, verbose=False):
    '''Check an entire database directory for consistency.'''
    # Phase 1: every .db file must parse on its own
    print("Checking individual .db...")
    parsedb_all(db_root, verbose=verbose)

    # Phase 2: load the tilegrid and verify tiles do not share bits
    db = prjxraydb.Database(db_root)
    db._read_tilegrid()
    # these don't load properly without .json files
    # See: https://github.com/SymbiFlow/prjxray/issues/303
    # db._read_tile_types()
    # print(db.tile_types.keys())

    verbose and print("")

    print("Checking aggregate dir...")
    check_tile_overlap(db, db_root, strict=strict, verbose=verbose)
|
2018-12-10 20:22:40 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def main():
    '''Command-line entry point: parse arguments and run the consistency check.'''
    import argparse

    parser = argparse.ArgumentParser(
        description="Parse a db repository, checking for consistency")
    util.db_root_arg(parser)
    for flag in ('--strict', '--verbose'):
        parser.add_argument(flag, action='store_true', help='')
    args = parser.parse_args()

    run(args.db_root, strict=args.strict, verbose=args.verbose)
|
2018-12-10 20:22:40 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
# Script entry point when invoked directly (not on import)
if __name__ == '__main__':
    main()
|