From 2c6c3487a47bcc48f2ad84705e49e661f943f988 Mon Sep 17 00:00:00 2001 From: John McMaster Date: Mon, 24 Sep 2018 18:27:25 -0700 Subject: [PATCH] timfuz: timing4 format w/ site/bel info Signed-off-by: John McMaster --- fuzzers/007-timing/checksub.py | 4 +- fuzzers/007-timing/corner_csv.py | 12 +- fuzzers/007-timing/csv_flat2group.py | 4 +- fuzzers/007-timing/projects/corner.mk | 2 +- fuzzers/007-timing/projects/generate.sh | 3 +- .../007-timing/projects/placelut/generate.sh | 2 +- fuzzers/007-timing/projects/project.mk | 2 +- fuzzers/007-timing/projects/project.tcl | 10 +- fuzzers/007-timing/rref.py | 4 +- fuzzers/007-timing/solve_leastsq.py | 4 +- fuzzers/007-timing/solve_linprog.py | 4 +- fuzzers/007-timing/solve_qor.py | 4 +- fuzzers/007-timing/timing_txt2icsv.py | 226 +++-------------- fuzzers/007-timing/timing_txt2json.py | 237 ++++++++++++++++++ 14 files changed, 302 insertions(+), 216 deletions(-) create mode 100644 fuzzers/007-timing/timing_txt2json.py diff --git a/fuzzers/007-timing/checksub.py b/fuzzers/007-timing/checksub.py index 407156d6..559b6c61 100644 --- a/fuzzers/007-timing/checksub.py +++ b/fuzzers/007-timing/checksub.py @@ -111,14 +111,14 @@ def main(): parser.add_argument('--verbose', action='store_true', help='') parser.add_argument('--sub-json', help='') - parser.add_argument('fns_in', nargs='*', help='timing3i.csv input files') + parser.add_argument('fns_in', nargs='*', help='timing4i.csv input files') args = parser.parse_args() # Store options in dict to ease passing through functions bench = Benchmark() fns_in = args.fns_in if not fns_in: - fns_in = glob.glob('specimen_*/timing3i.csv') + fns_in = glob.glob('specimen_*/timing4i.csv') sub_json = None if args.sub_json: diff --git a/fuzzers/007-timing/corner_csv.py b/fuzzers/007-timing/corner_csv.py index 15e82ef8..8b7290a3 100644 --- a/fuzzers/007-timing/corner_csv.py +++ b/fuzzers/007-timing/corner_csv.py @@ -28,10 +28,12 @@ def main(): parser.add_argument('--verbose', type=int, help='') 
parser.add_argument( - '--auto-name', action='store_true', help='timing3i.csv => timing3c.csv') + '--auto-name', + action='store_true', + help='timing4i.csv => timing4c.csv') parser.add_argument('--out', default=None, help='Output csv') parser.add_argument('--corner', help='Output csv') - parser.add_argument('fns_in', nargs='+', help='timing3i.csv input files') + parser.add_argument('fns_in', nargs='+', help='timing4i.csv input files') args = parser.parse_args() bench = Benchmark() @@ -40,8 +42,8 @@ def main(): if args.auto_name: assert len(args.fns_in) == 1 fnin = args.fns_in[0] - fnout = fnin.replace('timing3i.csv', 'timing3c.csv') - assert fnout != fnin, 'Expect timing3i.csv in' + fnout = fnin.replace('timing4i.csv', 'timing4c.csv') + assert fnout != fnin, 'Expect timing4i.csv in' else: fnout = '/dev/stdout' print("Writing to %s" % fnout) @@ -49,7 +51,7 @@ def main(): fns_in = args.fns_in if not fns_in: - fns_in = glob.glob('specimen_*/timing3i.csv') + fns_in = glob.glob('specimen_*/timing4i.csv') run(fout=fout, fns_in=fns_in, corner=args.corner, verbose=args.verbose) diff --git a/fuzzers/007-timing/csv_flat2group.py b/fuzzers/007-timing/csv_flat2group.py index c58f2a45..aa7d6a1d 100644 --- a/fuzzers/007-timing/csv_flat2group.py +++ b/fuzzers/007-timing/csv_flat2group.py @@ -45,14 +45,14 @@ def main(): required=True, help='Group substitutions to make fully ranked') parser.add_argument('--out', help='Output sub.json substitution result') - parser.add_argument('fns_in', nargs='+', help='timing3i.csv input files') + parser.add_argument('fns_in', nargs='+', help='timing4i.csv input files') args = parser.parse_args() # Store options in dict to ease passing through functions bench = Benchmark() fns_in = args.fns_in if not fns_in: - fns_in = glob.glob('specimen_*/timing3i.csv') + fns_in = glob.glob('specimen_*/timing4i.csv') sub_json = load_sub(args.sub_json) diff --git a/fuzzers/007-timing/projects/corner.mk b/fuzzers/007-timing/projects/corner.mk index 
1d84d688..618719ec 100644 --- a/fuzzers/007-timing/projects/corner.mk +++ b/fuzzers/007-timing/projects/corner.mk @@ -46,6 +46,6 @@ $(BUILD_DIR)/$(CORNER)/timgrid-vc.json: $(BUILD_DIR)/$(CORNER)/flat.csv python3 $(TIMFUZ_DIR)/tile_annotate.py --timgrid-s $(TIMFUZ_DIR)/timgrid/build/timgrid-s.json --out $(BUILD_DIR)/$(CORNER)/timgrid-vc.json $(BUILD_DIR)/$(CORNER)/flat.csv $(BUILD_DIR)/$(CORNER)/qor.txt: $(BUILD_DIR)/$(CORNER)/flat.csv - python3 $(TIMFUZ_DIR)/solve_qor.py --corner $(CORNER) --bounds-csv $(BUILD_DIR)/$(CORNER)/flat.csv specimen_*/timing3i.csv >$(BUILD_DIR)/$(CORNER)/qor.txt.tmp + python3 $(TIMFUZ_DIR)/solve_qor.py --corner $(CORNER) --bounds-csv $(BUILD_DIR)/$(CORNER)/flat.csv specimen_*/timing4i.csv >$(BUILD_DIR)/$(CORNER)/qor.txt.tmp mv $(BUILD_DIR)/$(CORNER)/qor.txt.tmp $(BUILD_DIR)/$(CORNER)/qor.txt diff --git a/fuzzers/007-timing/projects/generate.sh b/fuzzers/007-timing/projects/generate.sh index 4dfcd690..220f40c0 100644 --- a/fuzzers/007-timing/projects/generate.sh +++ b/fuzzers/007-timing/projects/generate.sh @@ -4,6 +4,7 @@ source ${XRAY_GENHEADER} TIMFUZ_DIR=$XRAY_DIR/fuzzers/007-timing timing_txt2csv () { - python3 $TIMFUZ_DIR/timing_txt2icsv.py --speed-json $TIMFUZ_DIR/speed/build/speed.json --out timing3i.csv timing3.txt + python3 $TIMFUZ_DIR/timing_txt2icsv.py --speed-json $TIMFUZ_DIR/speed/build/speed.json --out timing4i.csv.tmp timing4.txt + mv timing4i.csv.tmp timing4i.csv } diff --git a/fuzzers/007-timing/projects/placelut/generate.sh b/fuzzers/007-timing/projects/placelut/generate.sh index fff71d31..40bc4550 100755 --- a/fuzzers/007-timing/projects/placelut/generate.sh +++ b/fuzzers/007-timing/projects/placelut/generate.sh @@ -7,5 +7,5 @@ TIMFUZ_DIR=$XRAY_DIR/fuzzers/007-timing python ../generate.py --sdx 4 --sdy 4 >top.v vivado -mode batch -source ../generate.tcl -python3 $TIMFUZ_DIR/timing_txt2csv.py --speed-json $TIMFUZ_DIR/speed/build/speed.json --out timing3.csv timing3.txt +python3 $TIMFUZ_DIR/timing_txt2csv.py --speed-json 
$TIMFUZ_DIR/speed/build/speed.json --out timing4.csv timing4.txt diff --git a/fuzzers/007-timing/projects/project.mk b/fuzzers/007-timing/projects/project.mk index 31379388..9545c888 100644 --- a/fuzzers/007-timing/projects/project.mk +++ b/fuzzers/007-timing/projects/project.mk @@ -4,7 +4,7 @@ N := 1 SPECIMENS := $(addprefix specimen_,$(shell seq -f '%03.0f' $(N))) SPECIMENS_OK := $(addsuffix /OK,$(SPECIMENS)) -CSVS := $(addsuffix /timing3i.csv,$(SPECIMENS)) +CSVS := $(addsuffix /timing4i.csv,$(SPECIMENS)) TIMFUZ_DIR=$(XRAY_DIR)/fuzzers/007-timing RREF_CORNER=slow_max # Allow an empty system of equations? diff --git a/fuzzers/007-timing/projects/project.tcl b/fuzzers/007-timing/projects/project.tcl index b1478201..23e6e7e1 100644 --- a/fuzzers/007-timing/projects/project.tcl +++ b/fuzzers/007-timing/projects/project.tcl @@ -15,9 +15,9 @@ proc pin_bel {pin} { # This allows tracing the full path along with pips proc write_info3 {} { set outdir "." - set fp [open "$outdir/timing3.txt" w] + set fp [open "$outdir/timing4.txt" w] # bel as site/bel, so don't bother with site - puts $fp "net src_bel dst_bel ico fast_max fast_min slow_max slow_min pips inodes wires" + puts $fp "linetype net src_site src_site_pin src_bel src_bel_pin dst_site dst_site_pin dst_bel dst_bel_pin ico fast_max fast_min slow_max slow_min pips inodes wires" set TIME_start [clock clicks -milliseconds] set verbose 0 @@ -43,6 +43,7 @@ proc write_info3 {} { # With OUT filter this seems to be sufficient set src_pin [get_pins -leaf -filter {DIRECTION == OUT} -of_objects $net] set src_bel [pin_bel $src_pin] + set src_bel_pin [get_bel_pins -of_objects $src_pin] set src_site [get_sites -of_objects $src_bel] # Only one net driver set src_site_pins [get_site_pins -filter {DIRECTION == OUT} -of_objects $net] @@ -76,12 +77,14 @@ proc write_info3 {} { } else { set delays [get_net_delays -of_objects $net] } + puts $fp "GROUP $ico [llength $delays]" foreach delay $delays { set delaystr [get_property NAME $delay] 
set dst_pins [get_property TO_PIN $delay] set dst_pin [get_pins $dst_pins] #puts " $delaystr: $src_pin => $dst_pin" set dst_bel [pin_bel $dst_pin] + set dst_bel_pin [get_bel_pins -of_objects $dst_pin] set dst_site [get_sites -of_objects $dst_bel] if $verbose { puts " Dest: $dst_pin at site $dst_site:$dst_bel" @@ -115,8 +118,7 @@ proc write_info3 {} { #set wires [get_wires -of_objects $net -from $src_site_pin -to $dst_site_pin] set wires [get_wires -of_objects $nodes] - # puts $fp "$net $src_bel $dst_bel $ico $fast_max $fast_min $slow_max $slow_min $pips" - puts -nonewline $fp "$net $src_bel $dst_bel $ico $fast_max $fast_min $slow_max $slow_min" + puts -nonewline $fp "NET $net $src_site $src_site_pin $src_bel $src_bel_pin $dst_site $dst_site_pin $dst_bel $dst_bel_pin $ico $fast_max $fast_min $slow_max $slow_min" # Write pips w/ speed index puts -nonewline $fp " " diff --git a/fuzzers/007-timing/rref.py b/fuzzers/007-timing/rref.py index c003a3e8..95d1abb6 100644 --- a/fuzzers/007-timing/rref.py +++ b/fuzzers/007-timing/rref.py @@ -223,13 +223,13 @@ def main(): default='build_speed/speed.json', help='Provides speed index to name translation') parser.add_argument('--out', help='Output sub.json substitution result') - parser.add_argument('fns_in', nargs='*', help='timing3i.csv input files') + parser.add_argument('fns_in', nargs='*', help='timing4i.csv input files') args = parser.parse_args() bench = Benchmark() fns_in = args.fns_in if not fns_in: - fns_in = glob.glob('specimen_*/timing3i.csv') + fns_in = glob.glob('specimen_*/timing4i.csv') try: run( diff --git a/fuzzers/007-timing/solve_leastsq.py b/fuzzers/007-timing/solve_leastsq.py index 06939bdc..8f01b54e 100644 --- a/fuzzers/007-timing/solve_leastsq.py +++ b/fuzzers/007-timing/solve_leastsq.py @@ -119,14 +119,14 @@ def main(): parser.add_argument('--corner', required=True, default="slow_max", help='') parser.add_argument( '--out', default=None, help='output timing delay .json') - parser.add_argument('fns_in', 
nargs='+', help='timing3i.csv input files') + parser.add_argument('fns_in', nargs='+', help='timing4i.csv input files') args = parser.parse_args() # Store options in dict to ease passing through functions bench = Benchmark() fns_in = args.fns_in if not fns_in: - fns_in = glob.glob('specimen_*/timing3i.csv') + fns_in = glob.glob('specimen_*/timing4i.csv') sub_json = None if args.sub_json: diff --git a/fuzzers/007-timing/solve_linprog.py b/fuzzers/007-timing/solve_linprog.py index 1f2eecc4..40586f5e 100644 --- a/fuzzers/007-timing/solve_linprog.py +++ b/fuzzers/007-timing/solve_linprog.py @@ -162,14 +162,14 @@ def main(): parser.add_argument('--corner', required=True, default="slow_max", help='') parser.add_argument( '--out', default=None, help='output timing delay .json') - parser.add_argument('fns_in', nargs='+', help='timing3i.csv input files') + parser.add_argument('fns_in', nargs='+', help='timing4i.csv input files') args = parser.parse_args() # Store options in dict to ease passing through functions bench = Benchmark() fns_in = args.fns_in if not fns_in: - fns_in = glob.glob('specimen_*/timing3i.csv') + fns_in = glob.glob('specimen_*/timing4i.csv') sub_json = None if args.sub_json: diff --git a/fuzzers/007-timing/solve_qor.py b/fuzzers/007-timing/solve_qor.py index 51edc0a2..55da0583 100644 --- a/fuzzers/007-timing/solve_qor.py +++ b/fuzzers/007-timing/solve_qor.py @@ -37,14 +37,14 @@ def main(): '--bounds-csv', required=True, help='Previous solve result starting point') - parser.add_argument('fns_in', nargs='+', help='timing3i.csv input files') + parser.add_argument('fns_in', nargs='+', help='timing4i.csv input files') args = parser.parse_args() # Store options in dict to ease passing through functions bench = Benchmark() fns_in = args.fns_in if not fns_in: - fns_in = glob.glob('specimen_*/timing3i.csv') + fns_in = glob.glob('specimen_*/timing4i.csv') try: run( diff --git a/fuzzers/007-timing/timing_txt2icsv.py b/fuzzers/007-timing/timing_txt2icsv.py index 
6e09a3d6..e1a04bae 100644 --- a/fuzzers/007-timing/timing_txt2icsv.py +++ b/fuzzers/007-timing/timing_txt2icsv.py @@ -1,106 +1,20 @@ #!/usr/bin/env python3 from timfuz import Benchmark, A_di2ds +from timing_txt2json import gen_timing4n, load_speed_json + import glob import math import json import sys from collections import OrderedDict -# Speed index: some sort of special value -SI_NONE = 0xFFFF - # prefix to make easier to track # models do not overlap between PIPs and WIREs PREFIX_W = 'WIRE_' PREFIX_P = 'PIP_' -def parse_pip(s): - # Entries like - # CLK_BUFG_REBUF_X60Y117/CLK_BUFG_REBUF.CLK_BUFG_REBUF_R_CK_GCLK0_BOT<<->>CLK_BUFG_REBUF_R_CK_GCLK0_TOP - # Convert to (site, type, pip_junction, pip) - pipstr, speed_index = s.split(':') - speed_index = int(speed_index) - site, instance = pipstr.split('/') - #type, pip_junction, pip = others.split('.') - #return (site, type, pip_junction, pip) - return site, instance, int(speed_index) - - -def parse_node(s): - node, nwires = s.split(':') - return node, int(nwires) - - -def parse_wire(s): - # CLBLM_R_X3Y80/CLBLM_M_D6:952 - wirestr, speed_index = s.split(':') - site, instance = wirestr.split('/') - return site, instance, int(speed_index) - - -# FIXME: these actually have a delay element -# Probably need to put these back in -def remove_virtual_pips(pips): - return pips - return filter(lambda pip: not re.match(r'CLBL[LM]_[LR]_', pip[0]), pips) - - -def load_timing3(f, name='file'): - # src_bel dst_bel ico fast_max fast_min slow_max slow_min pips - f.readline() - ret = [] - bads = 0 - for l in f: - # FIXME: hack - if 0 and 'CLK' in l: - continue - - l = l.strip() - if not l: - continue - parts = l.split(' ') - # FIXME: deal with these nodes - if len(parts) != 11: - bads += 1 - continue - net, src_bel, dst_bel, ico, fast_max, fast_min, slow_max, slow_min, pips, nodes, wires = parts - pips = pips.split('|') - nodes = nodes.split('|') - wires = wires.split('|') - ret.append( - { - 'net': net, - 'src_bel': src_bel, - 
'dst_bel': dst_bel, - 'ico': int(ico), - # ps - 'fast_max': int(fast_max), - 'fast_min': int(fast_min), - 'slow_max': int(slow_max), - 'slow_min': int(slow_min), - 'pips': remove_virtual_pips([parse_pip(pip) for pip in pips]), - 'nodes': [parse_node(node) for node in nodes], - 'wires': [parse_wire(wire) for wire in wires], - 'line': l, - }) - print(' load %s: %d bad, %d good' % (name, bads, len(ret))) - #assert 0 - return ret - - -def load_speed_json(f): - j = json.load(f) - # Index speed indexes to names - speed_i2s = {} - for k, v in j['speed_model'].items(): - i = v['speed_index'] - if i != SI_NONE: - speed_i2s[i] = k - return j, speed_i2s - - # Verify the nodes and wires really do line up def vals2Adi_check(vals, names): print('Checking') @@ -113,131 +27,61 @@ def vals2Adi_check(vals, names): assert 0 -def vals2Adi(vals, speed_i2s, name_tr={}, name_drop=[], verbose=False): +def json2Ads(vals, verbose=False): + '''Convert timing4 JSON into Ads interconnect equations''' + def pip2speed(pip): - _site, _name, speed_index = pip - return PREFIX_P + speed_i2s[speed_index] + _site, _name, pip_name = pip + return PREFIX_P + pip_name def wire2speed(wire): - _site, _name, speed_index = wire - return PREFIX_W + speed_i2s[speed_index] + _site, _name, wire_name = wire + return PREFIX_W + wire_name - # Want this ordered - names = OrderedDict() - - print( - 'Creating matrix w/ tr: %d, drop: %d' % (len(name_tr), len(name_drop))) - - # Take sites out entirely using handy "interconnect only" option - #vals = filter(lambda x: str(x).find('SLICE') >= 0, vals) - # Highest count while still getting valid result - - # First index all of the given pip types - # Start out as set then convert to list to keep matrix order consistent - sys.stdout.write('Indexing delay elements ') - sys.stdout.flush() - progress = max(1, len(vals) / 100) - for vali, val in enumerate(vals): - if vali % progress == 0: - sys.stdout.write('.') - sys.stdout.flush() - odl = [(pip2speed(pip), None) for pip in 
val['pips']] - names.update(OrderedDict(odl)) - - odl = [(wire2speed(wire), None) for wire in val['wires']] - names.update(OrderedDict(odl)) - print(' done') - - # Apply transform - orig_names = len(names) - for k in (list(name_drop) + list(name_tr.keys())): - if k in names: - del names[k] - else: - print('WARNING: failed to remove %s' % k) - names.update(OrderedDict([(name, None) for name in name_tr.values()])) - print('Names tr %d => %d' % (orig_names, len(names))) - - # Make unique list - names = list(names.keys()) - name_s2i = {} - for namei, name in enumerate(names): - name_s2i[name] = namei - if verbose: - for name in names: - print('NAME: ', name) - for name in name_drop: - print('DROP: ', name) - for l, r in name_tr.items(): - print('TR: %s => %s' % (l, r)) - - # Now create a matrix with all of these delays - # Each row needs len(names) elements - # -2 means 2 elements present, 0 means absent - # (could hit same pip twice) - print('Creating delay element matrix w/ %d names' % len(names)) - Adi = [None for _i in range(len(vals))] - for vali, val in enumerate(vals): + print('Making equations') + def mk_row(val): def add_name(name): - if name in name_drop: - return - name = name_tr.get(name, name) - namei = name_s2i[name] - row_di[namei] = row_di.get(namei, 0) + 1 + row_ds[name] = row_ds.get(name, 0) + 1 - # Start with 0 occurances - #row = [0 for _i in range(len(names))] - row_di = {} + row_ds = {} - #print('pips: ', val['pips'] for pip in val['pips']: add_name(pip2speed(pip)) for wire in val['wires']: add_name(wire2speed(wire)) - #A_ub.append(row) - Adi[vali] = row_di + return row_ds - return Adi, names + return [mk_row(val) for val in vals] -# TODO: load directly as Ads -# remove names_tr, names_drop -def vals2Ads(vals, speed_i2s, verbose=False): - Adi, names = vals2Adi(vals, speed_i2s, verbose=False) - return A_di2ds(Adi, names) - - -def load_Ads(speed_json_f, f_ins): +def load_Ads(speed_json_f, fn_ins): print('Loading data') _speedj, speed_i2s = 
load_speed_json(speed_json_f) - vals = [] - for avals in [load_timing3(f_in, name) for f_in, name in f_ins]: - vals.extend(avals) + for fn_in in fn_ins: + vals = list(gen_timing4n(fn_in, speed_i2s)) + Ads = json2Ads(vals) - Ads = vals2Ads(vals, speed_i2s) + def mkb(val): + t = val['t'] + return (t['fast_max'], t['fast_min'], t['slow_max'], t['slow_min']) - def mkb(val): - return ( - val['fast_max'], val['fast_min'], val['slow_max'], val['slow_min']) + bs = [mkb(val) for val in vals] + ico = [val['ico'] for val in vals] - b = [mkb(val) for val in vals] - ico = [val['ico'] for val in vals] - - return Ads, b, ico + for row_bs, row_ds, row_ico in zip(bs, Ads, ico): + yield row_bs, row_ds, row_ico -def run(speed_json_f, fout, f_ins, verbose=0, corner=None): - Ads, bs, ico = load_Ads(speed_json_f, f_ins) - +def run(speed_json_f, fout, fns_in, verbose=0, corner=None): fout.write('ico,fast_max fast_min slow_max slow_min,rows...\n') - for row_bs, row_ds, row_ico in zip(bs, Ads, ico): + for row_bs, row_ds, row_ico in load_Ads(speed_json_f, fns_in): # XXX: consider removing ico column # its obsolete at this point - if not ico: + if not row_ico: continue # like: 123 456 120 450, 1 a, 2 b # first column has delay corners, followed by delay element count @@ -252,19 +96,19 @@ def main(): parser = argparse.ArgumentParser( description= - 'Convert obscure timing3.txt into more readable but roughly equivilent timing3i.csv (interconnect)' + 'Convert obscure timing4.txt into more readable but roughly equivilent timing4i.csv (interconnect)' ) parser.add_argument('--verbose', type=int, help='') # made a bulk conversion easier...keep? 
#!/usr/bin/env python3
'''
Parse timing4.txt (as written by projects/project.tcl write_info3) into
structured records, translating speed indices to speed model names via
speed.json.

Run as a script, dumps the parsed NET records as one JSON object per line.
'''

import glob
import json

# Speed index sentinel: entry has no associated speed model
SI_NONE = 0xFFFF


def parse_pip(s, speed_i2s):
    '''
    Parse a "pipstr:speed_index" entry into (site, instance, speed_model_name).

    Entries look like:
    CLK_BUFG_REBUF_X60Y117/CLK_BUFG_REBUF.CLK_BUFG_REBUF_R_CK_GCLK0_BOT<<->>CLK_BUFG_REBUF_R_CK_GCLK0_TOP:123
    '''
    pipstr, speed_index = s.split(':')
    site, instance = pipstr.split('/')
    return site, instance, speed_i2s[int(speed_index)]


def parse_node(s):
    '''Parse a "node:nwires" entry into (node, nwires).'''
    node, nwires = s.split(':')
    return node, int(nwires)


def parse_wire(s, speed_i2s):
    '''
    Parse a "wirestr:speed_index" entry into (site, instance, speed_model_name).

    Entries look like: CLBLM_R_X3Y80/CLBLM_M_D6:952
    '''
    wirestr, speed_index = s.split(':')
    site, instance = wirestr.split('/')
    return site, instance, speed_i2s[int(speed_index)]


def gen_timing4(fn, speed_i2s):
    '''
    Yield (lintype, val) tuples parsed from a timing4.txt file.

    lintype is either:
    - 'GROUP': val is (ico, ndelays) announcing the next ndelays NET lines
    - 'NET': val is a dict with net/src/dst/t/ico/pips/nodes/wires/line keys

    Raises Exception if the file header does not match the expected format.
    '''
    header_want = (
        'linetype net src_site src_site_pin src_bel src_bel_pin '
        'dst_site dst_site_pin dst_bel dst_bel_pin '
        'ico fast_max fast_min slow_max slow_min pips inodes wires')
    ncols = len(header_want.split())

    def group_line(parts):
        # GROUP ico ndelays
        assert len(parts) == 3
        _lintype, ico, delays = parts
        return int(ico), int(delays)

    def net_line(parts, l):
        assert len(parts) == ncols
        (_lintype, net, src_site, src_site_pin, src_bel, src_bel_pin, dst_site,
         dst_site_pin, dst_bel, dst_bel_pin, ico, fast_max, fast_min, slow_max,
         slow_min, pips, nodes, wires) = parts
        return {
            'net': net,
            'src': {
                'site': src_site,
                'site_pin': src_site_pin,
                'bel': src_bel,
                'bel_pin': src_bel_pin,
            },
            'dst': {
                'site': dst_site,
                'site_pin': dst_site_pin,
                'bel': dst_bel,
                'bel_pin': dst_bel_pin,
            },
            't': {
                # ps
                'fast_max': int(fast_max),
                'fast_min': int(fast_min),
                'slow_max': int(slow_max),
                'slow_min': int(slow_min),
            },
            'ico': int(ico),
            'pips': [parse_pip(pip, speed_i2s) for pip in pips.split('|')],
            'nodes': [parse_node(node) for node in nodes.split('|')],
            'wires': [parse_wire(wire, speed_i2s) for wire in wires.split('|')],
            'line': l,
        }

    with open(fn, 'r') as f:
        header_got = f.readline().strip()
        if header_got != header_want:
            raise Exception("Unexpected columns")

        rets = 0
        # XXX: there were malformed lines, but think they are fixed now?
        bads = 0
        net_lines = 0
        for l in f:
            l = l.strip()
            if not l:
                continue
            parts = l.split(' ')
            lintype = parts[0]

            if lintype == 'GROUP':
                val = group_line(parts)
            elif lintype == 'NET':
                val = net_line(parts, l)
                net_lines += 1
            else:
                raise Exception("Unexpected line type %r" % lintype)
            rets += 1
            yield lintype, val

        # NOTE: only reached if the generator is fully consumed
        print(
            ' load %s: %d bad, %d good, %u net lines' %
            (fn, bads, rets, net_lines))


def gen_timing4n(fn, speed_i2s):
    '''Like gen_timing4, but yield only the NET record dicts.'''
    for lintype, val in gen_timing4(fn, speed_i2s):
        if lintype == 'NET':
            yield val


def gen_timing4i(fn, speed_i2s):
    '''
    Like gen_timing4n, but aggregate each path's ico=0 and ico=1 measurements
    into a single entry whose 't' key becomes (t_ico0, t_ico1).

    Assumes every ico 0 group is immediately followed by its ico 1 group with
    paths in the same order (asserted below).
    '''
    timgen = gen_timing4(fn, speed_i2s)

    def get_ico(exp_ico):
        '''Return the NET entries of the next group, or None at end of file.'''
        ret = []
        try:
            lintype, val = next(timgen)
        except StopIteration:
            return None
        assert lintype == 'GROUP'
        ico, delays = val
        assert ico == exp_ico
        for _ in range(delays):
            lintype, val = next(timgen)
            assert lintype == 'NET'
            ret.append(val)
        return ret

    def same_path(l, r):
        # If source and dest are the same, should be the same thing
        return (l['src']['bel_pin'] == r['src']['bel_pin']
                and l['dst']['bel_pin'] == r['dst']['bel_pin'])

    while True:
        ico0s = get_ico(0)
        if ico0s is None:
            break
        ico1s = get_ico(1)
        # TODO: verify this is actually true
        assert len(ico0s) == len(ico1s)

        for ico0, ico1 in zip(ico0s, ico1s):
            # TODO: verify this is actually true
            # otherwise move to more complex algorithm
            assert same_path(ico0, ico1)
            # Aggregate timing info as (ico0, ico1) into the ico0 entry
            ico0['t'] = (
                ico0['t'],
                ico1['t'],
            )
            yield ico0


def load_speed_json(f):
    '''
    Load speed.json from open file f.

    Return (full json dict, {speed_index: speed_model_name}); entries with the
    SI_NONE sentinel index are omitted from the mapping.
    '''
    j = json.load(f)
    speed_i2s = {}
    for k, v in j['speed_model'].items():
        i = v['speed_index']
        if i != SI_NONE:
            speed_i2s[i] = k
    return j, speed_i2s


def run(speed_json_f, fout, fns_in, verbose=0, corner=None):
    '''Write each parsed NET record of fns_in to fout as a JSON line.'''
    print('Loading data')
    _speedj, speed_i2s = load_speed_json(speed_json_f)

    for fn_in in fns_in:
        # fout is an already-open file object: write to it directly
        for j in gen_timing4n(fn_in, speed_i2s):
            fout.write(json.dumps(j) + '\n')


def main():
    import argparse
    # Local import: keeps the parse API importable without the timfuz package
    from timfuz import Benchmark

    parser = argparse.ArgumentParser(
        description=
        'Convert obscure timing4.txt into more readable but roughly equivilent timing4.json'
    )

    parser.add_argument('--verbose', type=int, help='')
    # made a bulk conversion easier...keep?
    parser.add_argument(
        '--auto-name',
        action='store_true',
        help='timing4.txt => timing4.json')
    parser.add_argument(
        '--speed-json',
        default='build_speed/speed.json',
        help='Provides speed index to name translation')
    parser.add_argument(
        '--out', default=None, help='Output timing4.json file')
    parser.add_argument('fns_in', nargs='+', help='Input timing4.txt files')
    args = parser.parse_args()
    bench = Benchmark()

    fnout = args.out
    if fnout is None:
        if args.auto_name:
            assert len(args.fns_in) == 1
            fnin = args.fns_in[0]
            fnout = fnin.replace('.txt', '.json')
            assert fnout != fnin, 'Expect .txt in'
        else:
            # practically there are too many stray prints to make stdout work
            assert 0, 'File name required'
    print("Writing to %s" % fnout)
    fout = open(fnout, 'w')

    fns_in = args.fns_in
    if not fns_in:
        fns_in = glob.glob('specimen_*/timing4.txt')

    run(
        speed_json_f=open(args.speed_json, 'r'),
        fout=fout,
        fns_in=fns_in,
        verbose=args.verbose)


if __name__ == '__main__':
    main()