#!/usr/bin/env python3

from timfuz import Benchmark, loadc_Ads_bs, load_sub, Ads2bounds, corners2csv, corner_s2i


def gen_flat(fns_in, sub_json, corner=None):
    '''
    Flatten grouped timing variables into per-element delay entries.

    Loads delay data from the input .csv files, then yields
    (element_name, corners) pairs where corners is the loaded corner data,
    a zero row, or the scalar 0 (see NOTE below).

    fns_in: list of input .csv filenames.
    sub_json: substitution data with 'subs', 'pivots' and 'zero_names' keys.
    corner: optional corner name; when given, names known to be zero are
        emitted with a row pinning that corner to 0.
    '''
    Ads, bs = loadc_Ads_bs(fns_in)
    bounds = Ads2bounds(Ads, bs)

    # Names forced to zero delay because they are non-pivot members of a
    # substitution group.
    group_zeros = set()
    # Names that received a concrete delay.
    nonzeros = set()

    if not corner:
        zero_row = None
    else:
        # Row with only the requested corner pinned to 0.
        zero_row = [None] * 4
        zero_row[corner_s2i[corner]] = 0

    for name, corners in bounds.items():
        group = sub_json['subs'].get(name)
        if name in sub_json['zero_names']:
            if zero_row:
                # NOTE(review): yields the scalar 0 here, not zero_row as the
                # zero_names loop below does — and these names get yielded
                # again there.  Confirm downstream handles both forms.
                yield name, 0
        elif group:
            # Substitution group: put the entire delay into the pivot and
            # force every other group member to zero.
            pivot = sub_json['pivots'][name]
            assert pivot not in group_zeros
            nonzeros.add(pivot)
            non_pivots = set(group) - {pivot}
            group_zeros.update(non_pivots)
            yield pivot, corners
        else:
            # Plain element: pass its delay through unchanged.
            nonzeros.add(name)
            yield name, corners

    # Non-pivots can appear multiple times, but they should always be zero.
    # However, due to substitution limitations, just warn.
    violations = group_zeros & nonzeros
    if violations:
        print('WARNING: %s non-0 non-pivot' % len(violations))

    # XXX: how to best handle these?  Should they be fixed 0?
    if zero_row:
        # ZERO names should always be zero.
        for zero_name in sub_json['zero_names']:
            yield zero_name, zero_row

        # Only yield elements not already yielded with a concrete delay.
        real_zeros = group_zeros - violations
        print(
            'Zero candidates: %u w/ %u non-pivot conflicts => %u zeros as solved'
            % (len(group_zeros), len(violations), len(real_zeros)))
        for zero_name in real_zeros:
            yield zero_name, zero_row


def run(fns_in, fnout, sub_json, corner=None, verbose=False):
    '''
    Write the flattened per-element delays from gen_flat() to fnout as .csv.

    Each output row is: ico flag, the four corner values, then a single
    "coefficient name" column.  Rows are sorted by element name.
    verbose is currently unused here.
    '''
    with open(fnout, 'w') as fout:
        # Column header expected by downstream tools.
        fout.write('ico,fast_max fast_min slow_max slow_min,rows...\n')
        flat = sorted(list(gen_flat(fns_in, sub_json, corner=corner)))
        for name, corners in flat:
            # ico is always set for these rows.
            row_ico = 1
            fields = [
                str(row_ico),
                corners2csv(corners),
                '%u %s' % (1, name),
            ]
            fout.write(','.join(fields) + '\n')


def main():
    '''CLI entry point: parse arguments and emit the flattened delay .csv.'''
    import argparse

    ap = argparse.ArgumentParser(
        description='Substitute .csv to ungroup correlated variables')
    ap.add_argument('--verbose', action='store_true', help='')
    # NOTE(review): --sub-csv is accepted but never read in this function.
    ap.add_argument('--sub-csv', help='')
    ap.add_argument(
        '--sub-json',
        required=True,
        help='Group substitutions to make fully ranked')
    ap.add_argument('--corner', default=None, help='')
    ap.add_argument('--out', default=None, help='output timing delay .csv')
    ap.add_argument(
        'fns_in',
        nargs='+',
        help='input timing delay .csv (NOTE: must be single column)')
    args = ap.parse_args()

    # Benchmark is started before the real work so the timing covers it all.
    bench = Benchmark()

    sub_json = load_sub(args.sub_json)

    try:
        run(
            args.fns_in,
            args.out,
            sub_json=sub_json,
            verbose=args.verbose,
            corner=args.corner)
    finally:
        # Always report elapsed time, even if run() raised.
        print('Exiting after %s' % bench)


# Script entry point.
if __name__ == '__main__':
    main()