From 7379977c1d835e6a05b86f78ae75a42cda67e422 Mon Sep 17 00:00:00 2001 From: John McMaster Date: Mon, 17 Sep 2018 19:13:09 -0700 Subject: [PATCH 01/10] timfuz: reorganize minitest, fixup Signed-off-by: John McMaster --- fuzzers/007-timing/test_unique/.gitignore | 3 + fuzzers/007-timing/test_unique/Makefile | 22 ++++ fuzzers/007-timing/test_unique/README.md | 11 ++ fuzzers/007-timing/test_unique/generate.sh | 9 ++ fuzzers/007-timing/test_unique/generate.tcl | 97 ++++++++++++++++ .../007-timing/test_unique}/node_unique.py | 19 +-- .../007-timing/test_unique}/perf_test.py | 0 .../007-timing/test_unique}/pip_unique.py | 21 ++-- fuzzers/007-timing/test_unique/top.v | 109 ++++++++++++++++++ .../007-timing/test_unique}/wire_unique.py | 19 +-- 10 files changed, 288 insertions(+), 22 deletions(-) create mode 100644 fuzzers/007-timing/test_unique/.gitignore create mode 100644 fuzzers/007-timing/test_unique/Makefile create mode 100644 fuzzers/007-timing/test_unique/README.md create mode 100755 fuzzers/007-timing/test_unique/generate.sh create mode 100644 fuzzers/007-timing/test_unique/generate.tcl rename {minitests/timfuz => fuzzers/007-timing/test_unique}/node_unique.py (86%) rename {minitests/timfuz => fuzzers/007-timing/test_unique}/perf_test.py (100%) rename {minitests/timfuz => fuzzers/007-timing/test_unique}/pip_unique.py (81%) create mode 100644 fuzzers/007-timing/test_unique/top.v rename {minitests/timfuz => fuzzers/007-timing/test_unique}/wire_unique.py (83%) diff --git a/fuzzers/007-timing/test_unique/.gitignore b/fuzzers/007-timing/test_unique/.gitignore new file mode 100644 index 00000000..374eddc2 --- /dev/null +++ b/fuzzers/007-timing/test_unique/.gitignore @@ -0,0 +1,3 @@ +specimen_* +build + diff --git a/fuzzers/007-timing/test_unique/Makefile b/fuzzers/007-timing/test_unique/Makefile new file mode 100644 index 00000000..f7dca553 --- /dev/null +++ b/fuzzers/007-timing/test_unique/Makefile @@ -0,0 +1,22 @@ +N := 1 +SPECIMENS := $(addprefix specimen_,$(shell seq -f '%03.0f' $(N))) +SPECIMENS_OK := $(addsuffix /OK,$(SPECIMENS)) + +all: $(SPECIMENS_OK) + +$(SPECIMENS_OK): + bash generate.sh $(subst /OK,,$@) || (if [ "$(BADPRJ_OK)" != 'Y' ] ; then exit 1; fi; exit 0) + touch $@ + +run: + $(MAKE) clean + $(MAKE) all + touch run.ok + +clean: + rm -rf specimen_[0-9][0-9][0-9]/ seg_clblx.segbits __pycache__ run.ok + rm -rf vivado*.log vivado_*.str vivado*.jou design *.bits *.dcp *.bit + rm -rf build + +.PHONY: all run clean + diff --git a/fuzzers/007-timing/test_unique/README.md b/fuzzers/007-timing/test_unique/README.md new file mode 100644 index 00000000..44db73b9 --- /dev/null +++ b/fuzzers/007-timing/test_unique/README.md @@ -0,0 +1,11 @@ +Characterizes how attributes vary across pips, wires, and nodes. 
Usage: + +``` +$ make +$ python3 pip_unique.py +$ python3 wire_unique.py +$ python3 node_unique.py +``` + +NOTE: this will take a long time + diff --git a/fuzzers/007-timing/test_unique/generate.sh b/fuzzers/007-timing/test_unique/generate.sh new file mode 100755 index 00000000..76583ac2 --- /dev/null +++ b/fuzzers/007-timing/test_unique/generate.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +set -ex + +source ${XRAY_GENHEADER} +TIMFUZ_DIR=$XRAY_DIR/fuzzers/007-timing + +vivado -mode batch -source ../generate.tcl + diff --git a/fuzzers/007-timing/test_unique/generate.tcl b/fuzzers/007-timing/test_unique/generate.tcl new file mode 100644 index 00000000..99fd39d0 --- /dev/null +++ b/fuzzers/007-timing/test_unique/generate.tcl @@ -0,0 +1,97 @@ +source ../../../../utils/utils.tcl + +proc build_design {} { + create_project -force -part $::env(XRAY_PART) design design + read_verilog ../../src/picorv32.v + read_verilog ../top.v + synth_design -top top + + puts "Locking pins" + set_property LOCK_PINS {I0:A1 I1:A2 I2:A3 I3:A4 I4:A5 I5:A6} \ + [get_cells -quiet -filter {REF_NAME == LUT6} -hierarchical] + + puts "Package stuff" + set_property -dict "PACKAGE_PIN $::env(XRAY_PIN_00) IOSTANDARD LVCMOS33" [get_ports clk] + set_property -dict "PACKAGE_PIN $::env(XRAY_PIN_01) IOSTANDARD LVCMOS33" [get_ports stb] + set_property -dict "PACKAGE_PIN $::env(XRAY_PIN_02) IOSTANDARD LVCMOS33" [get_ports di] + set_property -dict "PACKAGE_PIN $::env(XRAY_PIN_03) IOSTANDARD LVCMOS33" [get_ports do] + + puts "pblocking" + create_pblock roi + set roipb [get_pblocks roi] + set_property EXCLUDE_PLACEMENT 1 $roipb + add_cells_to_pblock $roipb [get_cells roi] + resize_pblock $roipb -add "$::env(XRAY_ROI)" + + puts "randplace" + randplace_pblock 50 roi + + set_property CFGBVS VCCO [current_design] + set_property CONFIG_VOLTAGE 3.3 [current_design] + set_property BITSTREAM.GENERAL.PERFRAMECRC YES [current_design] + + puts "dedicated route" + set_property CLOCK_DEDICATED_ROUTE FALSE [get_nets clk_IBUF] + + place_design + route_design + + write_checkpoint -force design.dcp + # disable combinitorial loop + # set_property IS_ENABLED 0 [get_drc_checks {LUTLP-1}] + #write_bitstream -force design.bit +} + +proc pips_all {} { + set outdir "." + set fp [open "$outdir/pip_all.txt" w] + set items [get_pips] + puts "Items: [llength $items]" + + set needspace 0 + set properties [list_property [lindex $items 0]] + foreach item $items { + set needspace 0 + foreach property $properties { + set val [get_property $property $item] + if {"$val" ne ""} { + if $needspace { + puts -nonewline $fp " " + } + puts -nonewline $fp "$property:$val" + set needspace 1 + } + } + puts $fp "" + } + close $fp +} +proc wires_all {} { + set outdir "." 
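+    # Same dump format as pips_all above: one line per wire, holding
+    # space-separated PROPERTY:value pairs for every non-empty property.
+    # wire_all.txt is the default input of wire_unique.py and node_unique.py.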
+ set fp [open "$outdir/wire_all.txt" w] + set items [get_wires] + puts "Items: [llength $items]" + + set needspace 0 + set properties [list_property [lindex $items 0]] + foreach item $items { + set needspace 0 + foreach property $properties { + set val [get_property $property $item] + if {"$val" ne ""} { + if $needspace { + puts -nonewline $fp " " + } + puts -nonewline $fp "$property:$val" + set needspace 1 + } + } + puts $fp "" + } + close $fp +} + +build_design +pips_all +wires_all + diff --git a/minitests/timfuz/node_unique.py b/fuzzers/007-timing/test_unique/node_unique.py similarity index 86% rename from minitests/timfuz/node_unique.py rename to fuzzers/007-timing/test_unique/node_unique.py index 4a0630e7..6e87e438 100644 --- a/minitests/timfuz/node_unique.py +++ b/fuzzers/007-timing/test_unique/node_unique.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python3 + import re @@ -40,8 +42,8 @@ def run(node_fin, verbose=0): return anode['wname'] #return (anode['tile_type'], anode['wname']) - if nodei % 1000 == 0: - print 'Check node %d' % nodei + if nodei % 10000 == 0: + print('Check node %d, %u node types' % (nodei, len(refnodes))) # Existing node? try: refnode = refnodes[getk(anode)] @@ -62,10 +64,10 @@ def run(node_fin, verbose=0): if k in refnode and k in anode: def fail(): - print 'Mismatch on %s' % k - print refnode[k], anode[k] - print refnode['l'] - print anode['l'] + print('Mismatch on %s' % k) + print(refnode[k], anode[k]) + print(refnode['l']) + print(anode['l']) #assert 0 if k == 'SPEED_CLASS': @@ -97,6 +99,9 @@ if __name__ == '__main__': parser.add_argument('--verbose', type=int, help='') parser.add_argument( - 'node_fn_in', default='/dev/stdin', nargs='?', help='Input file') + 'node_fn_in', + default='specimen_001/wire_all.txt', + nargs='?', + help='Input file') args = parser.parse_args() run(open(args.node_fn_in, 'r'), verbose=args.verbose) diff --git a/minitests/timfuz/perf_test.py b/fuzzers/007-timing/test_unique/perf_test.py similarity index 100% rename from minitests/timfuz/perf_test.py rename to fuzzers/007-timing/test_unique/perf_test.py diff --git a/minitests/timfuz/pip_unique.py b/fuzzers/007-timing/test_unique/pip_unique.py similarity index 81% rename from minitests/timfuz/pip_unique.py rename to fuzzers/007-timing/test_unique/pip_unique.py index f49cb1ca..6df2f5b2 100644 --- a/minitests/timfuz/pip_unique.py +++ b/fuzzers/007-timing/test_unique/pip_unique.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python3 + import re @@ -29,8 +31,8 @@ def run(node_fin, verbose=0): return anode['wname'] return (anode['tile_type'], anode['wname']) - if nodei % 1000 == 0: - print 'Check node %d' % nodei + if nodei % 10000 == 0: + print('Check node %d, %u node types' % (nodei, len(refnodes))) # Existing node? try: refnode = refnodes[getk(anode)] @@ -56,14 +58,14 @@ def run(node_fin, verbose=0): if k in refnode and k in anode: def fail(): - print 'Mismatch on %s' % k - print refnode[k], anode[k] - print refnode['l'] - print anode['l'] + print('Mismatch on %s' % k) + print(refnode[k], anode[k]) + print(refnode['l']) + print(anode['l']) #assert 0 if refnode[k] != anode[k]: - print + print('') fail() # A key in one but not the other? 
elif k in refnode or k in anode: @@ -81,6 +83,9 @@ if __name__ == '__main__': parser.add_argument('--verbose', type=int, help='') parser.add_argument( - 'node_fn_in', default='/dev/stdin', nargs='?', help='Input file') + 'node_fn_in', + default='specimen_001/pip_all.txt', + nargs='?', + help='Input file') args = parser.parse_args() run(open(args.node_fn_in, 'r'), verbose=args.verbose) diff --git a/fuzzers/007-timing/test_unique/top.v b/fuzzers/007-timing/test_unique/top.v new file mode 100644 index 00000000..0cb9b8a3 --- /dev/null +++ b/fuzzers/007-timing/test_unique/top.v @@ -0,0 +1,109 @@ +//move some stuff to minitests/ncy0 + +`define SEED 32'h12345678 + +module top(input clk, stb, di, output do); + localparam integer DIN_N = 42; + localparam integer DOUT_N = 79; + + reg [DIN_N-1:0] din; + wire [DOUT_N-1:0] dout; + + reg [DIN_N-1:0] din_shr; + reg [DOUT_N-1:0] dout_shr; + + always @(posedge clk) begin + din_shr <= {din_shr, di}; + dout_shr <= {dout_shr, din_shr[DIN_N-1]}; + if (stb) begin + din <= din_shr; + dout_shr <= dout; + end + end + + assign do = dout_shr[DOUT_N-1]; + + roi #(.DIN_N(DIN_N), .DOUT_N(DOUT_N)) + roi ( + .clk(clk), + .din(din), + .dout(dout) + ); +endmodule + +module roi(input clk, input [DIN_N-1:0] din, output [DOUT_N-1:0] dout); + parameter integer DIN_N = -1; + parameter integer DOUT_N = -1; + + /* + //Take out for now to make sure LUTs are more predictable + picorv32 picorv32 ( + .clk(clk), + .resetn(din[0]), + .mem_valid(dout[0]), + .mem_instr(dout[1]), + .mem_ready(din[1]), + .mem_addr(dout[33:2]), + .mem_wdata(dout[66:34]), + .mem_wstrb(dout[70:67]), + .mem_rdata(din[33:2]) + ); + */ + + /* + randluts randluts ( + .din(din[41:34]), + .dout(dout[78:71]) + ); + */ + randluts #(.N(150)) randluts ( + .din(din[41:34]), + .dout(dout[78:71]) + ); +endmodule + +module randluts(input [7:0] din, output [7:0] dout); + parameter integer N = 250; + + function [31:0] xorshift32(input [31:0] xorin); + begin + xorshift32 = xorin; + xorshift32 = xorshift32 ^ (xorshift32 << 13); + xorshift32 = xorshift32 ^ (xorshift32 >> 17); + xorshift32 = xorshift32 ^ (xorshift32 << 5); + end + endfunction + + function [63:0] lutinit(input [7:0] a, b); + begin + lutinit[63:32] = xorshift32(xorshift32(xorshift32(xorshift32({a, b} ^ `SEED)))); + lutinit[31: 0] = xorshift32(xorshift32(xorshift32(xorshift32({b, a} ^ `SEED)))); + end + endfunction + + wire [(N+1)*8-1:0] nets; + + assign nets[7:0] = din; + assign dout = nets[(N+1)*8-1:N*8]; + + genvar i, j; + generate + for (i = 0; i < N; i = i+1) begin:is + for (j = 0; j < 8; j = j+1) begin:js + localparam integer k = xorshift32(xorshift32(xorshift32(xorshift32((i << 20) ^ (j << 10) ^ `SEED)))) & 255; + (* KEEP, DONT_TOUCH *) + LUT6 #( + .INIT(lutinit(i, j)) + ) lut ( + .I0(nets[8*i+(k+0)%8]), + .I1(nets[8*i+(k+1)%8]), + .I2(nets[8*i+(k+2)%8]), + .I3(nets[8*i+(k+3)%8]), + .I4(nets[8*i+(k+4)%8]), + .I5(nets[8*i+(k+5)%8]), + .O(nets[8*i+8+j]) + ); + end + end + endgenerate +endmodule diff --git a/minitests/timfuz/wire_unique.py b/fuzzers/007-timing/test_unique/wire_unique.py similarity index 83% rename from minitests/timfuz/wire_unique.py rename to fuzzers/007-timing/test_unique/wire_unique.py index c7b57272..4d286566 100644 --- a/minitests/timfuz/wire_unique.py +++ b/fuzzers/007-timing/test_unique/wire_unique.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python3 + import re @@ -29,8 +31,8 @@ def run(node_fin, verbose=0): return anode['wname'] #return (anode['tile_type'], anode['wname']) - if nodei % 1000 == 0: - print 'Check node %d' % nodei + if nodei % 
10000 == 0: + print('Check node %d, %u node types' % (nodei, len(refnodes))) # Existing node? try: refnode = refnodes[getk(anode)] @@ -61,10 +63,10 @@ def run(node_fin, verbose=0): if k in refnode and k in anode: def fail(): - print 'Mismatch on %s' % k - print refnode[k], anode[k] - print refnode['l'] - print anode['l'] + print('Mismatch on %s' % k) + print(refnode[k], anode[k]) + print(refnode['l']) + print(anode['l']) #assert 0 if refnode[k] != anode[k]: @@ -86,6 +88,9 @@ if __name__ == '__main__': parser.add_argument('--verbose', type=int, help='') parser.add_argument( - 'node_fn_in', default='/dev/stdin', nargs='?', help='Input file') + 'node_fn_in', + default='specimen_001/wire_all.txt', + nargs='?', + help='Input file') args = parser.parse_args() run(open(args.node_fn_in, 'r'), verbose=args.verbose) From 937b8e2fe82c27564dda7a3687ef8d7250552bad Mon Sep 17 00:00:00 2001 From: John McMaster Date: Tue, 18 Sep 2018 16:00:22 -0700 Subject: [PATCH 02/10] timfuz test_unique: small doc update Signed-off-by: John McMaster --- fuzzers/007-timing/test_unique/README.md | 2 +- fuzzers/007-timing/test_unique/node_unique.py | 2 +- fuzzers/007-timing/test_unique/pip_unique.py | 2 +- fuzzers/007-timing/test_unique/wire_unique.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/fuzzers/007-timing/test_unique/README.md b/fuzzers/007-timing/test_unique/README.md index 44db73b9..84c19977 100644 --- a/fuzzers/007-timing/test_unique/README.md +++ b/fuzzers/007-timing/test_unique/README.md @@ -7,5 +7,5 @@ $ python3 wire_unique.py $ python3 node_unique.py ``` -NOTE: this will take a long time +NOTE: "make" will take a long time (about 2.5 hours on my machine) diff --git a/fuzzers/007-timing/test_unique/node_unique.py b/fuzzers/007-timing/test_unique/node_unique.py index 6e87e438..df6cd0db 100644 --- a/fuzzers/007-timing/test_unique/node_unique.py +++ b/fuzzers/007-timing/test_unique/node_unique.py @@ -42,7 +42,7 @@ def run(node_fin, verbose=0): return anode['wname'] #return (anode['tile_type'], anode['wname']) - if nodei % 10000 == 0: + if nodei % 100000 == 0: print('Check node %d, %u node types' % (nodei, len(refnodes))) # Existing node? try: diff --git a/fuzzers/007-timing/test_unique/pip_unique.py b/fuzzers/007-timing/test_unique/pip_unique.py index 6df2f5b2..c64c1744 100644 --- a/fuzzers/007-timing/test_unique/pip_unique.py +++ b/fuzzers/007-timing/test_unique/pip_unique.py @@ -31,7 +31,7 @@ def run(node_fin, verbose=0): return anode['wname'] return (anode['tile_type'], anode['wname']) - if nodei % 10000 == 0: + if nodei % 100000 == 0: print('Check node %d, %u node types' % (nodei, len(refnodes))) # Existing node? try: diff --git a/fuzzers/007-timing/test_unique/wire_unique.py b/fuzzers/007-timing/test_unique/wire_unique.py index 4d286566..2f4cc339 100644 --- a/fuzzers/007-timing/test_unique/wire_unique.py +++ b/fuzzers/007-timing/test_unique/wire_unique.py @@ -31,7 +31,7 @@ def run(node_fin, verbose=0): return anode['wname'] #return (anode['tile_type'], anode['wname']) - if nodei % 10000 == 0: + if nodei % 100000 == 0: print('Check node %d, %u node types' % (nodei, len(refnodes))) # Existing node? 
try: From 744c61e4a6bbaa830a0f7e83e0672895fb344e08 Mon Sep 17 00:00:00 2001 From: John McMaster Date: Wed, 19 Sep 2018 11:19:42 -0700 Subject: [PATCH 03/10] timfuz: test_zero Signed-off-by: John McMaster --- fuzzers/.gitignore | 2 + fuzzers/007-timing/test_zero/README.md | 1 + fuzzers/007-timing/test_zero/process.py | 69 +++++++++++++++++++++++++ 3 files changed, 72 insertions(+) create mode 100644 fuzzers/.gitignore create mode 100644 fuzzers/007-timing/test_zero/README.md create mode 100644 fuzzers/007-timing/test_zero/process.py diff --git a/fuzzers/.gitignore b/fuzzers/.gitignore new file mode 100644 index 00000000..01a18a08 --- /dev/null +++ b/fuzzers/.gitignore @@ -0,0 +1,2 @@ +timing3.csv +timing3.txt diff --git a/fuzzers/007-timing/test_zero/README.md b/fuzzers/007-timing/test_zero/README.md new file mode 100644 index 00000000..ef5ea1e4 --- /dev/null +++ b/fuzzers/007-timing/test_zero/README.md @@ -0,0 +1 @@ +Collect info on ZERO speed classes diff --git a/fuzzers/007-timing/test_zero/process.py b/fuzzers/007-timing/test_zero/process.py new file mode 100644 index 00000000..21d322eb --- /dev/null +++ b/fuzzers/007-timing/test_zero/process.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python3 + +import sys +import os +import time +import json + + +def run_types(tilej, verbose=False): + + def process(etype): + # dict[model] = set((tile, wire/pip)) + zeros = {} + print('Processing %s' % etype) + # Index delay models by type, recording where they occured + for tilek, tilev in tilej['tiles'].items(): + for ename, emodel in tilev[etype].items(): + if emodel.find('ZERO') >= 0: + zeros.setdefault(emodel, set()).add((tilek, ename)) + # Print out delay model instances + print('%s ZERO types: %u, %s' % (etype, len(zeros), zeros.keys())) + print('%s ZERO instances: %u' % (etype, sum([len(x) for x in zeros.values()]))) + for model in sorted(zeros.keys()): + modelv = zeros[model] + print('Model: %s' % model) + for tile_name, element_name in sorted(list(modelv)): + print(' %s: %s' % (tile_name, element_name)) + + process('wires') + print('') + process('pips') + +def run_prefix(tilej, verbose=False): + + def process(etype): + prefixes = set() + print('Processing %s' % etype) + # Index delay models by type, recording where they occured + for tilek, tilev in tilej['tiles'].items(): + for ename, emodel in tilev[etype].items(): + prefix = emodel.split('_')[0] + prefixes.add(prefix) + print('%s prefixes: %u' % (etype, len(prefixes))) + for prefix in sorted(prefixes): + print(' %s' % prefix) + + process('wires') + print('') + process('pips') + +def run(fnin, verbose=False): + tilej = json.load((open(fnin, 'r'))) + run_types(tilej) + print('') + print('') + run_prefix(tilej) + +def main(): + import argparse + + parser = argparse.ArgumentParser(description='Solve timing solution') + parser.add_argument('fnin', default="../timgrid/build/timgrid-s.json", nargs='?', help='input timgrid JSON') + args = parser.parse_args() + + run(args.fnin, verbose=False) + + +if __name__ == '__main__': + main() From 7c0828e6c2c77081a6d7d0b2a78988f33be4e5bd Mon Sep 17 00:00:00 2001 From: John McMaster Date: Wed, 19 Sep 2018 11:41:55 -0700 Subject: [PATCH 04/10] timfuz: --sub-csv => --bounds-csv Signed-off-by: John McMaster --- fuzzers/007-timing/projects/corner.mk | 2 +- fuzzers/007-timing/solve_linprog.py | 4 ++-- fuzzers/007-timing/tile_annotate.py | 2 +- fuzzers/007-timing/timfuz_solve.py | 6 +++--- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/fuzzers/007-timing/projects/corner.mk 
b/fuzzers/007-timing/projects/corner.mk index f332d727..1bfa8d6b 100644 --- a/fuzzers/007-timing/projects/corner.mk +++ b/fuzzers/007-timing/projects/corner.mk @@ -32,7 +32,7 @@ build/$(CORNER)/leastsq.csv: build/sub.json build/grouped.csv build/checksub bui build/$(CORNER)/linprog.csv: build/$(CORNER)/leastsq.csv build/grouped.csv # Tweak rough timing model, making sure all constraints are satisfied - ALLOW_ZERO_EQN=$(ALLOW_ZERO_EQN) python3 $(TIMFUZ_DIR)/solve_linprog.py --sub-json build/sub.json --sub-csv build/$(CORNER)/leastsq.csv --massage build/grouped.csv --corner $(CORNER) --out build/$(CORNER)/linprog.csv.tmp + ALLOW_ZERO_EQN=$(ALLOW_ZERO_EQN) python3 $(TIMFUZ_DIR)/solve_linprog.py --sub-json build/sub.json --bounds-csv build/$(CORNER)/leastsq.csv --massage build/grouped.csv --corner $(CORNER) --out build/$(CORNER)/linprog.csv.tmp mv build/$(CORNER)/linprog.csv.tmp build/$(CORNER)/linprog.csv build/$(CORNER)/flat.csv: build/$(CORNER)/linprog.csv diff --git a/fuzzers/007-timing/solve_linprog.py b/fuzzers/007-timing/solve_linprog.py index d22e14d1..02ed7396 100644 --- a/fuzzers/007-timing/solve_linprog.py +++ b/fuzzers/007-timing/solve_linprog.py @@ -192,7 +192,7 @@ def main(): parser.add_argument('--verbose', action='store_true', help='') parser.add_argument('--massage', action='store_true', help='') - parser.add_argument('--sub-csv', help='') + parser.add_argument('--bounds-csv', help='Previous solve result starting point') parser.add_argument( '--sub-json', help='Group substitutions to make fully ranked') parser.add_argument('--corner', required=True, default="slow_max", help='') @@ -215,7 +215,7 @@ def main(): timfuz_solve.run( run_corner=run_corner, sub_json=sub_json, - sub_csv=args.sub_csv, + bounds_csv=args.bounds_csv, fns_in=fns_in, corner=args.corner, massage=args.massage, diff --git a/fuzzers/007-timing/tile_annotate.py b/fuzzers/007-timing/tile_annotate.py index ffc07a21..af3d3d2a 100644 --- a/fuzzers/007-timing/tile_annotate.py +++ b/fuzzers/007-timing/tile_annotate.py @@ -51,7 +51,7 @@ def main(): default='build/timgrid-vc.json', help='tilegrid timing delay values at corner (timgrid-vc.json)') parser.add_argument( - 'fn_ins', nargs='+', help='Input flattened timing csv (flat.json)') + 'fn_ins', nargs='+', help='Input flattened timing csv (flat.csv)') args = parser.parse_args() run(args.fn_ins, args.out, args.timgrid_s, verbose=False) diff --git a/fuzzers/007-timing/timfuz_solve.py b/fuzzers/007-timing/timfuz_solve.py index 684cf05d..de1c5812 100644 --- a/fuzzers/007-timing/timfuz_solve.py +++ b/fuzzers/007-timing/timfuz_solve.py @@ -98,7 +98,7 @@ def run( corner, run_corner, sub_json=None, - sub_csv=None, + bounds_csv=None, dedup=True, massage=False, outfn=None, @@ -132,8 +132,8 @@ def run( Special .csv containing one variable per line Used primarily for multiple optimization passes, such as different algorithms or additional constraints ''' - if sub_csv: - Ads2, b2 = loadc_Ads_b([sub_csv], corner, ico=True) + if bounds_csv: + Ads2, b2 = loadc_Ads_b([bounds_csv], corner, ico=True) bounds = Ads2bounds(Ads2, b2) assert len(bounds), 'Failed to load bounds' rows_old = len(Ads) From ee3ef206adcd9166d12fa5a4f947cc25440d533a Mon Sep 17 00:00:00 2001 From: John McMaster Date: Wed, 19 Sep 2018 11:53:57 -0700 Subject: [PATCH 05/10] timfuz: make rref deterministic Signed-off-by: John McMaster --- fuzzers/007-timing/rref.py | 32 ++++++---- fuzzers/007-timing/solve_linprog.py | 3 +- fuzzers/007-timing/test_zero/process.py | 15 +++-- fuzzers/007-timing/timfuz.py | 81 
++++++++++++++++++++++--- 4 files changed, 103 insertions(+), 28 deletions(-) diff --git a/fuzzers/007-timing/rref.py b/fuzzers/007-timing/rref.py index fedcfef5..f3169b64 100644 --- a/fuzzers/007-timing/rref.py +++ b/fuzzers/007-timing/rref.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -from timfuz import Benchmark, Ar_di2np, loadc_Ads_b, index_names, A_ds2np, simplify_rows +from timfuz import Benchmark, Ar_di2np, loadc_Ads_b, index_names, A_ds2np, simplify_rows, OrderedSet import numpy as np import glob import math @@ -27,14 +27,15 @@ class State(object): self.names = index_names(self.Ads) # known zero delay elements - self.drop_names = set(drop_names) + self.drop_names = OrderedSet(drop_names) # active names in rows # includes sub variables, excludes variables that have been substituted out - self.base_names = set(self.names) + self.base_names = OrderedSet(self.names) + #self.names = OrderedSet(self.base_names) self.names = set(self.base_names) # List of variable substitutions # k => dict of v:n entries that it came from - self.subs = {} + self.subs = OrderedDict() self.verbose = True def print_stats(self): @@ -63,11 +64,16 @@ class State(object): def write_state(state, fout): j = { - 'names': dict([(x, None) for x in state.names]), - 'drop_names': list(state.drop_names), - 'base_names': list(state.base_names), - 'subs': dict([(name, values) for name, values in state.subs.items()]), - 'pivots': state.pivots, + 'names': + OrderedDict([(x, None) for x in state.names]), + 'drop_names': + list(state.drop_names), + 'base_names': + list(state.base_names), + 'subs': + OrderedDict([(name, values) for name, values in state.subs.items()]), + 'pivots': + state.pivots, } json.dump(j, fout, sort_keys=True, indent=4, separators=(',', ': ')) @@ -89,7 +95,7 @@ def Anp2matrix(Anp): def row_np2ds(rownp, names): - ret = {} + ret = OrderedDict() assert len(rownp) == len(names), (len(rownp), len(names)) for namei, name in enumerate(names): v = rownp[namei] @@ -102,7 +108,7 @@ def row_sym2dsf(rowsym, names): '''Convert a sympy row into a dictionary of keys to (numerator, denominator) tuples''' from sympy import fraction - ret = {} + ret = OrderedDict() assert len(rowsym) == len(names), (len(rowsym), len(names)) for namei, name in enumerate(names): v = rowsym[namei] @@ -145,7 +151,7 @@ def state_rref(state, verbose=False): print('rref') sympy.pprint(rref) - state.pivots = {} + state.pivots = OrderedDict() def row_solved(rowsym, row_pivot): for ci, c in enumerate(rowsym): @@ -177,7 +183,7 @@ def state_rref(state, verbose=False): state.names.add(group_name) # Remove substituted variables # Note: variables may appear multiple times - state.names.difference_update(set(rowdsf.keys())) + state.names.difference_update(OrderedSet(rowdsf.keys())) pivot_name = names[row_pivot] state.pivots[group_name] = pivot_name if verbose: diff --git a/fuzzers/007-timing/solve_linprog.py b/fuzzers/007-timing/solve_linprog.py index 02ed7396..0825bac3 100644 --- a/fuzzers/007-timing/solve_linprog.py +++ b/fuzzers/007-timing/solve_linprog.py @@ -192,7 +192,8 @@ def main(): parser.add_argument('--verbose', action='store_true', help='') parser.add_argument('--massage', action='store_true', help='') - parser.add_argument('--bounds-csv', help='Previous solve result starting point') + parser.add_argument( + '--bounds-csv', help='Previous solve result starting point') parser.add_argument( '--sub-json', help='Group substitutions to make fully ranked') parser.add_argument('--corner', required=True, default="slow_max", help='') diff --git 
a/fuzzers/007-timing/test_zero/process.py b/fuzzers/007-timing/test_zero/process.py index 21d322eb..3b2ce10c 100644 --- a/fuzzers/007-timing/test_zero/process.py +++ b/fuzzers/007-timing/test_zero/process.py @@ -7,7 +7,6 @@ import json def run_types(tilej, verbose=False): - def process(etype): # dict[model] = set((tile, wire/pip)) zeros = {} @@ -19,7 +18,9 @@ def run_types(tilej, verbose=False): zeros.setdefault(emodel, set()).add((tilek, ename)) # Print out delay model instances print('%s ZERO types: %u, %s' % (etype, len(zeros), zeros.keys())) - print('%s ZERO instances: %u' % (etype, sum([len(x) for x in zeros.values()]))) + print( + '%s ZERO instances: %u' % + (etype, sum([len(x) for x in zeros.values()]))) for model in sorted(zeros.keys()): modelv = zeros[model] print('Model: %s' % model) @@ -30,8 +31,8 @@ def run_types(tilej, verbose=False): print('') process('pips') -def run_prefix(tilej, verbose=False): +def run_prefix(tilej, verbose=False): def process(etype): prefixes = set() print('Processing %s' % etype) @@ -48,6 +49,7 @@ def run_prefix(tilej, verbose=False): print('') process('pips') + def run(fnin, verbose=False): tilej = json.load((open(fnin, 'r'))) run_types(tilej) @@ -55,11 +57,16 @@ def run(fnin, verbose=False): print('') run_prefix(tilej) + def main(): import argparse parser = argparse.ArgumentParser(description='Solve timing solution') - parser.add_argument('fnin', default="../timgrid/build/timgrid-s.json", nargs='?', help='input timgrid JSON') + parser.add_argument( + 'fnin', + default="../timgrid/build/timgrid-s.json", + nargs='?', + help='input timgrid JSON') args = parser.parse_args() run(args.fnin, verbose=False) diff --git a/fuzzers/007-timing/timfuz.py b/fuzzers/007-timing/timfuz.py index 581de3a0..a3bc3a1e 100644 --- a/fuzzers/007-timing/timfuz.py +++ b/fuzzers/007-timing/timfuz.py @@ -13,10 +13,76 @@ import sys import random import glob from fractions import Fraction +import collections from benchmark import Benchmark -NAME_ZERO = set( + +# Equations are filtered out until nothing is left +class SimplifiedToZero(Exception): + pass + + +# http://code.activestate.com/recipes/576694/ +class OrderedSet(collections.MutableSet): + def __init__(self, iterable=None): + self.end = end = [] + end += [None, end, end] # sentinel node for doubly linked list + self.map = {} # key --> [key, prev, next] + if iterable is not None: + self |= iterable + + def __len__(self): + return len(self.map) + + def __contains__(self, key): + return key in self.map + + def add(self, key): + if key not in self.map: + end = self.end + curr = end[1] + curr[2] = end[1] = self.map[key] = [key, curr, end] + + def discard(self, key): + if key in self.map: + key, prev, next = self.map.pop(key) + prev[2] = next + next[1] = prev + + def __iter__(self): + end = self.end + curr = end[2] + while curr is not end: + yield curr[0] + curr = curr[2] + + def __reversed__(self): + end = self.end + curr = end[1] + while curr is not end: + yield curr[0] + curr = curr[1] + + def pop(self, last=True): + if not self: + raise KeyError('set is empty') + key = self.end[1][0] if last else self.end[2][0] + self.discard(key) + return key + + def __repr__(self): + if not self: + return '%s()' % (self.__class__.__name__, ) + return '%s(%r)' % (self.__class__.__name__, list(self)) + + def __eq__(self, other): + if isinstance(other, OrderedSet): + return len(self) == len(other) and list(self) == list(other) + return set(self) == set(other) + + +NAME_ZERO = OrderedSet( [ "BSW_CLK_ZERO", "BSW_ZERO", @@ -40,11 +106,6 @@ 
corner_s2i = OrderedDict( ]) -# Equations are filtered out until nothing is left -class SimplifiedToZero(Exception): - pass - - def allow_zero_eqns(): return os.getenv('ALLOW_ZERO_EQN', 'N') == 'Y' @@ -148,7 +209,7 @@ def check_feasible(A_ub, b_ub): def Ab_ub_dt2d(eqns): '''Convert dict using the rows as keys into a list of dicts + b_ub list (ie return A_ub, b_ub)''' #return [dict(rowt) for rowt in eqns] - rows = [(dict(rowt), b) for rowt, b in eqns.items()] + rows = [(OrderedDict(rowt), b) for rowt, b in eqns.items()] A_ubd, b_ub = zip(*rows) return list(A_ubd), list(b_ub) @@ -411,7 +472,7 @@ def derive_eq_by_col(A_ubd, b_ub, verbose=0): b_ub[row_refi] /= v knowns[k] = b_ub[row_refi] print(' done') - #knowns_set = set(knowns.keys()) + #knowns_set = OrderedSet(knowns.keys()) print('%d constrained' % len(knowns)) ''' Now see what we can do @@ -648,7 +709,7 @@ def loadc_Ads_raw(fns): def index_names(Ads): - names = set() + names = OrderedSet() for row_ds in Ads: for k1 in row_ds.keys(): names.add(k1) @@ -740,7 +801,7 @@ def run_sub_json(Ads, sub_json, strict=False, verbose=False): ncols_new = 0 print('Subbing %u rows' % len(Ads)) - prints = set() + prints = OrderedSet() for rowi, row in enumerate(Ads): if 0 and verbose: From 6321be7857fcc43b563a46dbdd75b3ac2a359b11 Mon Sep 17 00:00:00 2001 From: John McMaster Date: Wed, 19 Sep 2018 12:05:40 -0700 Subject: [PATCH 06/10] timfuz: make csv_group2flat deterministic Signed-off-by: John McMaster --- fuzzers/007-timing/csv_group2flat.py | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/fuzzers/007-timing/csv_group2flat.py b/fuzzers/007-timing/csv_group2flat.py index 403b2991..a80c11ac 100644 --- a/fuzzers/007-timing/csv_group2flat.py +++ b/fuzzers/007-timing/csv_group2flat.py @@ -39,18 +39,10 @@ def gen_flat(fns_in, sub_json, corner=None): yield zero, zero_row -def run(fns_in, fnout, sub_json, corner=None, sort=False, verbose=False): - ''' - if sort: - sortf = sorted - else: - sortf = lambda x: x - ''' - +def run(fns_in, fnout, sub_json, corner=None, verbose=False): with open(fnout, 'w') as fout: fout.write('ico,fast_max fast_min slow_max slow_min,rows...\n') - #for name, corners in sortf(gen_flat(fnin, sub_json)): - for name, corners in gen_flat(fns_in, sub_json, corner=corner): + for name, corners in sorted(list(gen_flat(fns_in, sub_json, corner=corner))): row_ico = 1 items = [str(row_ico), corners2csv(corners)] items.append('%u %s' % (1, name)) @@ -64,7 +56,6 @@ def main(): description='Substitute .csv to ungroup correlated variables') parser.add_argument('--verbose', action='store_true', help='') - #parser.add_argument('--sort', action='store_true', help='') parser.add_argument('--sub-csv', help='') parser.add_argument( '--sub-json', @@ -87,7 +78,6 @@ def main(): args.fns_in, args.out, sub_json=sub_json, - #sort=args.sort, verbose=args.verbose, corner=args.corner) finally: From c2c996c706cb85604e2d5f2d992f1f3dbed0f9cf Mon Sep 17 00:00:00 2001 From: John McMaster Date: Wed, 19 Sep 2018 12:11:21 -0700 Subject: [PATCH 07/10] timfuz: delete dead code Signed-off-by: John McMaster --- fuzzers/007-timing/rref.py | 22 +--------------------- 1 file changed, 1 insertion(+), 21 deletions(-) diff --git a/fuzzers/007-timing/rref.py b/fuzzers/007-timing/rref.py index f3169b64..1c665dd0 100644 --- a/fuzzers/007-timing/rref.py +++ b/fuzzers/007-timing/rref.py @@ -78,22 +78,6 @@ def write_state(state, fout): json.dump(j, fout, sort_keys=True, indent=4, separators=(',', ': ')) -def Anp2matrix(Anp): - ''' - Original idea was to make 
into a square matrix - but this loses too much information - so now this actually isn't doing anything and should probably be eliminated - ''' - - ncols = len(Anp[0]) - A_ub2 = [np.zeros(ncols) for _i in range(ncols)] - dst_rowi = 0 - for rownp in Anp: - A_ub2[dst_rowi] = np.add(A_ub2[dst_rowi], rownp) - dst_rowi = (dst_rowi + 1) % ncols - return A_ub2 - - def row_np2ds(rownp, names): ret = OrderedDict() assert len(rownp) == len(names), (len(rownp), len(names)) @@ -123,11 +107,7 @@ def state_rref(state, verbose=False): names, Anp = A_ds2np(state.Ads) print('np: %u rows x %u cols' % (len(Anp), len(Anp[0]))) - if 0: - print('Combining rows into matrix') - mnp = Anp2matrix(Anp) - else: - mnp = Anp + mnp = Anp print('Matrix: %u rows x %u cols' % (len(mnp), len(mnp[0]))) print('Converting np to sympy matrix') mfrac = fracm_quick(mnp) From 06d47dcb5e5509a7751ad91cdcedbc3ece4dc5b7 Mon Sep 17 00:00:00 2001 From: John McMaster Date: Wed, 19 Sep 2018 13:45:13 -0700 Subject: [PATCH 08/10] timfuz: handle zeros more concretely Signed-off-by: John McMaster --- fuzzers/007-timing/csv_group2flat.py | 30 +++++++++++++------ fuzzers/007-timing/rref.py | 43 ++++++++++++++++++++-------- fuzzers/007-timing/solve_leastsq.py | 43 +--------------------------- fuzzers/007-timing/solve_linprog.py | 42 ++------------------------- fuzzers/007-timing/timfuz.py | 7 ++++- fuzzers/007-timing/timfuz_solve.py | 41 +++++++++++++++++++++++++- 6 files changed, 101 insertions(+), 105 deletions(-) diff --git a/fuzzers/007-timing/csv_group2flat.py b/fuzzers/007-timing/csv_group2flat.py index a80c11ac..a9bd79ac 100644 --- a/fuzzers/007-timing/csv_group2flat.py +++ b/fuzzers/007-timing/csv_group2flat.py @@ -6,36 +6,48 @@ from timfuz import Benchmark, loadc_Ads_bs, load_sub, Ads2bounds, corners2csv, c def gen_flat(fns_in, sub_json, corner=None): Ads, bs = loadc_Ads_bs(fns_in, ico=True) bounds = Ads2bounds(Ads, bs) - zeros = set() + # Elements with zero delay assigned due to sub group + group_zeros = set() + # Elements with a concrete delay nonzeros = set() + if corner: + zero_row = [None, None, None, None] + zero_row[corner_s2i[corner]] = 0 + else: + zero_row = None + for bound_name, bound_bs in bounds.items(): sub = sub_json['subs'].get(bound_name, None) - if sub: + if bound_name in sub_json['zero_names']: + if zero_row: + yield bound_name, 0 + elif sub: + print('sub', sub) # put entire delay into pivot pivot = sub_json['pivots'][bound_name] - assert pivot not in zeros + assert pivot not in group_zeros nonzeros.add(pivot) non_pivot = set(sub.keys() - set([pivot])) #for name in non_pivot: # assert name not in nonzeros, (pivot, name, nonzeros) - zeros.update(non_pivot) + group_zeros.update(non_pivot) yield pivot, bound_bs else: nonzeros.add(bound_name) yield bound_name, bound_bs # non-pivots can appear multiple times, but they should always be zero # however, due to substitution limitations, just warn - violations = zeros.intersection(nonzeros) + violations = group_zeros.intersection(nonzeros) if len(violations): print('WARNING: %s non-0 non-pivot' % (len(violations))) # XXX: how to best handle these? # should they be fixed 0? 
- if corner: - zero_row = [None, None, None, None] - zero_row[corner_s2i[corner]] = 0 - for zero in zeros - violations: + if zero_row: + print('zero_row', len(group_zeros), len(violations)) + for zero in group_zeros - violations: + print('zero', zero) yield zero, zero_row diff --git a/fuzzers/007-timing/rref.py b/fuzzers/007-timing/rref.py index 1c665dd0..18a60a67 100644 --- a/fuzzers/007-timing/rref.py +++ b/fuzzers/007-timing/rref.py @@ -9,6 +9,20 @@ import sympy from collections import OrderedDict from fractions import Fraction +def rm_zero_cols(Ads, verbose=True): + removed = OrderedSet() + + print('Removing ZERO elements') + for row_ds in Ads: + for k in set(row_ds.keys()): + if k in removed: + del row_ds[k] + elif k.find('ZERO') >= 0: + del row_ds[k] + removed.add(k) + if verbose: + print(' Removing %s' % k) + return removed def fracr_quick(r): return [Fraction(numerator=int(x), denominator=1) for x in r] @@ -20,14 +34,13 @@ def fracm_quick(m): print('fracm_quick type: %s' % t) return [fracr_quick(r) for r in m] - class State(object): - def __init__(self, Ads, drop_names=[]): + def __init__(self, Ads, zero_names=[]): self.Ads = Ads self.names = index_names(self.Ads) # known zero delay elements - self.drop_names = OrderedSet(drop_names) + self.zero_names = OrderedSet(zero_names) # active names in rows # includes sub variables, excludes variables that have been substituted out self.base_names = OrderedSet(self.names) @@ -46,30 +59,34 @@ class State(object): " Largest: %u" % max([len(x) for x in self.subs.values()])) print(" Rows: %u" % len(self.Ads)) print( - " Cols (in): %u" % (len(self.base_names) + len(self.drop_names))) + " Cols (in): %u" % (len(self.base_names) + len(self.zero_names))) print(" Cols (preprocessed): %u" % len(self.base_names)) - print(" Drop names: %u" % len(self.drop_names)) + print(" ZERO names: %u" % len(self.zero_names)) print(" Cols (out): %u" % len(self.names)) print(" Solvable vars: %u" % len(self.names & self.base_names)) assert len(self.names) >= len(self.subs) @staticmethod - def load(fn_ins, simplify=False, corner=None): + def load(fn_ins, simplify=False, corner=None, rm_zero=False): + zero_names = OrderedSet() + Ads, b = loadc_Ads_b(fn_ins, corner=corner, ico=True) + if rm_zero: + zero_names = rm_zero_cols(Ads) if simplify: print('Simplifying corner %s' % (corner, )) Ads, b = simplify_rows(Ads, b, remove_zd=False, corner=corner) - return State(Ads) + return State(Ads, zero_names=zero_names) def write_state(state, fout): j = { 'names': OrderedDict([(x, None) for x in state.names]), - 'drop_names': - list(state.drop_names), + 'zero_names': + sorted(list(state.zero_names)), 'base_names': - list(state.base_names), + sorted(list(state.base_names)), 'subs': OrderedDict([(name, values) for name, values in state.subs.items()]), 'pivots': @@ -172,11 +189,11 @@ def state_rref(state, verbose=False): return state -def run(fnout, fn_ins, simplify=False, corner=None, verbose=0): +def run(fnout, fn_ins, simplify=False, corner=None, rm_zero=False, verbose=0): print('Loading data') assert len(fn_ins) > 0 - state = State.load(fn_ins, simplify=simplify, corner=corner) + state = State.load(fn_ins, simplify=simplify, corner=corner, rm_zero=rm_zero) state_rref(state, verbose=verbose) state.print_stats() if fnout: @@ -195,6 +212,7 @@ def main(): parser.add_argument('--verbose', action='store_true', help='') parser.add_argument('--simplify', action='store_true', help='') parser.add_argument('--corner', default="slow_max", help='') + parser.add_argument('--rm-zero', 
action='store_true', help='Remove ZERO elements') parser.add_argument( '--speed-json', default='build_speed/speed.json', @@ -214,6 +232,7 @@ def main(): fn_ins=fns_in, simplify=args.simplify, corner=args.corner, + rm_zero=args.rm_zero, verbose=args.verbose) finally: print('Exiting after %s' % bench) diff --git a/fuzzers/007-timing/solve_leastsq.py b/fuzzers/007-timing/solve_leastsq.py index db22b168..953c4dcf 100644 --- a/fuzzers/007-timing/solve_leastsq.py +++ b/fuzzers/007-timing/solve_leastsq.py @@ -30,47 +30,6 @@ def mkestimate(Anp, b): return x0 -def save(outfn, xvals, names, corner): - # ballpark minimum actual observed delay is around 7 (carry chain) - # anything less than one is probably a solver artifact - delta = 0.5 - corneri = corner_s2i[corner] - - # Round conservatively - roundf = { - 'fast_max': math.ceil, - 'fast_min': math.floor, - 'slow_max': math.ceil, - 'slow_min': math.floor, - }[corner] - - print('Writing results') - skips = 0 - keeps = 0 - with open(outfn, 'w') as fout: - # write as one variable per line - # this natively forms a bound if fed into linprog solver - fout.write('ico,fast_max fast_min slow_max slow_min,rows...\n') - for xval, name in zip(xvals, names): - row_ico = 1 - - # also review ceil vs floor choice for min vs max - # lets be more conservative for now - if xval < delta: - #print('Skipping %s: %0.6f' % (name, xval)) - skips += 1 - continue - keeps += 1 - #xvali = round(xval) - - items = [str(row_ico), acorner2csv(roundf(xval), corneri)] - items.append('%u %s' % (1, name)) - fout.write(','.join(items) + '\n') - print( - 'Wrote: skip %u => %u / %u valid delays' % (skips, keeps, len(names))) - assert keeps, 'Failed to estimate delay' - - def run_corner( Anp, b, names, corner, verbose=False, opts={}, meta={}, outfn=None): # Given timing scores for above delays (-ps) @@ -140,7 +99,7 @@ def run_corner( print('Done') if outfn: - save(outfn, res.x, names, corner) + timfuz_solve.solve_save(outfn, res.x, names, corner, verbose=verbose) def main(): diff --git a/fuzzers/007-timing/solve_linprog.py b/fuzzers/007-timing/solve_linprog.py index 0825bac3..e3e99c18 100644 --- a/fuzzers/007-timing/solve_linprog.py +++ b/fuzzers/007-timing/solve_linprog.py @@ -12,50 +12,12 @@ import time import timfuz_solve -def save(outfn, xvals, names, corner): - # ballpark minimum actual observed delay is around 7 (carry chain) - # anything less than one is probably a solver artifact - delta = 0.5 - corneri = corner_s2i[corner] - - roundf = { - 'fast_max': math.ceil, - 'fast_min': math.floor, - 'slow_max': math.ceil, - 'slow_min': math.floor, - }[corner] - - print('Writing results') - zeros = 0 - with open(outfn, 'w') as fout: - # write as one variable per line - # this natively forms a bound if fed into linprog solver - fout.write('ico,fast_max fast_min slow_max slow_min,rows...\n') - for xval, name in zip(xvals, names): - row_ico = 1 - - # FIXME: only report for the given corner? 
- # also review ceil vs floor choice for min vs max - # lets be more conservative for now - if xval < delta: - print('WARNING: near 0 delay on %s: %0.6f' % (name, xval)) - zeros += 1 - #continue - items = [str(row_ico), acorner2csv(roundf(xval), corneri)] - items.append('%u %s' % (1, name)) - fout.write(','.join(items) + '\n') - nonzeros = len(names) - zeros - print( - 'Wrote: %u / %u constrained delays, %u zeros' % - (nonzeros, len(names), zeros)) - - def run_corner( Anp, b, names, corner, verbose=False, opts={}, meta={}, outfn=None): if len(Anp) == 0: print('WARNING: zero equations') if outfn: - save(outfn, [], [], corner) + timfuz_solve.solve_save(outfn, [], [], corner) return maxcorner = { 'slow_max': True, @@ -180,7 +142,7 @@ def run_corner( print('Delay on %d / %d' % (nonzeros, len(res.x))) if outfn: - save(outfn, res.x, names, corner) + timfuz_solve.solve_save(outfn, res.x, names, corner, verbose=verbose) def main(): diff --git a/fuzzers/007-timing/timfuz.py b/fuzzers/007-timing/timfuz.py index a3bc3a1e..540c4abc 100644 --- a/fuzzers/007-timing/timfuz.py +++ b/fuzzers/007-timing/timfuz.py @@ -107,6 +107,7 @@ corner_s2i = OrderedDict( def allow_zero_eqns(): + '''If true, allow a system of equations with no equations''' return os.getenv('ALLOW_ZERO_EQN', 'N') == 'Y' @@ -732,7 +733,7 @@ def row_sub_vars(row, sub_json, strict=False, verbose=False): print(row.items()) delvars = 0 - for k in sub_json['drop_names']: + for k in sub_json['zero_names']: try: del row[k] delvars += 1 @@ -908,3 +909,7 @@ def tilej_stats(tilej): print( ' %s: %u / %u solved, %u / %u covered' % (etype, solved, net, covered, net)) + +def load_bounds(bounds_csv, corner, ico=True): + Ads, b = loadc_Ads_b([bounds_csv], corner, ico=ico) + return Ads2bounds(Ads, b) diff --git a/fuzzers/007-timing/timfuz_solve.py b/fuzzers/007-timing/timfuz_solve.py index de1c5812..b3c43cfb 100644 --- a/fuzzers/007-timing/timfuz_solve.py +++ b/fuzzers/007-timing/timfuz_solve.py @@ -1,9 +1,10 @@ #!/usr/bin/env python3 -from timfuz import simplify_rows, loadc_Ads_b, index_names, A_ds2np, run_sub_json, print_eqns, Ads2bounds, instances, SimplifiedToZero, allow_zero_eqns +from timfuz import simplify_rows, loadc_Ads_b, index_names, A_ds2np, run_sub_json, print_eqns, Ads2bounds, instances, SimplifiedToZero, allow_zero_eqns, corner_s2i, acorner2csv from timfuz_massage import massage_equations import numpy as np import sys +import math def check_feasible(A_ub, b_ub): @@ -93,6 +94,44 @@ def filter_bounds(Ads, b, bounds, corner): return ret_Ads, ret_b +def solve_save(outfn, xvals, names, corner, save_zero=True, verbose=False): + # ballpark minimum actual observed delay is around 7 (carry chain) + # anything less than one is probably a solver artifact + delta = 0.5 + corneri = corner_s2i[corner] + + roundf = { + 'fast_max': math.ceil, + 'fast_min': math.floor, + 'slow_max': math.ceil, + 'slow_min': math.floor, + }[corner] + + print('Writing results') + zeros = 0 + with open(outfn, 'w') as fout: + # write as one variable per line + # this natively forms a bound if fed into linprog solver + fout.write('ico,fast_max fast_min slow_max slow_min,rows...\n') + for xval, name in zip(xvals, names): + row_ico = 1 + + if xval < delta: + if verbose: + print('WARNING: near 0 delay on %s: %0.6f' % (name, xval)) + zeros += 1 + if not save_zero: + continue + items = [str(row_ico), acorner2csv(roundf(xval), corneri)] + items.append('%u %s' % (1, name)) + fout.write(','.join(items) + '\n') + nonzeros = len(names) - zeros + print( + 'Wrote: %u / %u constrained delays, 
%u zeros' % + (nonzeros, len(names), zeros)) + assert nonzeros, 'Failed to estimate delay' + + def run( fns_in, corner, From 7fd5e77935387d95ac17c947c0f7879883ab20d9 Mon Sep 17 00:00:00 2001 From: John McMaster Date: Wed, 19 Sep 2018 13:49:23 -0700 Subject: [PATCH 09/10] timfuz: fix ZERO removal support Signed-off-by: John McMaster --- fuzzers/007-timing/csv_group2flat.py | 6 ++++-- fuzzers/007-timing/rref.py | 9 +++++++-- fuzzers/007-timing/solve_linprog.py | 3 ++- fuzzers/007-timing/timfuz.py | 1 + 4 files changed, 14 insertions(+), 5 deletions(-) diff --git a/fuzzers/007-timing/csv_group2flat.py b/fuzzers/007-timing/csv_group2flat.py index a9bd79ac..da0251dd 100644 --- a/fuzzers/007-timing/csv_group2flat.py +++ b/fuzzers/007-timing/csv_group2flat.py @@ -46,15 +46,17 @@ def gen_flat(fns_in, sub_json, corner=None): # should they be fixed 0? if zero_row: print('zero_row', len(group_zeros), len(violations)) + for zero in sub_json['zero_names']: + yield zero, zero_row for zero in group_zeros - violations: - print('zero', zero) yield zero, zero_row def run(fns_in, fnout, sub_json, corner=None, verbose=False): with open(fnout, 'w') as fout: fout.write('ico,fast_max fast_min slow_max slow_min,rows...\n') - for name, corners in sorted(list(gen_flat(fns_in, sub_json, corner=corner))): + for name, corners in sorted(list(gen_flat(fns_in, sub_json, + corner=corner))): row_ico = 1 items = [str(row_ico), corners2csv(corners)] items.append('%u %s' % (1, name)) diff --git a/fuzzers/007-timing/rref.py b/fuzzers/007-timing/rref.py index 18a60a67..be13c805 100644 --- a/fuzzers/007-timing/rref.py +++ b/fuzzers/007-timing/rref.py @@ -9,6 +9,7 @@ import sympy from collections import OrderedDict from fractions import Fraction + def rm_zero_cols(Ads, verbose=True): removed = OrderedSet() @@ -24,6 +25,7 @@ def rm_zero_cols(Ads, verbose=True): print(' Removing %s' % k) return removed + def fracr_quick(r): return [Fraction(numerator=int(x), denominator=1) for x in r] @@ -34,6 +36,7 @@ def fracm_quick(m): print('fracm_quick type: %s' % t) return [fracr_quick(r) for r in m] + class State(object): def __init__(self, Ads, zero_names=[]): self.Ads = Ads @@ -193,7 +196,8 @@ def run(fnout, fn_ins, simplify=False, corner=None, rm_zero=False, verbose=0): print('Loading data') assert len(fn_ins) > 0 - state = State.load(fn_ins, simplify=simplify, corner=corner, rm_zero=rm_zero) + state = State.load( + fn_ins, simplify=simplify, corner=corner, rm_zero=rm_zero) state_rref(state, verbose=verbose) state.print_stats() if fnout: @@ -212,7 +216,8 @@ def main(): parser.add_argument('--verbose', action='store_true', help='') parser.add_argument('--simplify', action='store_true', help='') parser.add_argument('--corner', default="slow_max", help='') - parser.add_argument('--rm-zero', action='store_true', help='Remove ZERO elements') + parser.add_argument( + '--rm-zero', action='store_true', help='Remove ZERO elements') parser.add_argument( '--speed-json', default='build_speed/speed.json', diff --git a/fuzzers/007-timing/solve_linprog.py b/fuzzers/007-timing/solve_linprog.py index e3e99c18..0bb0dd6f 100644 --- a/fuzzers/007-timing/solve_linprog.py +++ b/fuzzers/007-timing/solve_linprog.py @@ -142,7 +142,8 @@ def run_corner( print('Delay on %d / %d' % (nonzeros, len(res.x))) if outfn: - timfuz_solve.solve_save(outfn, res.x, names, corner, verbose=verbose) + timfuz_solve.solve_save( + outfn, res.x, names, corner, verbose=verbose) def main(): diff --git a/fuzzers/007-timing/timfuz.py b/fuzzers/007-timing/timfuz.py index 540c4abc..eb9d2530 
100644 --- a/fuzzers/007-timing/timfuz.py +++ b/fuzzers/007-timing/timfuz.py @@ -910,6 +910,7 @@ def tilej_stats(tilej): ' %s: %u / %u solved, %u / %u covered' % (etype, solved, net, covered, net)) + def load_bounds(bounds_csv, corner, ico=True): Ads, b = loadc_Ads_b([bounds_csv], corner, ico=ico) return Ads2bounds(Ads, b) From 0ddd9fd33c16ef97c63a695ad4fa7dfa2b12dd9f Mon Sep 17 00:00:00 2001 From: John McMaster Date: Wed, 19 Sep 2018 13:49:36 -0700 Subject: [PATCH 10/10] timfuz: QoR checker Signed-off-by: John McMaster --- fuzzers/007-timing/solve_qor.py | 60 +++++++++++++++++++++++++++++++++ 1 file changed, 60 insertions(+) create mode 100644 fuzzers/007-timing/solve_qor.py diff --git a/fuzzers/007-timing/solve_qor.py b/fuzzers/007-timing/solve_qor.py new file mode 100644 index 00000000..d3bb1eea --- /dev/null +++ b/fuzzers/007-timing/solve_qor.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python3 + +from timfuz import Benchmark, load_sub, load_bounds, loadc_Ads_b +import numpy as np + + +def run(fns_in, corner, bounds_csv, verbose=False): + print('Loading data') + Ads, borig = loadc_Ads_b(fns_in, corner, ico=True) + + bounds = load_bounds(bounds_csv, corner) + # verify is flattened + for k in bounds.keys(): + assert 'GROUP_' not in k, 'Must operate on flattened bounds' + + # compute our timing model delay at the given corner + bgots = [] + for row_ds in Ads: + delays = [n * bounds[x] for x, n in row_ds.items()] + bgots.append(sum(delays)) + + ses = (np.asarray(bgots) - np.asarray(borig))**2 + mse = (ses).mean(axis=None) + print('MSE aggregate: %0.1f' % mse) + print('Min SE: %0.1f' % min(ses)) + print('Max SE: %0.1f' % max(ses)) + + +def main(): + import argparse + + parser = argparse.ArgumentParser(description='Report a timing fit score') + + parser.add_argument('--verbose', action='store_true', help='') + parser.add_argument('--corner', required=True, default="slow_max", help='') + parser.add_argument( + '--bounds-csv', + required=True, + help='Previous solve result starting point') + parser.add_argument('fns_in', nargs='+', help='timing3.csv input files') + args = parser.parse_args() + # Store options in dict to ease passing through functions + bench = Benchmark() + + fns_in = args.fns_in + if not fns_in: + fns_in = glob.glob('specimen_*/timing3.csv') + + try: + run( + fns_in=fns_in, + corner=args.corner, + bounds_csv=args.bounds_csv, + verbose=args.verbose) + finally: + print('Exiting after %s' % bench) + + +if __name__ == '__main__': + main()
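For reference, the scoring step in solve_qor.py reduces to: predict each measured path's delay by summing the solved per-element delays weighted by how many times each element appears on the path, then take the squared error against the measured corner delay. Below is a minimal stand-alone sketch of that computation with made-up element names and delay values purely for illustration; the real inputs come from loadc_Ads_b and load_bounds.

```python
#!/usr/bin/env python3
import numpy as np

# Illustrative stand-ins for what loadc_Ads_b / load_bounds return.
# Each row maps a timing element to its multiplicity along one measured path.
Ads = [
    {'ELEM_A': 2, 'ELEM_B': 1},
    {'ELEM_A': 1, 'ELEM_B': 3},
]
borig = [310.0, 455.0]  # measured delays for those paths at one corner
bounds = {'ELEM_A': 120.0, 'ELEM_B': 65.0}  # solved per-element delays (flattened)

# Model prediction per path: sum of per-element delay times multiplicity.
bgots = [sum(n * bounds[x] for x, n in row_ds.items()) for row_ds in Ads]

# Squared error of the model against the measurements, as in solve_qor.run().
ses = (np.asarray(bgots) - np.asarray(borig)) ** 2
print('MSE aggregate: %0.1f' % ses.mean(axis=None))
print('Min SE: %0.1f' % ses.min())
print('Max SE: %0.1f' % ses.max())
```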