Merge pull request #899 from antmicro/routing-bels

Fuzzers: 007-timing: add routing BELs fuzzer
Karol Gugala 2019-07-03 05:36:28 +02:00 committed by GitHub
commit 219f0f0175
11 changed files with 235 additions and 20 deletions

fuzzers/007-timing/.gitignore (new file)

@@ -0,0 +1,2 @@
*.json
sdfs


@@ -1,10 +1,30 @@
SLICEL_SDFS = bel/build/CLBLL_L.sdf bel/build/CLBLL_R.sdf bel/build/CLBLM_L.sdf bel/build/CLBLM_R.sdf routing-bels/build/slicel.sdf
SLICEM_SDFS = bel/build/CLBLL_L.sdf bel/build/CLBLL_R.sdf bel/build/CLBLM_L.sdf bel/build/CLBLM_R.sdf routing-bels/build/slicem.sdf

run: all
all: bel/build/sdf
	touch run.ok

all: pushdb

clean:
	cd bel && $(MAKE) clean
	cd routing-bels && $(MAKE) clean

bel/build/sdf:
bel/build/sdf.ok:
	cd bel && $(MAKE)

routing-bels/build/sdf:
	cd routing-bels && $(MAKE)

mergesdfs: bel/build/sdf.ok routing-bels/build/sdf
	mkdir -p sdfs
	python3 ${XRAY_UTILS_DIR}/sdfmerge.py --sdfs $(SLICEM_SDFS) --site SLICEM --out sdfs/slicem.sdf
	python3 ${XRAY_UTILS_DIR}/sdfmerge.py --sdfs $(SLICEL_SDFS) --site SLICEL --out sdfs/slicel.sdf --json debu.json
	cp bel/build/*.sdf sdfs

pushdb: mergesdfs
	mkdir -p ${XRAY_DATABASE_DIR}/${XRAY_DATABASE}/timings
	cp sdfs/*.sdf ${XRAY_DATABASE_DIR}/${XRAY_DATABASE}/timings
	touch run.ok

cleandb:
	rm -rf ${XRAY_DATABASE_DIR}/${XRAY_DATABASE}/timings
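As a quick sanity check of what pushdb copies into the database, the merged SDFs can be read back with the same python-sdf-timing package the fuzzers already use. A minimal sketch, assuming sdfparse.parse() round-trips what sdfparse.emit() produces and that `make mergesdfs` has already populated sdfs/ (neither is part of this commit):

from sdf_timing import sdfparse

# Path relative to fuzzers/007-timing after `make mergesdfs`.
with open("sdfs/slicel.sdf") as fp:
    sdf = sdfparse.parse(fp.read())

# The structure mirrors the dict built by tim2sdf.py: cells -> celltype -> site instance.
for celltype, instances in sdf["cells"].items():
    print(celltype, len(instances), "instances")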


@@ -1,12 +1,12 @@
all: pushdb
all: build/sdf.ok

clean:
	rm -rf build

build/bel_timings.txt:
build/run.ok:
	bash runme.sh

build/fixup_timings: build/bel_timings.txt
build/fixup_timings: build/run.ok
	python3 fixup_timings_txt.py --txtin build/bel_timings.txt --txtout build/bel_timings.txt --site RAMBFIFO36E1 --slice BRAM_L --type timings
	python3 fixup_timings_txt.py --txtin build/bel_timings.txt --txtout build/bel_timings.txt --site RAMBFIFO36E1 --slice BRAM_R --type timings
	python3 fixup_timings_txt.py --txtin build/bel_pins.txt --txtout build/bel_pins.txt --site RAMBFIFO36E1 --slice BRAM_L --type pins
@@ -17,17 +17,7 @@ build/fixup_timings: build/bel_timings.txt
build/bel_timings.json: build/fixup_timings
	python3 tim2json.py --timings=build/bel_timings.txt --json=build/bel_timings.json --properties=build/bel_properties.txt --propertiesmap=properties_map.json --pinaliasmap=pin_alias_map.json --belpins=build/bel_pins.txt --sitepins=build/tile_pins.txt --debug true

build/sdf: build/bel_timings.json
build/sdf.ok: build/bel_timings.json
	python3 ${XRAY_UTILS_DIR}/makesdf.py --json=${PWD}/build/bel_timings.json --sdf=${PWD}/build
	touch build/sdf.ok

cleandb:
	rm -rf ${XRAY_DATABASE_DIR}/${XRAY_DATABASE}/timings

mergesdfs: build/sdf
	python3 ${XRAY_UTILS_DIR}/sdfmerge.py --sdfs build/CLBLL_L.sdf build/CLBLL_R.sdf build/CLBLM_L.sdf build/CLBLM_R.sdf --site SLICEM --out build/slicem.sdf
	python3 ${XRAY_UTILS_DIR}/sdfmerge.py --sdfs build/CLBLL_L.sdf build/CLBLL_R.sdf build/CLBLM_L.sdf build/CLBLM_R.sdf --site SLICEL --out build/slicel.sdf

pushdb: mergesdfs
	mkdir -p ${XRAY_DATABASE_DIR}/${XRAY_DATABASE}/timings
	cp build/*sdf ${XRAY_DATABASE_DIR}/${XRAY_DATABASE}/timings
	touch run.ok


@@ -0,0 +1 @@
*json


@@ -0,0 +1,17 @@
all: build/slicel.sdf build/slicem.sdf

clean:
	rm -rf build

build/slicel.sdf: build/run.ok
	python3 tim2sdf.py --timings build/slicel.txt --site SLICEL --sdf build/slicel.sdf

build/slicem.sdf: build/run.ok
	python3 tim2sdf.py --timings build/slicem.txt --site SLICEM --sdf build/slicem.sdf

build/run.ok:
	bash runme.sh

cleandb:
	rm -rf ${XRAY_DATABASE_DIR}/${XRAY_DATABASE}/timings


@@ -0,0 +1,13 @@
#!/bin/bash

set -ex

# Create build dir
export SRC_DIR=$PWD
export BUILD_DIR=build
mkdir -p $BUILD_DIR
cd $BUILD_DIR

${XRAY_VIVADO} -mode batch -source $SRC_DIR/runme.tcl

test -z "$(fgrep CRITICAL vivado.log)" && touch run.ok


@@ -0,0 +1,62 @@
source "$::env(XRAY_DIR)/utils/utils.tcl"

proc create_design {} {
    create_project -force -part $::env(XRAY_PART) design design

    read_verilog $::env(SRC_DIR)/top.v
    synth_design -top top -flatten_hierarchy none

    set_property -dict "PACKAGE_PIN $::env(XRAY_PIN_00) IOSTANDARD LVCMOS33" [get_ports di]
    set_property -dict "PACKAGE_PIN $::env(XRAY_PIN_01) IOSTANDARD LVCMOS33" [get_ports do]

    set_property CFGBVS VCCO [current_design]
    set_property CONFIG_VOLTAGE 3.3 [current_design]
    set_property BITSTREAM.GENERAL.PERFRAMECRC YES [current_design]
}

proc place_and_route_design {} {
    place_design
    route_design

    write_checkpoint -force design.dcp
}

proc dump_model_timings {timing_fp models} {
    set properties [list "DELAY" "FAST_MAX" "FAST_MIN" "SLOW_MAX" "SLOW_MIN"]

    foreach model $models {
        set timing_line {}
        lappend timing_line "$model"
        foreach property $properties {
            set value [get_property $property [get_speed_models -patterns $model]]
            lappend timing_line "$property:$value"
        }
        puts $timing_fp $timing_line
    }
}

proc dump {} {
    set slicel_fp [open "slicel.txt" w]
    set slicem_fp [open "slicem.txt" w]

    set slicel_speed_models [get_speed_models -patterns *_sl_*]
    set slicem_speed_models [get_speed_models -patterns *_sm_*]

    dump_model_timings $slicel_fp $slicel_speed_models
    dump_model_timings $slicem_fp $slicem_speed_models

    close $slicel_fp
    close $slicem_fp
}

proc run {} {
    create_design
    place_and_route_design
    dump

    write_bitstream -force design.bit
}

run

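For reference, each line that dump_model_timings writes to slicel.txt/slicem.txt is a speed-model name followed by the five queried properties as PROPERTY:value pairs. A minimal Python sketch of that line format, not part of the commit; the model name and delay values are invented placeholders, not real speed data:

properties = {"DELAY": 0.043, "FAST_MAX": 0.036, "FAST_MIN": 0.012,
              "SLOW_MAX": 0.048, "SLOW_MIN": 0.015}   # placeholder values
model = "bel_d_sl_somemux_in0_out"                     # placeholder model name
line = " ".join([model] + ["%s:%s" % (k, v) for k, v in properties.items()])
print(line)
# bel_d_sl_somemux_in0_out DELAY:0.043 FAST_MAX:0.036 FAST_MIN:0.012 SLOW_MAX:0.048 SLOW_MIN:0.015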

@@ -0,0 +1,105 @@
#!/usr/bin/env python3
import argparse
import json

from sdf_timing import sdfparse
from sdf_timing import utils


def generate_sdf(timings, sdffile):
    sdf_data = sdfparse.emit(timings, timescale='1ns')
    with open(sdffile, 'w') as fp:
        fp.write(sdf_data)


def add_timing_paths_entry(paths, type, values):
    paths[type] = dict()
    paths[type]['min'] = values[0]
    paths[type]['avg'] = values[1]
    paths[type]['max'] = values[2]
    return paths


def read_raw_timings(fin, site):
    timings = dict()
    timings['cells'] = dict()
    with open(fin, "r") as f:
        for line in f:
            raw_data = line.split()
            speed_model = raw_data[0]
            if speed_model.startswith('bel_d_'):
                speed_model = speed_model[6:]
            speed_model_split = speed_model.split('_')
            interconn_input = "_".join(speed_model_split[1:-1])
            interconn_output = speed_model_split[-1]

            celltype = "routing_bel"
            if celltype not in timings['cells']:
                timings['cells'][celltype] = dict()
            cellsite = site + '/' + interconn_output.upper()
            if cellsite not in timings['cells'][celltype]:
                timings['cells'][celltype][cellsite] = dict()
            if speed_model not in timings['cells'][celltype][cellsite]:
                timings['cells'][celltype][cellsite][speed_model] = dict()

            delays = dict()
            # each timing entry reports 5 delays
            for d in range(0, 5):
                (t, v) = raw_data[d + 1].split(':')
                delays[t] = v

            # create entry for sdf writer
            iport = dict()
            iport['port'] = interconn_input
            iport['port_edge'] = None
            oport = dict()
            oport['port'] = interconn_output
            oport['port_edge'] = None

            paths = dict()
            paths = add_timing_paths_entry(
                paths, 'slow', [delays['SLOW_MIN'], None, delays['SLOW_MAX']])
            paths = add_timing_paths_entry(
                paths, 'fast', [delays['FAST_MIN'], None, delays['FAST_MAX']])

            if speed_model.endswith('diff'):
                iport['port'] = "_".join(speed_model_split[1:])
                iport['port_edge'] = None
                timings['cells'][celltype][cellsite][
                    speed_model] = utils.add_device(iport, paths)
            else:
                timings['cells'][celltype][cellsite][
                    speed_model] = utils.add_interconnect(iport, oport, paths)

            timings['cells'][celltype][cellsite][speed_model][
                'is_absolute'] = True

    return timings


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--timings', type=str, help='Raw timing input file')
    parser.add_argument('--sdf', type=str, help='output sdf file')
    parser.add_argument(
        '--site', type=str, help='Site of the processed timings')
    parser.add_argument(
        '--debug', type=bool, default=False, help='Enable debug json dumps')

    args = parser.parse_args()

    timings = read_raw_timings(args.timings, args.site)

    if args.debug:
        with open("debug" + args.site + ".json", 'w') as fp:
            json.dump(timings, fp, indent=4, sort_keys=True)

    generate_sdf(timings, args.sdf)


if __name__ == '__main__':
    main()

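To make the parsing above concrete, here is a minimal sketch (not part of the commit) of how read_raw_timings decomposes one dumped line; the speed-model name and delay values are the same invented placeholders used earlier, not real speed data:

line = "bel_d_sl_somemux_in0_out DELAY:0.043 FAST_MAX:0.036 FAST_MIN:0.012 SLOW_MAX:0.048 SLOW_MIN:0.015"

raw_data = line.split()
speed_model = raw_data[0][len('bel_d_'):]        # "sl_somemux_in0_out"
parts = speed_model.split('_')
interconn_input = "_".join(parts[1:-1])          # "somemux_in0"
interconn_output = parts[-1]                     # "out"
delays = dict(field.split(':') for field in raw_data[1:])
cellsite = 'SLICEL' + '/' + interconn_output.upper()   # "SLICEL/OUT"

print(cellsite, interconn_input, "->", interconn_output, delays['SLOW_MAX'])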

@@ -0,0 +1,5 @@
module top(input di, output do);
assign do = di;
endmodule

@@ -1 +1 @@
Subproject commit 95010bcea451ad8a5bf7308a80001d16012ae116
Subproject commit 679152c9e6b92627cffba186618ab98ac29b627b


@@ -55,7 +55,7 @@ def main():
        timings_list.append(timing)

    merged_sdf = merge(timings_list, args.site)
    open(args.out, 'w').write(sdfparse.emit(merged_sdf))
    open(args.out, 'w').write(sdfparse.emit(merged_sdf, timescale='1ns'))

    if args.json is not None:
        with open(args.json, 'w') as fp: