Generate tile types, site types, tilegrid, tileconn for entire part.

Signed-off-by: Keith Rothman <537074+litghost@users.noreply.github.com>
This commit is contained in:
Keith Rothman 2018-09-19 14:49:15 -07:00
parent 31013a3e43
commit 85e14f81a1
29 changed files with 2502 additions and 0 deletions

5
.gitignore vendored
View File

@ -3,3 +3,8 @@
# Ignore database directories _except_ for their settings
database/*/*
!database/*/settings.sh
**/specimen_*
**/output
run.ok
__pycache__
*.pyc

View File

@ -0,0 +1,26 @@
# Number of specimens to generate (specimen_001 ... specimen_NNN).
N := 1
SPECIMENS := $(addprefix specimen_,$(shell seq -f '%03.0f' $(N)))
SPECIMENS_OK := $(addsuffix /OK,$(SPECIMENS))

# If a recipe fails part way, delete its (possibly half-written) target so a
# rerun does not see a stale OK stamp as up to date.
.DELETE_ON_ERROR:

# Build every specimen; the OK stamp files are the real products.
database: $(SPECIMENS_OK)
	true

# This fuzzer produces no database files to push.
pushdb:
	true

# One specimen per OK stamp: run generate.sh in the specimen directory and
# stamp it on success.
$(SPECIMENS_OK):
	bash generate.sh $(subst /OK,,$@)
	touch $@

run:
	$(MAKE) clean
	$(MAKE) database
	$(MAKE) pushdb
	touch run.ok

clean:
	rm -rf specimen_[0-9][0-9][0-9]/ run.ok

.PHONY: database pushdb run clean

View File

@ -0,0 +1,5 @@
#!/bin/bash -x
# Generate one specimen: load the per-specimen fuzzer environment and run the
# Vivado batch script from the directory above.
# Abort on the first failing command so make does not stamp a broken specimen
# as OK (the original script only had -x and always reported success of the
# last command).
set -e
source ${XRAY_GENHEADER}
vivado -mode batch -source ../generate.tcl

View File

@ -0,0 +1,19 @@
# Dump, for every PIP in the part, the ordered wire lists of the nodes
# reachable downhill and uphill of that PIP.  Each output line has the form:
#   <pip> <node> <wire> <wire> ...
# A pin-planning (IO) design is enough to query the routing graph.
create_project -force -part $::env(XRAY_PART) design design
set_property design_mode PinPlanning [current_fileset]
open_io_design -name io_1
set downhill_fp [open downhill_wires.txt w]
set uphill_fp [open uphill_wires.txt w]
#set_param tcl.collectionResultDisplayLimit 0
foreach pip [get_pips] {
# -from/-to order the wires relative to the PIP along the node.
foreach downhill_node [get_nodes -downhill -of_object $pip] {
set ordered_downhill_wires [get_wires -from $pip -of_object $downhill_node]
puts $downhill_fp "$pip $downhill_node $ordered_downhill_wires"
}
foreach uphill_node [get_nodes -uphill -of_object $pip] {
set ordered_uphill_wires [get_wires -to $pip -of_object $uphill_node]
puts $uphill_fp "$pip $uphill_node $ordered_uphill_wires"
}
}
close $downhill_fp
close $uphill_fp

View File

@ -0,0 +1,26 @@
# Number of specimens to generate (specimen_001 ... specimen_NNN).
N := 1
SPECIMENS := $(addprefix specimen_,$(shell seq -f '%03.0f' $(N)))
SPECIMENS_OK := $(addsuffix /OK,$(SPECIMENS))

# If a recipe fails part way, delete its (possibly half-written) target so a
# rerun does not see a stale OK stamp as up to date.
.DELETE_ON_ERROR:

database: $(SPECIMENS_OK)
	true

# Publish the element-count CSV into the part database.
pushdb:
	cp specimen_001/*.csv ${XRAY_DATABASE_DIR}/$(XRAY_DATABASE)/

# One specimen per OK stamp: run generate.sh and stamp on success.
$(SPECIMENS_OK):
	bash generate.sh $(subst /OK,,$@)
	touch $@

run:
	$(MAKE) clean
	$(MAKE) database
	$(MAKE) pushdb
	touch run.ok

clean:
	rm -rf specimen_[0-9][0-9][0-9]/ run.ok

.PHONY: database pushdb run clean

View File

@ -0,0 +1,5 @@
#!/bin/bash -x
# Generate one specimen: load the per-specimen fuzzer environment and run the
# Vivado batch script from the directory above.
# Abort on the first failing command so make does not stamp a broken specimen
# as OK.
set -e
source ${XRAY_GENHEADER}
vivado -mode batch -source ../generate.tcl

View File

@ -0,0 +1,27 @@
# This script dumps the count of each major object count for sanity checking.
#
# For large parts, this may take a while, hence why it is a separate generate
# step.
create_project -force -part $::env(XRAY_PART) design design
set_property design_mode PinPlanning [current_fileset]
open_io_design -name io_1
set fp [open element_counts.csv w]
puts $fp "type,count"
puts $fp "tiles,[llength [get_tiles]]"
set sites [get_sites]
set num_site_pins 0
set num_site_pips 0
puts $fp "sites,[llength $sites]"
# Site pins and site PIPs must be counted per site; there is no global getter.
foreach site $sites {
set num_site_pins [expr $num_site_pins + [llength [get_site_pins -of_objects $site]]]
set num_site_pips [expr $num_site_pips + [llength [get_site_pips -of_objects $site]]]
}
puts $fp "site_pins,$num_site_pins"
puts $fp "site_pips,$num_site_pips"
puts $fp "pips,[llength [get_pips]]"
puts $fp "package_pins,[llength [get_package_pins]]"
puts $fp "nodes,[llength [get_nodes]]"
puts $fp "wires,[llength [get_wires]]"
close $fp

View File

@ -0,0 +1,30 @@
# Number of specimens to generate (specimen_001 ... specimen_NNN).
N := 1
SPECIMENS := $(addprefix specimen_,$(shell seq -f '%03.0f' $(N)))
SPECIMENS_OK := $(addsuffix /OK,$(SPECIMENS))

# If a recipe fails part way, delete its (possibly half-written) target so a
# rerun does not see a stale OK stamp as up to date.
.DELETE_ON_ERROR:

database: $(SPECIMENS_OK)
	true

# Publish the reduced tile/site/grid JSON files into the part database.
# The tile_type_* glob also matches the intermediate per-site-type files,
# which are removed again after the copy; -f keeps the rule from failing
# when no such intermediates exist.
pushdb:
	cp output/tile_type_*.json ${XRAY_DATABASE_DIR}/$(XRAY_DATABASE)/
	rm -f ${XRAY_DATABASE_DIR}/$(XRAY_DATABASE)/tile_type_*_site_type_*.json
	cp output/site_type_*.json ${XRAY_DATABASE_DIR}/$(XRAY_DATABASE)/
	cp output/tileconn.json ${XRAY_DATABASE_DIR}/$(XRAY_DATABASE)/
	cp output/tilegrid.json ${XRAY_DATABASE_DIR}/$(XRAY_DATABASE)/

# One specimen per OK stamp: run generate.sh and stamp on success.
$(SPECIMENS_OK):
	bash generate.sh $(subst /OK,,$@)
	touch $@

run:
	$(MAKE) clean
	$(MAKE) database
	$(MAKE) pushdb
	touch run.ok

clean:
	rm -rf specimen_[0-9][0-9][0-9]/ output/ run.ok

.PHONY: database pushdb run clean

View File

@ -0,0 +1,41 @@
import json

# Load the flat list of (node, raw_node, generated_nodes) records produced by
# the node generation check.
with open('output/error_nodes.json') as f:
    flat_error_nodes = json.load(f)

# Group the records by node, keeping the raw wire set and every distinct
# generated partition (stored as sorted tuples so they are hashable).
error_nodes = {}
for node, raw_node, generated_nodes in flat_error_nodes:
    if node not in error_nodes:
        error_nodes[node] = {
            'raw_node': set(raw_node),
            'generated_nodes': set(),
        }
    # The raw wire set must agree across records for the same node.
    assert error_nodes[node]['raw_node'] == set(raw_node)
    error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes)))

for node, error in error_nodes.items():
    # The union of all generated partitions must cover the raw node exactly.
    combined_generated_nodes = set()
    for generated_node in error['generated_nodes']:
        combined_generated_nodes |= set(generated_node)
    assert error['raw_node'] == combined_generated_nodes, (node, error)

    # Treat the largest partition as the good reconstruction; any other
    # partition is a bad fragment.
    good_node = max(error['generated_nodes'], key=len)
    bad_nodes = error['generated_nodes'] - set((good_node,))
    if max(len(generated_node) for generated_node in bad_nodes) > 1:
        # A multi-wire fragment is an unexplained split: hard failure.
        assert False, node
    else:
        # Single-wire fragments on PCIE wires are a known artifact; only
        # report fragments containing at least one non-PCIE wire.
        not_pcie = False
        for generated_node in bad_nodes:
            for wire in generated_node:
                if not wire.startswith('PCIE'):
                    not_pcie = True
        if not_pcie:
            print(repr((node, tuple(map(tuple, bad_nodes)))))

View File

@ -0,0 +1,121 @@
""" Tool to cleanup site pins JSON dumps.
This tool has two behaviors. This first is to rename site names from global
coordinates to site local coordinates. The second is remove the tile prefix
from node names.
For example CLBLM_L_X8Y149 contains two sites named SLICE_X10Y149 and
SLICE_X11Y149. SLICE_X10Y149 becomes X0Y0 and SLICE_X11Y149 becomes X1Y0.
"""
from __future__ import print_function
import json
import json5
import re
import sys
import copy
# All site names appear to follow the pattern <type>_X<abs coord>Y<abs coord>.
# Generally speaking, only the tile relative coordinates are required to
# assemble arch defs, so we re-origin the coordinates to be relative to the tile
# (e.g. start at X0Y0) and discard the prefix from the name.
# Groups: (1) site type prefix, (2) X coordinate, (3) Y coordinate.
SITE_COORDINATE_PATTERN = re.compile('^(.+)_X([0-9]+)Y([0-9]+)$')
def find_origin_coordinate(sites):
    """Return the minimum (x, y) site coordinates found in ``sites``.

    The minima serve as the origin when re-expressing site coordinates
    relative to the tile.  An empty site list yields (0, 0).
    """
    if not sites:
        return 0, 0

    x_coords = []
    y_coords = []
    for site in sites:
        match = SITE_COORDINATE_PATTERN.match(site['name'])
        assert match is not None, site
        x_coords.append(int(match.group(2)))
        y_coords.append(int(match.group(3)))

    return min(x_coords), min(y_coords)
def create_site_pin_to_wire_maps(tile_name, nodes):
    """Map each site pin name to the tile-local wire of its node.

    Only nodes attached to at least one site pin are considered.  Each such
    node must contain exactly one wire that belongs to ``tile_name`` (wires
    are prefixed with "<tile>/"), and a site pin may appear on only one node.
    """
    # Wires belonging to this tile carry this prefix; routing resources do not.
    prefix = tile_name + '/'

    pin_to_wire = {}
    for node in nodes:
        if not node['site_pins']:
            continue
        local_wires = [
            wire for wire in node['wires'] if wire.startswith(prefix)
        ]
        assert len(local_wires) == 1, (node, prefix)
        for site_pin in node["site_pins"]:
            assert site_pin not in pin_to_wire
            pin_to_wire[site_pin] = local_wires[0]
    return pin_to_wire
def main():
    """Read a tile site_pins JSON5 dump from stdin and write cleaned JSON to
    stdout.

    Site names are rewritten from global coordinates to tile-local X#Y#
    coordinates, each site pin is attached to its tile-local wire (when the
    node map provides one), and the site-name prefix is stripped from the
    pin names.  Unconnected site pins are reported on stderr.
    """
    site_pins = json5.load(sys.stdin)
    output_site_pins = {}
    output_site_pins["tile_type"] = site_pins["tile_type"]
    # Deep copy so the mutations below never alias the parsed input.
    output_site_pins["sites"] = copy.deepcopy(site_pins["sites"])

    site_pin_to_wires = create_site_pin_to_wire_maps(
        site_pins['tile_name'], site_pins['nodes'])

    min_x_coord, min_y_coord = find_origin_coordinate(site_pins['sites'])

    for site in output_site_pins['sites']:
        orig_site_name = site['name']
        coordinate = SITE_COORDINATE_PATTERN.match(orig_site_name)
        x_coord = int(coordinate.group(2))
        y_coord = int(coordinate.group(3))
        # Re-origin to tile-local coordinates and keep the type prefix around.
        site['name'] = 'X{}Y{}'.format(
            x_coord - min_x_coord, y_coord - min_y_coord)
        site['prefix'] = coordinate.group(1)
        site['x_coord'] = x_coord - min_x_coord
        site['y_coord'] = y_coord - min_y_coord
        for site_pin in site['site_pins']:
            assert site_pin['name'].startswith(orig_site_name + '/')
            if site_pin['name'] in site_pin_to_wires:
                site_pin['wire'] = site_pin_to_wires[site_pin['name']]
            else:
                # Fixed typos in the original warning text
                # ("instaces" -> "instances", "has" -> "have").
                print(
                    (
                        '***WARNING***: Site pin {} for tile type {} is not connected, '
                        'make sure all instances of this tile type have this site_pin '
                        'disconnected.').format(
                        site_pin['name'], site_pins['tile_type']),
                    file=sys.stderr)
            # Strip the "<site>/" prefix from the pin name.
            site_pin['name'] = site_pin['name'][len(orig_site_name) + 1:]

    json.dump(output_site_pins, sys.stdout, indent=2)
    sys.stdout.write('\n')

View File

@ -0,0 +1,261 @@
import argparse
import datetime
import progressbar
import json
import os.path
import prjxray.lib
import pickle
import collections
def build_node_index(fname):
    """Index a wire dump file: node name -> list of byte offsets of the lines
    mentioning that node.

    Each line of the dump has the form "<pip> <node> <wire> ...".  The byte
    offsets allow random access later via seek (see read_node).
    """
    node_index = {}
    with open(fname, 'rb') as f:
        # Measure the file size first so the progress bar has a maximum.
        f.seek(0, 2)
        total_bytes = f.tell()
        f.seek(0, 0)
        with progressbar.ProgressBar(max_value=total_bytes) as bar:
            line_start = 0
            for line in f:
                parts = line.decode('utf8').split(' ')
                pip, node = parts[0:2]
                node_index.setdefault(node, []).append(line_start)
                line_start = f.tell()
                bar.update(line_start)
    return node_index
def read_node(expected_node, wire_file, node_index):
    """Yield the ordered wire list of every line recorded for expected_node.

    ``node_index`` holds byte offsets into ``wire_file`` (as produced by
    build_node_index); each referenced line must name expected_node as its
    node, otherwise the index is inconsistent and an assertion fires.
    """
    with open(wire_file, 'rb') as f:
        for offset in node_index:
            f.seek(offset, 0)
            fields = f.readline().decode('utf8').strip().split(' ')
            pip, node = fields[0:2]
            assert node == expected_node, repr((node, expected_node, offset))
            yield fields[2:]
def generate_edges(graph, root, graph_nodes):
    """Walk from root along single-successor wires and record one edge.

    The traversed wire chain is appended to graph['edges'].  When the walk
    reaches a fan-out (more than one outbound wire) the fork point and its
    neighbors are recorded in graph['joins'] instead of continuing.

    Note: graph_nodes is consumed destructively — the back-pointer to the
    previously visited wire is removed at every step.
    """
    edge = [root]
    previous = None
    while True:
        # Remove the back-edge so the walk never reverses direction.
        graph_nodes[root].discard(previous)
        outbound = graph_nodes[root]

        if len(outbound) > 1:
            # Fork: terminate this edge and record the join point.
            graph['edges'].append(edge)
            graph['joins'].setdefault(root, set()).update(outbound)
            for neighbor in graph_nodes[root]:
                graph['joins'].setdefault(neighbor, set()).add(root)
            return

        if len(outbound) == 0:
            # Dead end: terminate this edge.
            graph['edges'].append(edge)
            return

        (next_root,) = outbound
        edge.append(next_root)
        previous, root = root, next_root
def create_ordered_wires_for_node(node, wires_in_node, downhill, uphill):
    """Reconstruct the ordered wire structure of a node.

    ``downhill``/``uphill`` are iterables of ordered wire sequences observed
    from PIPs.  Returns a dict with 'edges' (linear wire runs), 'joins'
    (fork points and their neighbors) and, for non-trivial nodes, 'wires'
    (the full wire list of the node).
    """
    # A node of at most two wires is trivially ordered.
    if len(wires_in_node) <= 2:
        return {'edges': [wires_in_node], 'joins': {}}

    downhill = set(tuple(seq) for seq in downhill)
    uphill = set(tuple(seq) for seq in uphill)

    # Sequence endpoints are candidate walk roots; collect every wire seen.
    roots = set()
    all_wires = set()
    for group in (downhill, uphill):
        for seq in group:
            if len(seq) > 0:
                roots.update((seq[0], seq[-1]))
                all_wires.update(seq)
    assert len(wires_in_node) >= len(all_wires)
    if len(all_wires) <= 2:
        return {'edges': tuple(all_wires), 'joins': {}}

    # Build an undirected adjacency map: every wire points at its immediate
    # neighbors within any observed sequence.
    graph_nodes = dict((wire, set()) for wire in all_wires)
    for wire in all_wires:
        for seq in list(downhill) + list(uphill):
            try:
                idx = seq.index(wire)
            except ValueError:
                continue
            if idx + 1 < len(seq):
                graph_nodes[wire].add(seq[idx + 1])
            if idx - 1 >= 0:
                graph_nodes[wire].add(seq[idx - 1])

    # Walk from each root that still has neighbors (walks consume the map).
    graph = {'edges': [], 'joins': {}}
    while roots:
        root = roots.pop()
        if len(graph_nodes[root]) > 0:
            generate_edges(graph, root, graph_nodes)

    # Dedup edges that are identical up to reversal by keying each edge on
    # its lexicographically smaller orientation.
    final_edges = set()
    for edge in graph['edges']:
        forward = tuple(edge)
        backward = tuple(reversed(edge))
        if forward > backward:
            final_edges.add((backward, forward))
        else:
            final_edges.add((forward, backward))
    edges = [pair[0] for pair in final_edges]

    # Index every wire occurrence as (position, edge) for the containment
    # check below.
    element_index = {}
    for edge in edges:
        for idx, element in enumerate(edge):
            element_index.setdefault(element, []).append((idx, edge))

    # Keep an edge only if no other edge contains the same wire run between
    # this edge's two endpoints.
    new_edges = []
    for edge in edges:
        starts = element_index[edge[0]]
        ends = element_index[edge[-1]]
        found_any = False
        for start_idx, other_edge in starts:
            if other_edge is edge:
                continue
            for end in ends:
                if other_edge is not end[1]:
                    continue
                found_any = True
                end_idx = end[0]
                # Slice other_edge between the matching endpoints (in either
                # direction) and compare the interior elements.
                step = -1 if start_idx > end_idx else 1
                stop = end_idx + step
                window = slice(start_idx, stop if stop >= 0 else None, step)
                if edge != other_edge[window]:
                    new_edges.append(edge)
        if not found_any:
            new_edges.append(edge)

    output = {
        'edges': new_edges,
        'joins': dict((key, tuple(value))
                      for key, value in graph['joins'].items()),
        'wires': wires_in_node,
    }

    # Accumulate the wires present in the output (kept from the original
    # implementation; the set is not otherwise used here).
    all_wires_in_output = set()
    for edge in output['edges']:
        all_wires_in_output |= set(edge)
    for element in output['joins']:
        all_wires_in_output.add(element)

    return output
def main():
    """Build output/node_tree.json: per-node ordered wire structure.

    Combines the raw dump (root.csv + node files) with the ordered wire
    dumps from the 072-ordered_wires fuzzer.  Expensive intermediate
    results (node lookup, wire<->node index) are cached as pickles in the
    output directory.
    """
    parser = argparse.ArgumentParser(description="")
    parser.add_argument('--dump_all_root_dir', required=True)
    parser.add_argument('--ordered_wires_root_dir', required=True)
    parser.add_argument('--output_dir', required=True)
    args = parser.parse_args()

    # Ordered wire dumps produced by the 072-ordered_wires fuzzer.
    downhill_wires = os.path.join(
        args.ordered_wires_root_dir, 'downhill_wires.txt')
    uphill_wires = os.path.join(
        args.ordered_wires_root_dir, 'uphill_wires.txt')
    assert os.path.exists(downhill_wires)
    assert os.path.exists(uphill_wires)

    print('{} Reading root.csv'.format(datetime.datetime.now()))
    tiles, nodes = prjxray.lib.read_root_csv(args.dump_all_root_dir)

    print('{} Loading node<->wire mapping'.format(datetime.datetime.now()))
    node_lookup = prjxray.lib.NodeLookup()
    node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle')
    if os.path.exists(node_lookup_file):
        # Reuse the cached lookup from a previous run.
        node_lookup.load_from_file(node_lookup_file)
    else:
        node_lookup.load_from_root_csv(nodes)
        node_lookup.save_to_file(node_lookup_file)

    wire_index_file = os.path.join(args.output_dir, 'wire_index.pickle')
    if os.path.exists(wire_index_file):
        print('{} Reading wire<->node index'.format(datetime.datetime.now()))
        with open(wire_index_file, 'rb') as f:
            wire_index = pickle.load(f)
        downhill_wire_node_index = wire_index['downhill']
        uphill_wire_node_index = wire_index['uphill']
    else:
        print('{} Creating wire<->node index'.format(datetime.datetime.now()))
        downhill_wire_node_index = build_node_index(downhill_wires)
        uphill_wire_node_index = build_node_index(uphill_wires)
        with open(wire_index_file, 'wb') as f:
            pickle.dump({
                'downhill': downhill_wire_node_index,
                'uphill': uphill_wire_node_index,
            }, f)

    print('{} Creating node tree'.format(datetime.datetime.now()))
    node_trees = collections.OrderedDict()
    for node in progressbar.progressbar(sorted(node_lookup.nodes)):
        node_trees[node] = create_ordered_wires_for_node(
            node,
            tuple(wire['wire'] for wire in node_lookup.nodes[node]),
            tuple(read_node(node, downhill_wires,
                            downhill_wire_node_index.get(node, []))),
            tuple(read_node(node, uphill_wires,
                            uphill_wire_node_index.get(node, []))))

    print('{} Writing node tree'.format(datetime.datetime.now()))
    with open(os.path.join(args.output_dir, 'node_tree.json'), 'w') as f:
        json.dump(node_trees, f, indent=2)

View File

@ -0,0 +1,7 @@
#!/bin/bash -x
# Generate one specimen and then run the post-dump reduction from the fuzzer
# root directory.
# Abort on the first failing command so a broken Vivado run never reaches the
# reduction step or gets stamped as OK.
set -e
source ${XRAY_GENHEADER}
vivado -mode batch -source ../generate.tcl
cd .. && ./generate_after_dump.sh

View File

@ -0,0 +1,137 @@
# Dump a JSON5 description of the entire part: one file per tile (sites,
# site pins, site PIPs, package pins, PIPs, wires) and one file per node
# (its wires).  root.csv records the type and file name of every JSON5
# file emitted, and is the entry point for the Python reduction scripts.
create_project -force -part $::env(XRAY_PART) design design
set_property design_mode PinPlanning [current_fileset]
open_io_design -name io_1
set root_fp [open root.csv w]
puts $root_fp "filetype,subtype,filename"
# Emit one JSON5 file per tile.
foreach tile [get_tiles] {
set fname tile_$tile.json5
set tile_type [get_property TYPE $tile]
puts $root_fp "tile,$tile_type,$fname"
set fp [open $fname w]
puts $fp "\{"
puts $fp "\t\"tile\": \"$tile\","
# tile properties:
# CLASS COLUMN DEVICE_ID FIRST_SITE_ID GRID_POINT_X GRID_POINT_Y INDEX
# INT_TILE_X INT_TILE_Y IS_CENTER_TILE IS_DCM_TILE IS_GT_CLOCK_SITE_TILE
# IS_GT_SITE_TILE NAME NUM_ARCS NUM_SITES ROW SLR_REGION_ID
# TILE_PATTERN_IDX TILE_TYPE TILE_TYPE_INDEX TILE_X TILE_Y TYPE
puts $fp "\t\"type\": \"$tile_type\","
puts $fp "\t\"x\": [get_property GRID_POINT_X $tile],"
puts $fp "\t\"y\": [get_property GRID_POINT_Y $tile],"
puts $fp "\t\"sites\": \["
foreach site [get_sites -of_objects $tile] {
# site properties:
# ALTERNATE_SITE_TYPES CLASS CLOCK_REGION IS_BONDED IS_CLOCK_BUFFER
# IS_CLOCK_PAD IS_GLOBAL_CLOCK_BUFFER IS_GLOBAL_CLOCK_PAD IS_PAD
# IS_REGIONAL_CLOCK_BUFFER IS_REGIONAL_CLOCK_PAD IS_RESERVED IS_TEST
# IS_USED MANUAL_ROUTING NAME NUM_ARCS NUM_BELS NUM_INPUTS NUM_OUTPUTS
# NUM_PINS PRIMITIVE_COUNT PROHIBIT PROHIBIT_FROM_PERSIST RPM_X RPM_Y
# SITE_PIPS SITE_TYPE
puts $fp "\t\t\{"
puts $fp "\t\t\t\"site\":\"$site\","
puts $fp "\t\t\t\"type\":\"[get_property SITE_TYPE $site]\","
puts $fp "\t\t\t\"site_pins\": \["
foreach site_pin [get_site_pins -of_objects $site] {
# site_pin properties:
# CLASS DIRECTION INDEX INDEX_IN_BUS INDEX_IN_SITE INDEX_IN_TILE IS_BAD
# IS_INPUT IS_OUTPUT IS_PART_OF_BUS IS_TEST IS_USED NAME SITE_ID
# SPEED_INDEX
puts $fp "\t\t\t\{"
puts $fp "\t\t\t\t\"site_pin\":\"$site_pin\","
puts $fp "\t\t\t\t\"index_in_site\":\"[get_property INDEX_IN_SITE $site_pin]\","
puts $fp "\t\t\t\t\"direction\":\"[get_property DIRECTION $site_pin]\","
# A site pin may be unrouted; emit null rather than an empty string.
set site_pin_node [get_nodes -of_objects $site_pin]
if {[llength $site_pin_node] == 0} {
puts $fp "\t\t\t\t\"node\":null,"
} else {
puts $fp "\t\t\t\t\"node\":\"$site_pin_node\","
}
puts $fp "\t\t\t\},"
}
puts $fp "\t\t\t\],"
puts $fp "\t\t\t\"site_pips\": \["
foreach site_pip [get_site_pips -of_objects $site] {
puts $fp "\t\t\t\{"
# site_pips properties:
# CLASS FROM_PIN IS_FIXED IS_USED NAME SITE TO_PIN
puts $fp "\t\t\t\t\"site_pip\":\"$site_pip\","
puts $fp "\t\t\t\t\"to_pin\":\"[get_property TO_PIN $site_pip]\","
puts $fp "\t\t\t\t\"from_pin\":\"[get_property FROM_PIN $site_pip]\","
puts $fp "\t\t\t\},"
}
puts $fp "\t\t\t\],"
puts $fp "\t\t\t\"package_pins\": \["
foreach package_pin [get_package_pins -of_objects $site] {
puts $fp "\t\t\t\t\{"
puts $fp "\t\t\t\t\t\"package_pin\":\"$package_pin\","
puts $fp "\t\t\t\t\},"
}
puts $fp "\t\t\t\],"
puts $fp "\t\t\},"
}
puts $fp "\t\],"
puts $fp "\t\"pips\": \["
foreach pip [get_pips -of_objects $tile] {
# pip properties:
# CAN_INVERT CLASS IS_BUFFERED_2_0 IS_BUFFERED_2_1 IS_DIRECTIONAL
# IS_EXCLUDED_PIP IS_FIXED_INVERSION IS_INVERTED IS_PSEUDO IS_SITE_PIP
# IS_TEST_PIP NAME SPEED_INDEX TILE
puts $fp "\t\t\{"
puts $fp "\t\t\t\"pip\":\"$pip\","
puts $fp "\t\t\t\"src_wire\":\"[get_wires -uphill -of_objects $pip]\","
puts $fp "\t\t\t\"dst_wire\":\"[get_wires -downhill -of_objects $pip]\","
puts $fp "\t\t\t\"is_pseudo\":\"[get_property IS_PSEUDO $pip]\","
puts $fp "\t\t\t\"is_directional\":\"[get_property IS_DIRECTIONAL $pip]\","
puts $fp "\t\t\t\"can_invert\":\"[get_property CAN_INVERT $pip]\","
puts $fp "\t\t\},"
}
puts $fp "\t\],"
puts $fp "\t\"wires\": \["
foreach wire [get_wires -of_objects $tile] {
# wire properties:
# CLASS COST_CODE ID_IN_TILE_TYPE IS_CONNECTED IS_INPUT_PIN IS_OUTPUT_PIN
# IS_PART_OF_BUS NAME NUM_DOWNHILL_PIPS NUM_INTERSECTS NUM_PIPS
# NUM_TILE_PORTS NUM_UPHILL_PIPS SPEED_INDEX TILE_NAME TILE_PATTERN_OFFSET
puts $fp "\t\t\{"
puts $fp "\t\t\t\"wire\":\"$wire\","
puts $fp "\t\t\},"
}
puts $fp "\t\],"
puts $fp "\}"
close $fp
}
# Emit one JSON5 file per node; node names contain slashes, so files are
# grouped into per-tile subdirectories created on demand.
foreach node [get_nodes] {
file mkdir [file dirname $node]
set fname $node.json5
puts $root_fp "node,,$fname"
set fp [open $fname w]
# node properties:
# BASE_CLOCK_REGION CLASS COST_CODE COST_CODE_NAME IS_BAD IS_COMPLETE
# IS_GND IS_INPUT_PIN IS_OUTPUT_PIN IS_PIN IS_VCC NAME NUM_WIRES PIN_WIRE
# SPEED_CLASS
puts $fp "\{"
puts $fp "\t\"node\": \"$node\","
puts $fp "\t\"wires\": \["
foreach wire [get_wires -of_objects $node] {
# wire properties:
# CLASS COST_CODE ID_IN_TILE_TYPE IS_CONNECTED IS_INPUT_PIN IS_OUTPUT_PIN
# IS_PART_OF_BUS NAME NUM_DOWNHILL_PIPS NUM_INTERSECTS NUM_PIPS
# NUM_TILE_PORTS NUM_UPHILL_PIPS SPEED_INDEX TILE_NAME TILE_PATTERN_OFFSET
puts $fp "\t\t\{"
puts $fp "\t\t\t\"wire\":\"$wire\","
puts $fp "\t\t\},"
}
puts $fp "\t\]"
puts $fp "\}"
close $fp
}
close $root_fp

View File

@ -0,0 +1,13 @@
#!/bin/bash -xe
# Reduce the raw specimen dump into the final database artifacts in output/.
# Use rm -f: with -e in effect, a plain `rm -r output` aborts the whole
# script on the first run, when output/ does not exist yet.
rm -rf output
mkdir -p output
python3 reduce_tile_types.py \
    --root_dir specimen_001/ \
    --output_dir output
python3 create_node_tree.py \
    --dump_all_root_dir specimen_001/ \
    --ordered_wires_root_dir ../072-ordered_wires/specimen_001/ \
    --output_dir output
python3 reduce_site_types.py --output_dir output
python3 generate_grid.py --root_dir specimen_001/ --output_dir output

View File

@ -0,0 +1,664 @@
""" Generate grid from database dump """
from __future__ import print_function
import argparse
import prjxray.lib
import pyjson5 as json5
import multiprocessing
import progressbar
import os.path
import json
import datetime
import pickle
import inspect
import sys
def get_tile_grid_info(fname):
    """Load one tile JSON5 dump and return {tile name: grid info}.

    The info holds the tile type, its grid coordinates, a site name -> site
    type map, and the set of wire names in the tile.
    """
    with open(fname, 'r') as f:
        tile = json5.load(f)

    return {
        tile['tile']: {
            'type': tile['type'],
            'grid_x': tile['x'],
            'grid_y': tile['y'],
            'sites': dict(
                (site['site'], site['type']) for site in tile['sites']
            ),
            # The generator expression must be parenthesized here: a bare
            # genexp argument followed by a trailing comma is a SyntaxError
            # in Python 3.7+.
            'wires': set(
                (wire['wire'] for wire in tile['wires'])
            )
        },
    }
def read_json5(fname):
    """Parse the JSON5 file at ``fname`` and return the decoded object."""
    with open(fname, 'r') as f:
        return json5.load(f)
def generate_tilesizes(grid):
    """ ***BROKEN DO NOT USE*** — guarded by an unconditional assert.

    (Intended behavior: derive a per-tile-type grid footprint by scanning
    each column for the minimum vertical spacing between non-NULL tiles.)
    """
    assert False

    # --- Everything below is unreachable, kept for reference. ---
    tilesizes = {}
    tiles = grid['tiles']
    coord_to_tile = create_coord_to_tile(tiles)
    for tile in grid['tiles']:
        tilesizes[grid['tiles'][tile]['type']] = {
            'grid_x_size': 1,
            'grid_y_size': None,
        }

    x, y = zip(*coord_to_tile.keys())
    min_x, max_x = min(x), max(x)
    min_y, max_y = min(y), max(y)

    for x in range(min_x, max_x + 1):
        # Non-NULL tiles in this column, bottom to top.
        tiles_slice = [
            (y, tiles[coord_to_tile[(x, y)]]['type'])
            for y in range(min_y, max_y + 1)
            if tiles[coord_to_tile[(x, y)]]['type'] != 'NULL'
        ]
        # Pairwise vertical gaps, scanned from the top down.
        for (y1, tile_type), (y2, _) in zip(tiles_slice[::-1],
                                            tiles_slice[-2::-1]):
            grid_y_size = y1 - y2
            if tilesizes[tile_type]['grid_y_size'] is None:
                tilesizes[tile_type]['grid_y_size'] = grid_y_size
            else:
                tilesizes[tile_type]['grid_y_size'] = min(
                    tilesizes[tile_type]['grid_y_size'], grid_y_size)

    for tile_type in tilesizes:
        if tilesizes[tile_type]['grid_y_size'] is None:
            tilesizes[tile_type]['grid_y_size'] = 1
    return tilesizes
def is_edge_shared(edge1, edge2):
    """ Returns true if the 1-D spans edge1 and edge2 overlap.

    Each edge is an (lo, hi) pair with lo < hi; touching only at an endpoint
    does not count as overlap.

    >>> is_edge_shared((0, 2), (1, 3))
    True
    >>> is_edge_shared((1, 2), (0, 3))
    True
    >>> is_edge_shared((0, 1), (1, 2))
    False
    >>> is_edge_shared((0, 1), (2, 3))
    False
    """
    assert edge1[0] < edge1[1], edge1
    assert edge2[0] < edge2[1], edge2
    # Order the spans by start; they overlap iff the later one starts before
    # the earlier one ends.
    first, second = (edge1, edge2) if edge1[0] <= edge2[0] else (edge2, edge1)
    return second[0] < first[1]
def share_edge(a, b):
    """ Returns true if the boxes a and b share part of an edge.

    A box is (x-min, y-min, x-max, y-max).  Touching only at a corner is not
    sharing an edge.  When the boxes neither meet along a vertical nor a
    horizontal boundary the function falls through and returns None (falsy),
    matching the original contract.

    >>> share_edge((0, 0, 1, 1), (1, 0, 2, 1))
    True
    >>> share_edge((0, 0, 1, 3), (1, 2, 2, 4))
    True
    >>> share_edge((0, 0, 1, 1), (1, 1, 2, 2))
    False
    """
    a_x_min, a_y_min, a_x_max, a_y_max = a
    b_x_min, b_y_min, b_x_max, b_y_max = b
    # Boxes touching along a vertical boundary: their y spans must overlap.
    if a_x_min == b_x_max or a_x_max == b_x_min:
        return is_edge_shared((a_y_min, a_y_max), (b_y_min, b_y_max))
    # Boxes touching along a horizontal boundary: their x spans must overlap.
    if a_y_min == b_y_max or a_y_max == b_y_min:
        return is_edge_shared((a_x_min, a_x_max), (b_x_min, b_x_max))
def next_wire_in_dimension(wire1, tile1, wire2, tile2, tiles, x_wires,
                           y_wires, wire_map, wires_in_node):
    """ Decide adjacency when the node's wires line up along one dimension.

    Returns True/False when tile1 and tile2 lie in a common row or column
    (or each wire occupies its own row/column) and adjacency can be judged
    from the ordering alone; returns None when this heuristic does not apply.
    """
    info1 = tiles[tile1]
    info2 = tiles[tile2]
    x1, y1 = info1['grid_x'], info1['grid_y']
    x2, y2 = info2['grid_x'], info2['grid_y']

    # All wires share a row, or every wire has its own column, or these two
    # tiles share a row: judge adjacency along x.
    if len(y_wires) == 1 or len(x_wires) == len(wires_in_node) or \
            abs(y1 - y2) == 0:
        ordered = sorted(x_wires.keys())
        pos1 = ordered.index(x1)
        pos2 = ordered.index(x2)
        if len(x_wires[x1]) == 1 and len(x_wires[x2]) == 1:
            return abs(pos1 - pos2) == 1

    # Symmetric case along y.
    if len(x_wires) == 1 or len(y_wires) == len(wires_in_node) or \
            abs(x1 - x2) == 0:
        ordered = sorted(y_wires.keys())
        pos1 = ordered.index(y1)
        pos2 = ordered.index(y2)
        if len(y_wires[y1]) == 1 and len(y_wires[y2]) == 1:
            return abs(pos1 - pos2) == 1

    return None
def only_wire(tile1, tile2, tiles, x_wires, y_wires):
    """ Returns True when tile1 and tile2 are adjacent and each holds the
    only wire in its x (or y) slot; returns None when neither dimension is
    conclusive.
    """
    x1 = tiles[tile1]['grid_x']
    x2 = tiles[tile2]['grid_x']
    if abs(x1 - x2) == 1 and len(x_wires[x1]) == 1 and len(x_wires[x2]) == 1:
        return True

    y1 = tiles[tile1]['grid_y']
    y2 = tiles[tile2]['grid_y']
    if abs(y1 - y2) == 1 and len(y_wires[y1]) == 1 and len(y_wires[y2]) == 1:
        return True

    return None
def is_directly_connected(node, node_tree, wire1, wire2):
    """ Consult the node_tree for an explicit ordering of wire1 and wire2.

    Returns True/False when the tree answers the question, None when it
    does not cover these wires.
    """
    # Determine the wire universe the tree knows about.
    if 'wires' in node_tree:
        node_tree_wires = node_tree['wires']
    elif len(node_tree['edges']) == 1 and len(node_tree['joins']) == 0:
        node_tree_wires = node_tree['edges'][0]
    else:
        return None

    if wire1 not in node_tree_wires:
        return None
    if wire2 not in node_tree_wires:
        return None

    # Is there an edge that has wire1 next to wire2?
    for edge in node_tree['edges']:
        idx1 = edge.index(wire1) if wire1 in edge else None
        idx2 = edge.index(wire2) if wire2 in edge else None
        if idx1 is not None and idx2 is not None:
            return abs(idx1 - idx2) == 1
        # Only one wire lies on this edge; if it sits in the interior it
        # cannot connect to a wire outside the edge.
        if idx1 is not None and idx1 not in (0, len(edge) - 1):
            return False
        if idx2 is not None and idx2 not in (0, len(edge) - 1):
            return False

    # Is there a join of nodes between wire1 and wire2?
    if wire1 in node_tree['joins']:
        return wire2 in node_tree['joins'][wire1]
    if wire2 in node_tree['joins']:
        assert wire1 not in node_tree['joins'][wire2]
    return None
def is_connected(wire1, tile1, wire2, tile2, node, wires_in_tiles, wire_map,
                 node_tree, tiles, x_wires, y_wires, wires_in_node):
    """ Check if two wires are directly connected, trying three heuristics
    in order: geometric ordering, the node_tree, then the only-wire rule.
    """
    verdict = next_wire_in_dimension(
        wire1, tile1, wire2, tile2, tiles, x_wires, y_wires, wire_map,
        wires_in_node)
    if verdict is not None:
        return verdict

    # Because there are multiple possible wire connections between these two
    # tiles, consult the node_tree to determine if the two wires are actually
    # connected.
    #
    # Warning: the node_tree is incomplete because it is not known how to
    # extract ordered wire information for every node (e.g.
    # CLK_BUFG_REBUF_X60Y142/CLK_BUFG_REBUF_R_CK_GCLK0_BOT).  Such wires
    # happen to be the only wires in their tiles, so the only-wire check
    # below covers them.
    verdict = is_directly_connected(
        node['node'], node_tree[node['node']], wire1, wire2)
    if verdict is not None:
        return verdict

    verdict = only_wire(tile1, tile2, tiles, x_wires, y_wires)
    if verdict is not None:
        return verdict

    # The node_tree didn't specify these wires, and the wires are not
    # unambiguously connected.
    return False
def process_node(tileconn, key_history, node, wire_map, node_tree, tiles):
    """ Append tile-connection records for every connected wire pair of a
    node.  Two-wire nodes are trivially connected; larger nodes consult
    is_connected for every pair.
    """
    wires = [wire['wire'] for wire in node['wires']]

    # Bucket the node's wires by tile, grid column and grid row.
    wires_in_tiles = {}
    x_wires = {}
    y_wires = {}
    for wire in wires:
        info = wire_map[wire]
        wires_in_tiles.setdefault(info['tile'], []).append(wire)
        tile_info = tiles[info['tile']]
        x_wires.setdefault(tile_info['grid_x'], []).append(wire)
        y_wires.setdefault(tile_info['grid_y'], []).append(wire)

    # A two-wire node has exactly one connection.
    if len(wires) == 2:
        first, second = wires
        update_tile_conn(tileconn, key_history, first, wire_map[first],
                         second, wire_map[second], tiles)
        return

    for idx, wire1 in enumerate(wires):
        info1 = wire_map[wire1]
        for wire2 in wires[idx + 1:]:
            info2 = wire_map[wire2]
            if is_connected(
                    wire1, info1['tile'], wire2, info2['tile'], node,
                    wires_in_tiles, wire_map, node_tree, tiles, x_wires,
                    y_wires, wires):
                update_tile_conn(tileconn, key_history, wire1, info1,
                                 wire2, info2, tiles)
def update_tile_conn(tileconn, key_history, wirename1, wire1, wirename2,
                     wire2, tiles):
    """ Append one tile-connection record for a connected wire pair.

    The pair is canonically ordered ((type, shortname, x, y) ascending) so
    duplicate connections can later be detected by simple comparison.
    wire1/wire2 are wire-info dicts; wirename1/wirename2 and key_history
    are accepted for signature compatibility.
    """
    tile1 = tiles[wire1['tile']]
    tile2 = tiles[wire2['tile']]
    key1 = (wire1['type'], wire1['shortname'], tile1['grid_x'], tile1['grid_y'])
    key2 = (wire2['type'], wire2['shortname'], tile2['grid_x'], tile2['grid_y'])
    if key1 > key2:
        wire1, tile1, wire2, tile2 = wire2, tile2, wire1, tile1

    tileconn.append({
        "grid_deltas": [
            tile2['grid_x'] - tile1['grid_x'],
            tile2['grid_y'] - tile1['grid_y'],
        ],
        "tile_types": [tile1['type'], tile2['type']],
        "wire_pair": [wire1['shortname'], wire2['shortname']],
    })
def flatten_tile_conn(tileconn):
    """ Convert tileconn that is key'd to identify specific wire pairs between
    tiles — key (tile types, grid deltas) — to a flat tile connection list
    relating tile types plus relative coordinates to the full set of wire
    pairs to connect. """
    grouped = {}
    for conn in tileconn:
        key = (tuple(conn['tile_types']), tuple(conn['grid_deltas']))
        entry = grouped.setdefault(key, {
            'tile_types': conn['tile_types'],
            'grid_deltas': conn['grid_deltas'],
            'wire_pairs': set(),
        })
        entry['wire_pairs'].add(tuple(conn['wire_pair']))

    # Freeze the wire-pair sets into tuples for the final output.
    return tuple(
        {
            'tile_types': entry['tile_types'],
            'grid_deltas': entry['grid_deltas'],
            'wire_pairs': tuple(entry['wire_pairs']),
        }
        for entry in grouped.values())
def is_tile_type(tiles, coord_to_tile, coord, tile_type):
    """Return True when ``coord`` maps to a tile of type ``tile_type``."""
    if coord not in coord_to_tile:
        return False
    return tiles[coord_to_tile[coord]]['type'] == tile_type
def get_connections(wire, wire_info, conn, idx, coord_to_tile, tiles):
    """ Yields (tile_coord, wire) for each wire that should be connected to
    the specified wire under connection rule ``conn`` at pair index ``idx``.

    The wire may match either end of the rule; the grid deltas are applied
    forward for the first end and backward for the second.
    """
    pair = conn['wire_pairs'][idx]
    tile_types = conn['tile_types']
    grid_deltas = conn['grid_deltas']
    shortname = wire_info['shortname']

    matches_first = (tile_types[0] == wire_info['type']
                     and shortname == pair[0])
    matches_second = (tile_types[1] == wire_info['type']
                      and shortname == pair[1])
    assert matches_first or matches_second, (wire, conn)

    base_x = tiles[wire_info['tile']]['grid_x']
    base_y = tiles[wire_info['tile']]['grid_y']

    if matches_first:
        target = (base_x + grid_deltas[0], base_y + grid_deltas[1])
        # Only yield when the target coordinate holds the expected tile type.
        if is_tile_type(tiles, coord_to_tile, target, tile_types[1]):
            yield target, pair[1]

    if matches_second:
        target = (base_x - grid_deltas[0], base_y - grid_deltas[1])
        if is_tile_type(tiles, coord_to_tile, target, tile_types[0]):
            yield target, pair[0]
def make_connection(wire_nodes, wire1, wire2):
    """ Merge the node sets containing wire1 and wire2.

    wire_nodes maps every wire to the (shared) set of wires it is connected
    to.  After this call both wires reference the same merged set.
    """
    node1 = wire_nodes[wire1]
    node2 = wire_nodes[wire2]
    if node1 is node2:
        # Already in the same node; just sanity-check membership.
        assert wire1 in node1
        assert wire2 in node2
        return
    merged = node1 | node2
    # Point every member of the merged node at the single new set object.
    for wire in merged:
        wire_nodes[wire] = merged
def create_coord_to_tile(tiles):
    """ Build a map from (grid_x, grid_y) coordinate to tile name. """
    return {
        (tileinfo['grid_x'], tileinfo['grid_y']): tile
        for tile, tileinfo in tiles.items()
    }
def connect_wires(tiles, tileconn, wire_map):
    """ Connect individual wires into groups of wires called nodes.

    Returns a tuple of nodes, each node being a tuple of full wire names.
    """
    # Initialize all nodes to originally only contain the wire by itself.
    wire_nodes = {}
    for wire in wire_map:
        wire_nodes[wire] = set([wire])
    # Index tileconn by (tile type, wire shortname) so each wire can quickly
    # find every connection rule that might apply to it.  Both ends of every
    # wire pair are indexed.
    wire_connection_map = {}
    for conn in tileconn:
        for idx, (wire1, wire2) in enumerate(conn['wire_pairs']):
            key1 = (conn['tile_types'][0], wire1)
            if key1 not in wire_connection_map:
                wire_connection_map[key1] = []
            wire_connection_map[key1].append((conn, idx))
            key2 = (conn['tile_types'][1], wire2)
            if key2 not in wire_connection_map:
                wire_connection_map[key2] = []
            wire_connection_map[key2].append((conn, idx))
    coord_to_tile = create_coord_to_tile(tiles)
    for wire, wire_info in progressbar.progressbar(wire_map.items()):
        key = (wire_info['type'], wire_info['shortname'])
        if key not in wire_connection_map:
            continue
        for conn, idx in wire_connection_map[key]:
            for target_tile, target_wire in get_connections(wire, wire_info, conn, idx, coord_to_tile, tiles):
                full_wire_name = coord_to_tile[target_tile] + '/' + target_wire
                # Sanity check that the target wire actually exists in the
                # target tile before merging the nodes.
                assert wire_map[full_wire_name]['shortname'] == target_wire, (
                    target_tile, target_wire, wire, conn
                )
                assert wire_map[full_wire_name]['tile'] == coord_to_tile[target_tile], (
                    wire_map[full_wire_name]['tile'], coord_to_tile[target_tile]
                )
                make_connection(wire_nodes, wire, full_wire_name)
    # Find unique nodes (many wires share the same set object after merging).
    nodes = {}
    for node in wire_nodes.values():
        nodes[id(node)] = node
    # Flatten to list of lists.
    return tuple(tuple(node) for node in nodes.values())
def generate_tilegrid(pool, tiles):
    """ Generate the tile grid and the wire map for all tiles.

    Args:
      pool - multiprocessing.Pool used to parallelize per-tile work.
      tiles - dict of tile type -> list of tile inputs
              (consumed by get_tile_grid_info; presumably file paths from
              read_root_csv -- TODO confirm).

    Returns:
      (grid, wire_map) where grid is {'segments': {}, 'tiles': {...}} and
      wire_map maps full wire name -> {'tile', 'type', 'shortname'}.
    """
    wire_map = {}
    grid = {
        'segments': {},
        'tiles': {},
    }
    # Total tile count is only needed for the progress bar.
    num_tiles = 0
    for tile_type in tiles:
        num_tiles += len(tiles[tile_type])
    idx = 0
    with progressbar.ProgressBar(max_value=num_tiles) as bar:
        for tile_type in tiles:
            for tile in pool.imap_unordered(
                    get_tile_grid_info,
                    tiles[tile_type],
                    chunksize = 20,
            ):
                bar.update(idx)
                # Each worker result describes exactly one tile.
                assert len(tile) == 1, tile
                tilename = tuple(tile.keys())[0]
                for wire in tile[tilename]['wires']:
                    # Wires must be globally unique and tile-prefixed.
                    assert wire not in wire_map, (wire, wire_map)
                    assert wire.startswith(tilename + '/'), (wire, tilename)
                    wire_map[wire] = {
                        'tile': tilename,
                        'type': tile[tilename]['type'],
                        'shortname': wire[len(tilename)+1:],
                    }
                # Wires live in wire_map only; drop them from the grid entry.
                del tile[tilename]['wires']
                grid['tiles'].update(tile)
                idx += 1
        bar.update(idx)
    return grid, wire_map
def generate_tileconn(pool, node_tree, nodes, wire_map, grid):
    """ Build the tile connection list from raw node files.

    Args:
      pool - multiprocessing.Pool for parallel JSON5 parsing.
      node_tree - node tree data loaded from node_tree.json.
      nodes - list of node file paths to read.
      wire_map - full wire name -> wire info, from generate_tilegrid.
      grid - tile grid dict with a 'tiles' key.

    Returns:
      (tileconn, raw_node_data) where tileconn is the flattened connection
      list and raw_node_data is the parsed node records.
    """
    tileconn = []
    key_history = {}
    raw_node_data = []
    with progressbar.ProgressBar(max_value=len(nodes)) as bar:
        for idx, node in enumerate(pool.imap_unordered(
                read_json5,
                nodes,
                chunksize = 20,
        )):
            bar.update(idx)
            raw_node_data.append(node)
            # process_node accumulates connections into tileconn/key_history.
            process_node(tileconn, key_history, node, wire_map, node_tree, grid['tiles'])
        bar.update(idx+1)
    # Collapse per-node connections into unique (tile types, deltas) groups.
    tileconn = flatten_tile_conn(tileconn)
    return tileconn, raw_node_data
def main():
    """ Entry point: generate (or verify) tilegrid, wiremap and tileconn.

    With --verify_only, previously generated outputs are loaded from
    --output_dir instead of being regenerated, then checked against the raw
    node data.
    """
    parser = argparse.ArgumentParser(description="Reduces raw database dump into prototype tiles, grid, and connections.")
    parser.add_argument('--root_dir', required=True)
    parser.add_argument('--output_dir', required=True)
    parser.add_argument('--verify_only', action='store_true')
    args = parser.parse_args()
    tiles, nodes = prjxray.lib.read_root_csv(args.root_dir)
    # Cap worker count; beyond ~10 processes presumably gives no benefit
    # for this workload -- TODO confirm.
    processes = min(multiprocessing.cpu_count(), 10)
    print('{} Running {} processes'.format(datetime.datetime.now(), processes))
    pool = multiprocessing.Pool(processes=processes)
    # Output/input artifact locations.
    node_tree_file = os.path.join(args.output_dir, 'node_tree.json')
    tilegrid_file = os.path.join(args.output_dir, 'tilegrid.json')
    tileconn_file = os.path.join(args.output_dir, 'tileconn.json')
    wire_map_file = os.path.join(args.output_dir, 'wiremap.pickle')
    if not args.verify_only:
        # Generation path: build everything from the raw dump.
        print('{} Creating tile map'.format(datetime.datetime.now()))
        grid, wire_map = generate_tilegrid(pool, tiles)
        with open(tilegrid_file, 'w') as f:
            json.dump(grid, f, indent=2)
        # wire_map is large; pickle it for the verify-only path.
        with open(wire_map_file, 'wb') as f:
            pickle.dump(wire_map, f)
        print('{} Reading node tree'.format(datetime.datetime.now()))
        with open(node_tree_file) as f:
            node_tree = json.load(f)
        print('{} Creating tile connections'.format(datetime.datetime.now()))
        tileconn, raw_node_data = generate_tileconn(pool, node_tree, nodes, wire_map, grid)
        print('{} Writing tileconn'.format(datetime.datetime.now()))
        with open(tileconn_file, 'w') as f:
            json.dump(tileconn, f, indent=2)
    else:
        # Verification path: reload previously generated artifacts.
        print('{} Reading tilegrid'.format(datetime.datetime.now()))
        with open(tilegrid_file) as f:
            grid = json.load(f)
        with open(wire_map_file, 'rb') as f:
            wire_map = pickle.load(f)
        print('{} Reading raw_node_data'.format(datetime.datetime.now()))
        raw_node_data = []
        with progressbar.ProgressBar(max_value=len(nodes)) as bar:
            for idx, node in enumerate(pool.imap_unordered(
                    read_json5,
                    nodes,
                    chunksize = 20,
            )):
                bar.update(idx)
                raw_node_data.append(node)
            bar.update(idx+1)
        print('{} Reading tileconn'.format(datetime.datetime.now()))
        with open(tileconn_file) as f:
            tileconn = json.load(f)
    wire_nodes_file = os.path.join(args.output_dir, 'wire_nodes.pickle')
    if os.path.exists(wire_nodes_file) and args.verify_only:
        # Reuse cached node groups when only verifying.
        with open(wire_nodes_file, 'rb') as f:
            wire_nodes = pickle.load(f)
    else:
        print("{} Connecting wires to verify tileconn".format(datetime.datetime.now()))
        wire_nodes = connect_wires(grid['tiles'], tileconn, wire_map)
        with open(wire_nodes_file, 'wb') as f:
            pickle.dump(wire_nodes, f)
    print('{} Verifing tileconn'.format(datetime.datetime.now()))
    error_nodes = []
    # Compare the nodes generated from tileconn against the raw node dump.
    prjxray.lib.verify_nodes([
        (node['node'], tuple(wire['wire'] for wire in node['wires']))
        for node in raw_node_data
    ], wire_nodes, error_nodes)
    if len(error_nodes) > 0:
        error_nodes_file = os.path.join(args.output_dir, 'error_nodes.json')
        with open(error_nodes_file, 'w') as f:
            json.dump(error_nodes, f, indent=2)
        # Some mismatches are expected; ignored_wires.txt (next to this
        # script) whitelists wires that may be stranded.
        ignored_wires = []
        path_to_file = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
        ignored_wires_file = os.path.join(path_to_file, 'ignored_wires.txt')
        if os.path.exists(ignored_wires_file):
            with open(ignored_wires_file) as f:
                ignored_wires = set(l.strip() for l in f)
        if not prjxray.lib.check_errors(error_nodes, ignored_wires):
            print('{} errors detected, see {} for details.'.format(len(error_nodes), error_nodes_file))
            sys.exit(1)
        else:
            print('{} errors ignored because of {}\nSee {} for details.'.format(
                len(error_nodes), ignored_wires_file, error_nodes_file))

if __name__ == '__main__':
    main()

View File

@ -0,0 +1,33 @@
import json

# Ad-hoc debugging helper: load the error nodes emitted by the reduction
# step and print the wires that ended up stranded (split off from their
# expected node).
with open('output/error_nodes.json') as f:
    flat_error_nodes = json.load(f)

error_nodes = {}
for node, raw_node, generated_nodes in flat_error_nodes:
    if node not in error_nodes:
        error_nodes[node] = {
            'raw_node': set(raw_node),
            'generated_nodes': set(),
        }
    # Every record for a node must agree on the raw wire set.
    assert error_nodes[node]['raw_node'] == set(raw_node)
    error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes)))

for node, error in error_nodes.items():
    combined_generated_nodes = set()
    for generated_node in error['generated_nodes']:
        combined_generated_nodes |= set(generated_node)
    # The generated nodes must exactly cover the raw node (no extra wires).
    assert error['raw_node'] == combined_generated_nodes, (node, error)
    # Treat the largest generated node as the "correct" one; the remainder
    # are stranded fragments.
    good_node = max(error['generated_nodes'], key=lambda x: len(x))
    bad_nodes = error['generated_nodes'] - set((good_node,))
    # NOTE(review): max() raises ValueError if bad_nodes is empty (i.e. all
    # records produced the same generated node) -- assumes that never
    # happens in this data; TODO confirm.
    if max(len(generated_node) for generated_node in bad_nodes) > 1:
        assert False, node
    else:
        for generated_node in bad_nodes:
            for wire in generated_node:
                print(wire)

View File

@ -0,0 +1,110 @@
LIOI3_X0Y141/LIOI_I2GCLK_TOP1
CMT_TOP_R_UPPER_B_X8Y135/CMT_PHASER_UP_DQS_TO_PHASER_D
LIOI3_X0Y145/LIOI_I2GCLK_BOT1
LIOI3_TBYTESRC_X0Y19/LIOI_I2GCLK_TOP1
RIOI3_TBYTESRC_X43Y7/RIOI_I2GCLK_BOT1
LIOI3_TBYTESRC_X0Y43/LIOI_I2GCLK_TOP1
LIOI3_TBYTESRC_X0Y7/LIOI_I2GCLK_BOT1
LIOI3_TBYTETERM_X0Y13/LIOI_I2GCLK_TOP1
RIOI3_X43Y29/RIOI_I2GCLK_TOP1
RIOI3_X43Y33/RIOI_I2GCLK_BOT1
LIOI3_X0Y33/LIOI_I2GCLK_BOT1
LIOI3_X0Y29/LIOI_I2GCLK_TOP1
LIOI3_TBYTESRC_X0Y31/LIOI_I2GCLK_TOP1
RIOI3_TBYTESRC_X43Y19/RIOI_I2GCLK_TOP1
LIOI3_TBYTESRC_X0Y43/LIOI_I2GCLK_BOT1
LIOI3_TBYTETERM_X0Y63/LIOI_I2GCLK_TOP1
RIOI3_TBYTETERM_X43Y37/RIOI_I2GCLK_TOP1
LIOI3_TBYTETERM_X0Y113/LIOI_I2GCLK_TOP1
LIOI3_TBYTESRC_X0Y69/LIOI_I2GCLK_TOP1
RIOI3_X43Y17/RIOI_I2GCLK_TOP1
RIOI3_X43Y21/RIOI_I2GCLK_BOT1
LIOI3_TBYTESRC_X0Y57/LIOI_I2GCLK_BOT1
RIOI3_TBYTETERM_X43Y13/RIOI_I2GCLK_TOP1
LIOI3_TBYTETERM_X0Y37/LIOI_I2GCLK_TOP1
LIOI3_X0Y9/LIOI_I2GCLK_BOT1
CMT_TOP_R_LOWER_T_X8Y18/CMT_PHASER_DOWN_DQS_TO_PHASER_A
LIOI3_X0Y5/LIOI_I2GCLK_TOP1
RIOI3_TBYTESRC_X43Y57/RIOI_I2GCLK_BOT1
RIOI3_TBYTESRC_X43Y31/RIOI_I2GCLK_BOT1
LIOI3_TBYTETERM_X0Y87/LIOI_I2GCLK_BOT1
LIOI3_TBYTESRC_X0Y81/LIOI_I2GCLK_BOT1
RIOI3_TBYTESRC_X43Y43/RIOI_I2GCLK_BOT1
LIOI3_TBYTESRC_X0Y93/LIOI_I2GCLK_BOT1
RIOI3_TBYTESRC_X43Y69/RIOI_I2GCLK_BOT1
LIOI3_TBYTETERM_X0Y13/LIOI_I2GCLK_BOT1
RIOI3_TBYTESRC_X43Y31/RIOI_I2GCLK_TOP1
LIOI3_TBYTETERM_X0Y63/LIOI_I2GCLK_BOT1
LIOI3_TBYTESRC_X0Y143/LIOI_I2GCLK_BOT1
LIOI3_X0Y91/LIOI_I2GCLK_TOP1
LIOI3_X0Y95/LIOI_I2GCLK_BOT1
CMT_TOP_R_UPPER_B_X8Y83/CMT_PHASER_UP_DQS_TO_PHASER_D
LIOI3_TBYTETERM_X0Y137/LIOI_I2GCLK_BOT1
LIOI3_TBYTESRC_X0Y57/LIOI_I2GCLK_TOP1
LIOI3_TBYTETERM_X0Y87/LIOI_I2GCLK_TOP1
CMT_TOP_R_LOWER_T_X8Y70/CMT_PHASER_DOWN_DQS_TO_PHASER_A
LIOI3_X0Y59/LIOI_I2GCLK_BOT1
LIOI3_X0Y55/LIOI_I2GCLK_TOP1
LIOI3_TBYTESRC_X0Y131/LIOI_I2GCLK_TOP1
RIOI3_TBYTESRC_X43Y69/RIOI_I2GCLK_TOP1
LIOI3_TBYTESRC_X0Y143/LIOI_I2GCLK_TOP1
LIOI3_TBYTESRC_X0Y7/LIOI_I2GCLK_TOP1
RIOI3_X43Y67/RIOI_I2GCLK_TOP1
RIOI3_X43Y71/RIOI_I2GCLK_BOT1
LIOI3_TBYTETERM_X0Y37/LIOI_I2GCLK_BOT1
LIOI3_TBYTESRC_X0Y93/LIOI_I2GCLK_TOP1
RIOI3_TBYTESRC_X43Y57/RIOI_I2GCLK_TOP1
RIOI3_TBYTETERM_X43Y63/RIOI_I2GCLK_BOT1
LIOI3_TBYTETERM_X0Y113/LIOI_I2GCLK_BOT1
RIOI3_TBYTETERM_X43Y87/RIOI_I2GCLK_BOT1
LIOI3_TBYTETERM_X0Y137/LIOI_I2GCLK_TOP1
LIOI3_TBYTESRC_X0Y107/LIOI_I2GCLK_BOT1
RIOI3_TBYTETERM_X43Y13/RIOI_I2GCLK_BOT1
RIOI3_TBYTETERM_X43Y87/RIOI_I2GCLK_TOP1
LIOI3_X0Y45/LIOI_I2GCLK_BOT1
CMT_TOP_R_UPPER_B_X8Y31/CMT_PHASER_UP_DQS_TO_PHASER_D
LIOI3_X0Y41/LIOI_I2GCLK_TOP1
LIOI3_TBYTESRC_X0Y19/LIOI_I2GCLK_BOT1
LIOI3_TBYTESRC_X0Y69/LIOI_I2GCLK_BOT1
RIOI3_X43Y79/RIOI_I2GCLK_TOP1
RIOI3_X43Y83/RIOI_I2GCLK_BOT1
RIOI3_X43Y45/RIOI_I2GCLK_BOT1
RIOI3_X43Y41/RIOI_I2GCLK_TOP1
CMT_TOP_L_UPPER_B_X106Y31/CMT_PHASER_UP_DQS_TO_PHASER_D
RIOI3_TBYTESRC_X43Y19/RIOI_I2GCLK_BOT1
LIOI3_X0Y71/LIOI_I2GCLK_BOT1
LIOI3_X0Y67/LIOI_I2GCLK_TOP1
LIOI3_X0Y129/LIOI_I2GCLK_TOP1
LIOI3_X0Y133/LIOI_I2GCLK_BOT1
RIOI3_TBYTETERM_X43Y37/RIOI_I2GCLK_BOT1
LIOI3_TBYTESRC_X0Y131/LIOI_I2GCLK_BOT1
RIOI3_X43Y59/RIOI_I2GCLK_BOT1
CMT_TOP_L_LOWER_T_X106Y70/CMT_PHASER_DOWN_DQS_TO_PHASER_A
RIOI3_X43Y55/RIOI_I2GCLK_TOP1
LIOI3_X0Y105/LIOI_I2GCLK_TOP1
LIOI3_X0Y109/LIOI_I2GCLK_BOT1
CMT_TOP_R_LOWER_T_X8Y122/CMT_PHASER_DOWN_DQS_TO_PHASER_A
RIOI3_TBYTESRC_X43Y93/RIOI_I2GCLK_TOP1
LIOI3_TBYTESRC_X0Y31/LIOI_I2GCLK_BOT1
LIOI3_X0Y17/LIOI_I2GCLK_TOP1
LIOI3_X0Y21/LIOI_I2GCLK_BOT1
LIOI3_TBYTESRC_X0Y81/LIOI_I2GCLK_TOP1
LIOI3_TBYTESRC_X0Y119/LIOI_I2GCLK_BOT1
LIOI3_TBYTESRC_X0Y107/LIOI_I2GCLK_TOP1
RIOI3_TBYTETERM_X43Y63/RIOI_I2GCLK_TOP1
LIOI3_X0Y83/LIOI_I2GCLK_BOT1
LIOI3_X0Y79/LIOI_I2GCLK_TOP1
RIOI3_TBYTESRC_X43Y7/RIOI_I2GCLK_TOP1
RIOI3_X43Y95/RIOI_I2GCLK_BOT1
CMT_TOP_L_UPPER_B_X106Y83/CMT_PHASER_UP_DQS_TO_PHASER_D
RIOI3_X43Y91/RIOI_I2GCLK_TOP1
RIOI3_TBYTESRC_X43Y43/RIOI_I2GCLK_TOP1
CMT_TOP_L_LOWER_T_X106Y18/CMT_PHASER_DOWN_DQS_TO_PHASER_A
RIOI3_X43Y9/RIOI_I2GCLK_BOT1
RIOI3_X43Y5/RIOI_I2GCLK_TOP1
RIOI3_TBYTESRC_X43Y81/RIOI_I2GCLK_BOT1
LIOI3_TBYTESRC_X0Y119/LIOI_I2GCLK_TOP1
RIOI3_TBYTESRC_X43Y93/RIOI_I2GCLK_BOT1
RIOI3_TBYTESRC_X43Y81/RIOI_I2GCLK_TOP1
LIOI3_X0Y121/LIOI_I2GCLK_BOT1
LIOI3_X0Y117/LIOI_I2GCLK_TOP1

View File

@ -0,0 +1,55 @@
""" Reduce sites types to prototypes that are always correct.
reduce_tile_types.py generates per tile type site types. reduce_site_types.py
takes all site types across all tiles and creates generic site types that are
valid for all tile types.
"""
import argparse
import prjxray.lib
import os
import os.path
import re
import json
def main():
    """ Entry point: merge per-tile site type JSON files into one prototype
    site_type_<TYPE>.json per site type, asserting all instances agree.
    """
    parser = argparse.ArgumentParser(description="Reduces per tile site types to generic site types.")
    parser.add_argument('--output_dir', required=True)
    args = parser.parse_args()
    # Matches files written by reduce_tile_types.py, e.g.
    # tile_type_CLBLL_L_site_type_SLICEL.json (group 2 = site type).
    SITE_TYPE = re.compile('^tile_type_(.+)_site_type_(.+)\.json$')
    # Map of site type -> list of per-tile-type instance file names.
    site_types = {}
    for path in os.listdir(args.output_dir):
        match = SITE_TYPE.fullmatch(path)
        if match is None:
            continue
        site_type = match.group(2)
        if site_type not in site_types:
            site_types[site_type] = []
        site_types[site_type].append(path)
    for site_type in site_types:
        # First instance becomes the prototype; every other instance must
        # be identical (compare_prototype_site asserts equality).
        proto_site_type = None
        for instance in site_types[site_type]:
            with open(os.path.join(args.output_dir, instance)) as f:
                instance_site_type = json.load(f)
            if proto_site_type is None:
                proto_site_type = instance_site_type
            else:
                prjxray.lib.compare_prototype_site(
                    proto_site_type,
                    instance_site_type,
                )
        with open(os.path.join(args.output_dir,
                'site_type_{}.json'.format(site_type)), 'w') as f:
            json.dump(proto_site_type, f, indent=2)

if __name__ == '__main__':
    main()

View File

@ -0,0 +1,323 @@
""" Reduce tile types to prototypes that are always correct.
The dump-all generate.tcl dumps all instances of each tile type. Some tiles
are missing wires. reduce_tile_types.py generates the superset tile that
encompases all tiles of that type. If it is not possible to generate a super
set tile, an error will be generated.
"""
import argparse
import prjxray.lib
import datetime
import os.path
import json
import pyjson5 as json5
import progressbar
import multiprocessing
import os
import functools
import re
def check_and_strip_prefix(name, prefix):
assert name.startswith(prefix), repr((name, prefix))
return name[len(prefix):]
def flatten_site_pins(tile, site, site_pins, site_pin_node_to_wires):
    # Map each site pin name (with the "<site>/" prefix stripped) to the
    # single tile-local wire its node resolves to, or None when the node has
    # no wire inside this tile.
    def inner():
        for site_pin in site_pins:
            wires = tuple(site_pin_node_to_wires(tile, site_pin['node']))
            if len(wires) == 0:
                # Node has no wire in this tile; record the pin as unwired.
                yield (check_and_strip_prefix(site_pin['site_pin'], site+'/'), None)
                continue
            # A node may touch a tile through at most one wire per site pin.
            assert len(wires) == 1, repr(wires)
            yield (check_and_strip_prefix(site_pin['site_pin'], site+'/'), wires[0])
    return dict(inner())
# All site names appear to follow the pattern <type>_X<abs coord>Y<abs coord>.
# Generally speaking, only the tile relatively coordinates are required to
# assemble arch defs, so we re-origin the coordinates to be relative to the tile
# (e.g. start at X0Y0) and discard the prefix from the name.
SITE_COORDINATE_PATTERN = re.compile('^(.+)_X([0-9]+)Y([0-9]+)$')
def find_origin_coordinate(sites):
    """ Find the coordinates of each site within the tile, and then return the
    smallest (x, y) pair so callers can re-origin sites relative to the tile.
    """
    if not sites:
        return 0, 0
    xs = []
    ys = []
    for site in sites:
        match = SITE_COORDINATE_PATTERN.match(site['site'])
        assert match is not None, site
        xs.append(int(match.group(2)))
        ys.append(int(match.group(3)))
    return min(xs), min(ys)
def get_sites(tile, site_pin_node_to_wires):
    # Yield one dict per site in the tile, with coordinates re-origined so
    # the smallest site coordinate in the tile becomes X0Y0 (see
    # find_origin_coordinate), and with site pins flattened to tile wires.
    min_x_coord, min_y_coord = find_origin_coordinate(tile['sites'])
    for site in tile['sites']:
        orig_site_name = site['site']
        coordinate = SITE_COORDINATE_PATTERN.match(orig_site_name)
        x_coord = int(coordinate.group(2))
        y_coord = int(coordinate.group(3))
        yield (
            {
                # Tile-relative name, e.g. X0Y1.
                'name': 'X{}Y{}'.format(x_coord - min_x_coord, y_coord - min_y_coord),
                # Original Xilinx site name prefix, e.g. SLICE.
                'prefix': coordinate.group(1),
                'x_coord': x_coord - min_x_coord,
                'y_coord': y_coord - min_y_coord,
                'type': site['type'],
                'site_pins': dict(flatten_site_pins(
                    tile['tile'],
                    site['site'], site['site_pins'], site_pin_node_to_wires)),
            }
        )
def compare_sites_and_update(tile, sites, new_sites):
    """ Check that `new_sites` matches `sites` pairwise and fill in any
    site pin wires that were previously unknown (None) in `sites`.

    The `tile` argument is accepted for interface compatibility but unused.
    """
    for known, fresh in zip(sites, new_sites):
        assert known['type'] == fresh['type']
        assert known['site_pins'].keys() == fresh['site_pins'].keys()
        for pin_name, fresh_wire in fresh['site_pins'].items():
            known_wire = known['site_pins'][pin_name]
            if known_wire is not None and fresh_wire is not None:
                # Both instances know the wire; they must agree.
                assert known_wire == fresh_wire
            elif known_wire is None and fresh_wire is not None:
                # Learn the wire from the new instance.
                known['site_pins'][pin_name] = fresh_wire
def get_prototype_site(site):
    # Build a location-independent prototype of a site: type, site pins and
    # site pips with the "<site>/" instance prefix stripped from all names.
    proto = {}
    proto['type'] = site['type']
    proto['site_pins'] = {}
    proto['site_pips'] = {}
    for site_pin in site['site_pins']:
        name = check_and_strip_prefix(site_pin['site_pin'], site['site'] + '/')
        proto['site_pins'][name] = {
            'direction': site_pin['direction'],
            'index_in_site': site_pin['index_in_site'],
        }
    for site_pip in site['site_pips']:
        name = check_and_strip_prefix(site_pip['site_pip'], site['site'] + '/')
        proto['site_pips'][name] = {
            'to_pin': site_pip['to_pin'],
            'from_pin': site_pip['from_pin'],
        }
    return proto
def get_pips(tile, pips):
    # Build a map of pip name -> pip info with the "<tile>/" prefix stripped
    # from the pip name and its src/dst wires.  src_wire/dst_wire may be
    # None when the dump did not resolve them (see compare_and_update_pips).
    proto_pips = {}
    for pip in pips:
        name = check_and_strip_prefix(pip['pip'], tile + '/')
        proto_pips[name] = {
            'src_wire': check_and_strip_prefix(pip['src_wire'], tile + '/')
            if pip['src_wire'] is not None else None,
            'dst_wire': check_and_strip_prefix(pip['dst_wire'], tile + '/')
            if pip['dst_wire'] is not None else None,
            'is_pseudo': pip['is_pseudo'],
            'is_directional': pip['is_directional'],
            'can_invert': pip['can_invert'],
        }
    return proto_pips
def compare_and_update_pips(pips, new_pips):
    """ Check `new_pips` against `pips` and fill in missing wires.

    Pip names are always the same, but sometimes the src_wire or dst_wire
    may be missing (None); learn them from `new_pips` when available.  The
    boolean flags must always agree exactly.
    """
    assert pips.keys() == new_pips.keys(), repr((pips.keys(), new_pips.keys()))
    for name, known in pips.items():
        fresh = new_pips[name]
        # src and dst wires follow the same merge rule.
        for wire_key in ('src_wire', 'dst_wire'):
            if known[wire_key] is not None and fresh[wire_key] is not None:
                assert known[wire_key] == fresh[wire_key], repr((
                    known[wire_key],
                    fresh[wire_key],
                ))
            elif known[wire_key] is None and fresh[wire_key] is not None:
                known[wire_key] = fresh[wire_key]
        for flag in ('is_pseudo', 'is_directional', 'can_invert'):
            assert known[flag] == fresh[flag], (flag, known[flag], fresh[flag])
def check_wires(wires, sites, pips):
    """ Verify that the wires generated from nodes are a superset of the
    wires referenced by site pins and pips.  `sites` and `pips` may be None.
    """
    if sites is not None:
        for site in sites:
            for pin_wire in site['site_pins'].values():
                if pin_wire is not None:
                    assert pin_wire in wires, repr((pin_wire, wires))
    if pips is not None:
        for pip in pips.values():
            for wire_key in ('src_wire', 'dst_wire'):
                if pip[wire_key] is not None:
                    assert pip[wire_key] in wires, repr((pip[wire_key], wires))
def read_json5(fname, nodes):
    """ Parse one tile JSON5 dump and derive its prototype pieces.

    Args:
      fname - path to the tile's JSON5 file.
      nodes - node name -> wire list mapping for a NodeLookup.

    Returns:
      (fname, tile, site_types, sites, pips, wires) tuple.
    """
    # Rebuild the lookup locally so this function is usable from worker
    # processes (the mapping is passed as plain data, not a live object).
    node_lookup = prjxray.lib.NodeLookup()
    node_lookup.load_from_nodes(nodes)
    #print('{} Reading {} (in pid {})'.format(datetime.datetime.now(), fname, os.getpid()))
    with open(fname) as f:
        tile = json5.load(f)
    #print('{} Done reading {}'.format(datetime.datetime.now(), fname))
    def get_site_types():
        for site in tile['sites']:
            yield get_prototype_site(site)
    site_types = tuple(get_site_types())
    sites = tuple(get_sites(tile, node_lookup.site_pin_node_to_wires))
    pips = get_pips(tile['tile'], tile['pips'])
    def inner():
        # Strip the "<tile>/" prefix from every wire name.
        for wire in tile['wires']:
            assert wire['wire'].startswith(tile['tile'] + '/')
            yield wire['wire'][len(tile['tile'])+1:]
    wires = set(inner())
    # Wires derived from nodes must be a subset of the tile's own wire list.
    wires_from_nodes = set(node_lookup.wires_for_tile(tile['tile']))
    assert len(wires_from_nodes - wires) == 0, repr((wires, wires_from_nodes))
    return fname, tile, site_types, sites, pips, wires
def reduce_tile(pool, site_types, tile_type, tile_instances, node_lookup):
    """ Merge all instances of one tile type into a superset prototype tile.

    Args:
      pool - multiprocessing.Pool for parallel parsing.
      site_types - dict updated in place with prototype site types found.
      tile_type - tile type being reduced (all instances must match it).
      tile_instances - list of JSON5 file paths for this tile type.
      node_lookup - prjxray.lib.NodeLookup with the node<->wire mapping.

    Returns:
      dict with 'tile_type', 'sites', 'pips' and 'wires' keys.
    """
    sites = None
    pips = None
    wires = set()
    with progressbar.ProgressBar(max_value=len(tile_instances)) as bar:
        chunksize = 20
        if len(tile_instances) < chunksize*2:
            # Too few files to be worth the process pool overhead.
            # NOTE: `iter` shadows the builtin within this function.
            iter = map(lambda file: read_json5(file, node_lookup.nodes), tile_instances)
        else:
            print('{} Using pool.imap_unordered'.format(datetime.datetime.now()))
            iter = pool.imap_unordered(
                functools.partial(read_json5, nodes=node_lookup.nodes),
                tile_instances,
                chunksize=chunksize,
            )
        for idx, (fname, tile, new_site_types, new_sites, new_pips, new_wires) in enumerate(iter):
            bar.update(idx)
            assert tile['type'] == tile_type, repr((tile['tile'], tile_type))
            # Record or cross-check the prototype for each site type seen.
            for site_type in new_site_types:
                if site_type['type'] in site_types:
                    prjxray.lib.compare_prototype_site(site_type, site_types[site_type['type']])
                else:
                    site_types[site_type['type']] = site_type
            # Sites are expected to always be the same across instances;
            # missing site pin wires get filled in as they are discovered.
            if sites is None:
                sites = new_sites
            else:
                compare_sites_and_update(tile['tile'], sites, new_sites)
            # Same for pips: names match, missing wires get filled in.
            if pips is None:
                pips = new_pips
            else:
                compare_and_update_pips(pips, new_pips)
            # Wires are a union over all instances (some tiles omit wires).
            wires |= new_wires
        bar.update(idx+1)
    # The merged wire set must cover everything sites and pips reference.
    check_wires(wires, sites, pips)
    return {
        'tile_type': tile_type,
        'sites': sites,
        'pips': pips,
        'wires': tuple(wires),
    }
def main():
    """ Entry point: reduce every tile type in the raw dump to a prototype
    tile_type_<TYPE>.json plus per-tile-type site type JSON files.
    """
    parser = argparse.ArgumentParser(description="Reduces raw database dump into prototype tiles, grid, and connections.")
    parser.add_argument('--root_dir', required=True)
    parser.add_argument('--output_dir', required=True)
    parser.add_argument('--ignore_cache', action='store_true')
    args = parser.parse_args()
    print('{} Reading root.csv'.format(datetime.datetime.now()))
    tiles, nodes = prjxray.lib.read_root_csv(args.root_dir)
    print('{} Loading node<->wire mapping'.format(datetime.datetime.now()))
    node_lookup = prjxray.lib.NodeLookup()
    # The node lookup is expensive to build; cache it as a pickle unless
    # --ignore_cache is given.
    node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle')
    if os.path.exists(node_lookup_file) and not args.ignore_cache:
        node_lookup.load_from_file(node_lookup_file)
    else:
        node_lookup.load_from_root_csv(nodes)
        node_lookup.save_to_file(node_lookup_file)
    site_types = {}
    processes = min(multiprocessing.cpu_count(), 10)
    print('Running {} processes'.format(processes))
    pool = multiprocessing.Pool(processes=processes)
    for tile_type in sorted(tiles.keys()):
        #for tile_type in ['CLBLL_L', 'CLBLL_R', 'CLBLM_L', 'CLBLM_R', 'INT_L', 'INT_L']:
        tile_type_file = os.path.join(args.output_dir, 'tile_type_{}.json'.format(tile_type))
        # Reset per tile type; reduce_tile fills this in.
        site_types = {}
        if os.path.exists(tile_type_file):
            # Already reduced on a previous run; skip (delete the file to
            # force regeneration).
            print('{} Skip reduced tile for {}'.format(datetime.datetime.now(), tile_type))
            continue
        print('{} Generating reduced tile for {}'.format(datetime.datetime.now(), tile_type))
        reduced_tile = reduce_tile(
            pool,
            site_types,
            tile_type, tiles[tile_type],
            node_lookup)
        # Write out each site type observed in this tile type.
        for site_type in site_types:
            with open(os.path.join(
                    args.output_dir,
                    'tile_type_{}_site_type_{}.json'.format(
                        tile_type,
                        site_types[site_type]['type']
                    )), 'w') as f:
                json.dump(site_types[site_type], f, indent=2)
        with open(tile_type_file, 'w') as f:
            json.dump(reduced_tile, f, indent=2)

if __name__ == '__main__':
    main()

View File

@ -29,3 +29,6 @@ $(eval $(call fuzzer,057-bipips,056-rempips))
$(eval $(call fuzzer,058-hclkpips,056-rempips))
$(eval $(call fuzzer,070-tileconn,005-tilegrid))
$(eval $(call fuzzer,071-ppips,057-bipips 058-hclkpips))
$(eval $(call fuzzer,072-ordered_wires,))
$(eval $(call fuzzer,073-get_counts,))
$(eval $(call fuzzer,074-dump_all,072-ordered_wires))

0
prjxray/__init__.py Normal file
View File

66
prjxray/connections.py Normal file
View File

@ -0,0 +1,66 @@
from collections import namedtuple
# A single wire at a specific tile location in the grid.
WireInGrid = namedtuple('WireInGrid', 'tile grid_x grid_y wire')
# A connection between two WireInGrid instances.
Connection = namedtuple('Connection', 'wire_a wire_b')
class Connections(object):
    """ Enumerates wire-to-wire connections implied by tilegrid + tileconn.

    Args:
      tilegrid - tilegrid.json data (dict with a 'tiles' key).
      tileconn - tileconn.json data (list of connection rules).
      tile_wires - map of tile type -> iterable of wire shortnames.
    """
    def __init__(self, tilegrid, tileconn, tile_wires):
        self.grid = tilegrid['tiles']
        self.tile_wires = tile_wires
        # Coordinate lookups for tiles and their types.
        self.coord_to_tile = {}
        self.coord_to_tile_type = {}
        for tile, tile_info in self.grid.items():
            self.coord_to_tile[(tile_info['grid_x'], tile_info['grid_y'])] = tile
            self.coord_to_tile_type[(tile_info['grid_x'], tile_info['grid_y'])] = tile_info['type']
            # Make sure we have tile type info for every tile in the grid.
            assert tile_info['type'] in self.tile_wires, (tile_info['type'], self.tile_wires.keys())
        # Index connection rules by the first end only:
        # (tile type, wire) -> list of (grid delta, other tile type, other wire).
        # NOTE(review): only the forward direction is indexed, so each
        # connection is presumably yielded once, from its first end --
        # confirm against tileconn consumers.
        self.potential_connections = {}
        for conn in tileconn:
            grid_deltas = conn['grid_deltas']
            tile_types = conn['tile_types']
            for pairs in conn['wire_pairs']:
                key = (tile_types[0], pairs[0])
                if key not in self.potential_connections:
                    self.potential_connections[key] = []
                self.potential_connections[key].append((
                    grid_deltas, tile_types[1], pairs[1]
                ))

    def all_possible_connections_from(self, wire_in_grid):
        # Yield a Connection for every rule whose target tile exists in the
        # grid and has the expected tile type.
        tile_type = self.coord_to_tile_type[(wire_in_grid.grid_x, wire_in_grid.grid_y)]
        key = (tile_type, wire_in_grid.wire)
        if key not in self.potential_connections:
            return
        for relative_coord, target_tile_type, target_wire in (
                self.potential_connections[key]):
            rel_x, rel_y = relative_coord
            target_coord = (wire_in_grid.grid_x+rel_x, wire_in_grid.grid_y+rel_y)
            if target_coord in self.coord_to_tile_type:
                if self.coord_to_tile_type[target_coord] == target_tile_type:
                    yield Connection(wire_in_grid, WireInGrid(
                        tile = self.coord_to_tile[target_coord],
                        grid_x = target_coord[0],
                        grid_y = target_coord[1],
                        wire = target_wire))

    def get_connections(self):
        """ Yields Connection objects that represent all connections present in
        the grid based on tileconn """
        # Walk every wire of every tile and expand its connection rules.
        for tile, tile_info in self.grid.items():
            for wire in self.tile_wires[tile_info['type']]:
                wire_in_grid = WireInGrid(
                    tile = tile,
                    grid_x = tile_info['grid_x'],
                    grid_y = tile_info['grid_y'],
                    wire = wire)
                for potential_connection in self.all_possible_connections_from(wire_in_grid):
                    yield potential_connection

96
prjxray/db.py Normal file
View File

@ -0,0 +1,96 @@
import os.path
import json
from prjxray import grid
from prjxray import tile
from prjxray import connections
def get_available_databases(prjxray_root):
    """ Return set of available directory to databases given the root directory
    of prjxray-db
    """
    available = set()
    for entry in os.listdir(prjxray_root):
        # Skip hidden entries such as .git.
        if entry.startswith('.'):
            continue
        candidate = os.path.join(prjxray_root, entry)
        # A database directory is identified by its settings.sh file.
        if os.path.exists(os.path.join(candidate, 'settings.sh')):
            available.add(candidate)
    return available
class Database(object):
    def __init__(self, db_root):
        """ Create project x-ray Database at given db_root.

        db_root: Path to directory containing settings.sh, *.db, tilegrid.json and
        tileconn.json
        """
        self.db_root = db_root
        # Lazily-loaded caches (see _read_tilegrid / _read_tileconn).
        self.tilegrid = None
        self.tileconn = None
        # NOTE(review): this None assignment is immediately overwritten by
        # the dict below; it is redundant.
        self.tile_types = None

        # Map of upper-case tile type -> tile.TileDbs, discovered by
        # scanning db_root for tile_type_*.json files.
        self.tile_types = {}
        for f in os.listdir(self.db_root):
            if f.endswith('.json') and f.startswith('tile_type_'):
                tile_type = f[len('tile_type_'):-len('.json')].lower()
                # segbits/mask db files are optional per tile type.
                segbits = os.path.join(self.db_root, 'segbits_{}.db'.format(tile_type))
                if not os.path.isfile(segbits):
                    segbits = None
                mask = os.path.join(self.db_root, 'mask_{}.db'.format(tile_type))
                if not os.path.isfile(mask):
                    mask = None
                tile_type_file = os.path.join(self.db_root, 'tile_type_{}.json'.format(tile_type.upper()))
                if not os.path.isfile(tile_type_file):
                    tile_type_file = None
                self.tile_types[tile_type.upper()] = tile.TileDbs(
                    segbits = segbits,
                    mask = mask,
                    tile_type = tile_type_file,
                )

    def get_tile_types(self):
        """ Return list of tile types """
        return self.tile_types.keys()

    def get_tile_type(self, tile_type):
        """ Return Tile object for given tilename. """
        return tile.Tile(tile_type, self.tile_types[tile_type])

    def _read_tilegrid(self):
        """ Read tilegrid database if not already read. """
        if not self.tilegrid:
            with open(os.path.join(self.db_root, 'tilegrid.json')) as f:
                self.tilegrid = json.load(f)

    def _read_tileconn(self):
        """ Read tileconn database if not already read. """
        if not self.tileconn:
            with open(os.path.join(self.db_root, 'tileconn.json')) as f:
                self.tileconn = json.load(f)

    def grid(self):
        """ Return Grid object for database. """
        self._read_tilegrid()
        return grid.Grid(self.tilegrid)

    def _read_tile_types(self):
        # NOTE(review): replaces the TileDbs entries in self.tile_types with
        # loaded JSON dicts; after this call get_tile_type() no longer works.
        for tile_type, db in self.tile_types.items():
            with open(db.tile_type) as f:
                self.tile_types[tile_type] = json.load(f)

    def connections(self):
        """ Return a connections.Connections built from this database. """
        self._read_tilegrid()
        self._read_tileconn()
        self._read_tile_types()
        tile_wires = dict((tile_type, db['wires'])
                          for tile_type, db in self.tile_types.items())
        return connections.Connections(self.tilegrid, self.tileconn, tile_wires)

51
prjxray/grid.py Normal file
View File

@ -0,0 +1,51 @@
from collections import namedtuple
# A location in the grid, by integer column (grid_x) and row (grid_y).
GridLoc = namedtuple('GridLoc', 'grid_x grid_y')
# Per-tile information: bitstream segment (may be None), site map, tile type.
GridInfo = namedtuple('GridInfo', 'segment sites tile_type')
class Grid(object):
    """ Object that represents grid for a given database.

    Provides methods to inspect grid by name or location. Also provides mapping
    of segment offsets for particular grid locations and their tile types.
    """
    def __init__(self, tilegrid):
        self.tilegrid = tilegrid
        # GridLoc -> tile name.
        self.loc = {}
        # Tile name -> GridInfo.
        self.tileinfo = {}
        for tile in self.tilegrid['tiles']:
            tileinfo = self.tilegrid['tiles'][tile]
            grid_loc = GridLoc(tileinfo['grid_x'], tileinfo['grid_y'])
            self.loc[grid_loc] = tile
            self.tileinfo[tile] = GridInfo(
                # 'segment' is optional in tilegrid entries.
                segment = tileinfo['segment'] if 'segment' in tileinfo else None,
                sites = tileinfo['sites'],
                tile_type = tileinfo['type'])
        x, y = zip(*self.loc.keys())
        self._dims = (min(x), max(x), min(y), max(y))

    def tile_locations(self):
        """ Return list of tile locations. """
        return self.loc.keys()

    def dims(self):
        """ Returns (x_min, x_max, y_min, y_max) for given Grid. """
        return self._dims

    def is_populated(self, grid_loc):
        """ Return True if a tile exists at grid_loc. """
        return grid_loc in self.loc

    def loc_of_tilename(self, tilename):
        """ Return the GridLoc of the named tile. """
        tileinfo = self.tilegrid['tiles'][tilename]
        return GridLoc(tileinfo['grid_x'], tileinfo['grid_y'])

    def tilename_at_loc(self, grid_loc):
        """ Return the tile name at grid_loc (KeyError if unpopulated). """
        return self.loc[grid_loc]

    def gridinfo_at_loc(self, grid_loc):
        """ Return the GridInfo for the tile at grid_loc. """
        return self.tileinfo[self.loc[grid_loc]]

    def gridinfo_at_tilename(self, tilename):
        """ Return the GridInfo for the named tile. """
        return self.tileinfo[tilename]

142
prjxray/lib.py Normal file
View File

@ -0,0 +1,142 @@
import os.path
import csv
import pickle
import pyjson5 as json5
import progressbar
def read_root_csv(root_dir):
    """ Reads root.csv from raw db directory.

    This should only be used during database generation.

    Returns (tiles, nodes): tiles maps tile subtype -> list of file paths,
    nodes is a list of node file paths.
    """
    tiles = {}
    nodes = []
    with open(os.path.join(root_dir, 'root.csv')) as f:
        for row in csv.DictReader(f):
            path = os.path.join(root_dir, row['filename'])
            if row['filetype'] == 'tile':
                tiles.setdefault(row['subtype'], []).append(path)
            elif row['filetype'] == 'node':
                nodes.append(path)
    return tiles, nodes
def verify_nodes(raw_nodes, nodes, error_nodes):
    """ Compares raw_nodes with generated nodes and adds errors to error_nodes.

    Args:
     raw_nodes - Iterable of (node name, iterable of wires in node).
     nodes - Iterable of iterable of wires in nodes.
     error_nodes - List to be appended to when an error occurs. Elements will
                   be 3 tuple of raw node name, raw node, and generated node
                   that did not match.
    """
    # Map each wire to the generated node (set of wires) containing it.
    wire_to_node = {}
    for generated in nodes:
        wire_set = set(generated)
        for wire in wire_set:
            wire_to_node[wire] = wire_set
    for name, raw_wires in raw_nodes:
        raw_set = set(raw_wires)
        # Sorted for deterministic error ordering.
        for wire in sorted(raw_set):
            generated = wire_to_node.get(wire)
            if generated is None:
                # No generated node contains this wire; that is only OK when
                # the raw node is the singleton {wire}.
                if raw_set != set((wire,)):
                    error_nodes.append((name, tuple(raw_set), (wire,)))
            elif generated != raw_set:
                error_nodes.append((name, tuple(raw_set), tuple(generated)))
def check_errors(flat_error_nodes, ignored_wires):
    """ Check if error_nodes has errors that are not covered in ignored_wires.

    Args:
     flat_error_nodes - List of error_nodes generated from verify_nodes.
     ignored_wires - Collection of wires that may be stranded without failing.

    Returns:
      True when every stranded wire is in ignored_wires, False otherwise.
    """
    # Group flat error records by node name, deduplicating generated nodes.
    error_nodes = {}
    for node, raw_node, generated_nodes in flat_error_nodes:
        if node not in error_nodes:
            error_nodes[node] = {
                'raw_node': set(raw_node),
                'generated_nodes': set(),
            }
        # Make sure all raw nodes are the same.
        assert error_nodes[node]['raw_node'] == set(raw_node)
        error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes)))
    for node, error in error_nodes.items():
        combined_generated_nodes = set()
        for generated_node in error['generated_nodes']:
            combined_generated_nodes |= set(generated_node)
        # Make sure there are no extra wires in nodes.
        assert error['raw_node'] == combined_generated_nodes, (node, error)
        # Treat the largest generated node as correct; the rest are stranded.
        good_node = max(error['generated_nodes'], key=lambda x: len(x))
        bad_nodes = error['generated_nodes'] - set((good_node,))
        if not bad_nodes:
            # All records produced the same generated node; nothing is
            # stranded.  (Previously max() below raised ValueError here.)
            continue
        # Make sure only single wires are stranded.
        assert max(len(generated_node) for generated_node in bad_nodes) == 1
        for generate_node in bad_nodes:
            for wire in generate_node:
                if wire not in ignored_wires:
                    return False
    return True
class NodeLookup(object):
    """ Lookup table from node name to the node's wire list.

    The table can be populated directly, from a directory of JSON5 node
    files, or from a previously pickled instance.
    """

    def __init__(self):
        self.nodes = {}

    def load_from_nodes(self, nodes):
        """ Populate directly from a dict of node name -> wire list. """
        self.nodes = nodes

    def load_from_root_csv(self, nodes):
        """ Populate from an iterable of JSON5 node file paths. """
        for node_file in progressbar.progressbar(nodes):
            with open(node_file) as f:
                node_wires = json5.load(f)

            # Each node may only be defined once.
            assert node_wires['node'] not in self.nodes
            self.nodes[node_wires['node']] = node_wires['wires']

    def load_from_file(self, fname):
        """ Populate from a pickle written by save_to_file. """
        with open(fname, 'rb') as f:
            self.nodes = pickle.load(f)

    def save_to_file(self, fname):
        """ Pickle the node table to fname. """
        with open(fname, 'wb') as f:
            pickle.dump(self.nodes, f)

    def site_pin_node_to_wires(self, tile, node):
        """ Yield tile-local wire names of the given node within tile.

        Yields nothing when node is None.
        """
        if node is None:
            return

        prefix = tile + '/'
        for wire in self.nodes[node]:
            full_name = wire['wire']
            if full_name.startswith(prefix):
                yield full_name[len(prefix):]

    def wires_for_tile(self, tile):
        """ Yield tile-local names of every known wire within tile. """
        prefix = tile + '/'
        for node in self.nodes.values():
            for wire in node:
                full_name = wire['wire']
                if full_name.startswith(prefix):
                    yield full_name[len(prefix):]
def compare_prototype_site(proto_a, proto_b):
    """ Compare two prototype site types for equivalence.

    Raises an AssertionError carrying both values' repr when the
    prototypes do not match; returns None otherwise.
    """
    assert proto_a == proto_b, repr((proto_a, proto_b))

85
prjxray/tile.py Normal file
View File

@ -0,0 +1,85 @@
from collections import namedtuple
import json
""" Database files available for a tile """
TileDbs = namedtuple('TileDbs', 'segbits mask tile_type')
Pip = namedtuple('Pip', 'net_to net_from can_invert is_directional is_pseudo')
""" Site - Represents an instance of a site within a tile.
name - Name of site within tile, instance specific.
prefix - Prefix of site naming in Xilinx parlance.
type - What type of slice this instance presents.
pins - Instaces of site pins within this site and tile. This is an tuple of
SitePin tuples, and is specific to this instance of the site within
the tile.
"""
Site = namedtuple('Site', 'name x y type site_pins')
""" SitePin - Tuple representing a site pin within a tile.
Sites are generic based on type, however sites are instanced
within a tile 1 or more times. The SitePin contains both site type generic
information and tile type specific information.
name - Site type specific name. This name is expected to be the same for all
sites of the same type.
direction - Direction of this site pin. This direction is expected to be the
same for all sites of the same type.
wire - Wire name within the tile. This name is site instance specific.
"""
SitePin = namedtuple('SitePin', 'name wire direction')
class Tile(object):
    """ Provides abstraction of a tile in the database. """

    def __init__(self, tilename, tile_dbs):
        self.tilename = tilename
        self.tilename_upper = self.tilename.upper()
        self.tile_dbs = tile_dbs
        self.wires = None
        self.sites = None
        self.pips = None

        # Load the tile type description and expand it into namedtuples.
        with open(self.tile_dbs.tile_type) as f:
            tile_type = json.load(f)

        # The database file must describe this tile's type.
        assert self.tilename_upper == tile_type['tile_type']

        self.wires = tile_type['wires']
        self.sites = tuple(
            Site(
                name=None,
                x=None,
                y=None,
                type=site['type'],
                site_pins=site['site_pins'],
            ) for site in tile_type['sites'])
        self.pips = tuple(
            Pip(
                net_to=pip['dst_wire'],
                net_from=pip['src_wire'],
                can_invert=bool(int(pip['can_invert'])),
                is_directional=bool(int(pip['is_directional'])),
                is_pseudo=bool(int(pip['is_pseudo'])),
            ) for pip in tile_type['pips'])

    def get_wires(self):
        """Returns a set of wire names present in this tile."""
        return self.wires

    def get_sites(self):
        """ Returns tuple of Site namedtuple's present in this tile. """
        return self.sites

    def get_pips(self):
        """ Returns tuple of Pip namedtuple's representing the PIPs in this tile.
        """
        return self.pips

View File

@ -1,2 +1,4 @@
futures
yapf
pyjson5
progressbar2

30
tools/quick_test.py Normal file
View File

@ -0,0 +1,30 @@
from __future__ import print_function
import prjxray.db
import argparse
def quick_test(db_root):
    """ Run a sanity check against the prjxray database at db_root.

    Asserts that every tile type present in the grid is described in the
    database, and that every tile type description loads cleanly.
    """
    db = prjxray.db.Database(db_root)
    grid = db.grid()

    # Verify that we have some tile information for every tile in grid.
    grid_tile_types = set(
        grid.gridinfo_at_loc(loc).tile_type for loc in grid.tile_locations())
    db_tile_types = set(db.get_tile_types())
    assert len(grid_tile_types - db_tile_types) == 0

    # Verify that all tile types can be loaded.
    for tile_type in db.get_tile_types():
        tile = db.get_tile_type(tile_type)
        tile.get_wires()
        tile.get_sites()
        tile.get_pips()
def main():
    """ Parse arguments and run the database sanity check. """
    arg_parser = argparse.ArgumentParser(
        description="Runs a sanity check on a prjxray database.")
    arg_parser.add_argument('--db_root', required=True)

    parsed_args = arg_parser.parse_args()
    quick_test(parsed_args.db_root)


if __name__ == '__main__':
    main()

View File

@ -0,0 +1,119 @@
from __future__ import print_function
import prjxray.db
import prjxray.lib
import argparse
import datetime
import progressbar
import multiprocessing
import pyjson5 as json5
import json
import sys
def full_wire_name(wire_in_grid):
    """ Return the globally unique 'tile/wire' name for a wire in the grid. """
    return '%s/%s' % (wire_in_grid.tile, wire_in_grid.wire)


def make_connection(wires, connection):
    """ Union the connected-wire sets of both endpoints of connection.

    wires maps full wire names to a shared set object holding all wires
    known to be electrically connected; the two endpoints' sets are merged
    in place so every member maps to the same set object.
    """
    wire_a = full_wire_name(connection.wire_a)
    wire_b = full_wire_name(connection.wire_b)

    # Ensure both endpoints have a (possibly singleton) set.
    for wire in (wire_a, wire_b):
        if wire not in wires:
            wires[wire] = set((wire,))

    set_a = wires[wire_a]
    set_b = wires[wire_b]

    # Already merged; nothing to do.
    if set_a is set_b:
        return

    # Fold b's members into a's set, then repoint every member at it.
    set_a |= set_b
    for wire in set_a:
        wires[wire] = set_a
def make_connections(db_root):
    """ Build the wire groups (nodes) implied by the database's connections.

    Returns an iterable of sets, each set holding the full wire names of one
    connected node.
    """
    db = prjxray.db.Database(db_root)

    wires = {}
    for connection in db.connections().get_connections():
        make_connection(wires, connection)

    # Many wires share the same set object; de-duplicate by identity so
    # each node appears exactly once.
    unique_nodes = {}
    for wire_node in wires.values():
        unique_nodes[id(wire_node)] = wire_node

    return unique_nodes.values()
def read_json5(fname):
    """ Parse the JSON5 file at fname and return the decoded object. """
    with open(fname, 'r') as fp:
        return json5.load(fp)
def main():
    """ Compare the generated database's nodes against the raw node dumps.

    Reads all raw node JSON5 files in parallel, reconstructs nodes from the
    database's connection list, and reports mismatches. Exits non-zero when
    errors exist and are not fully covered by --ignored_wires.
    """
    parser = argparse.ArgumentParser(
        description="Tests database against raw node list.")
    parser.add_argument('--db_root', required=True)
    parser.add_argument('--raw_node_root', required=True)
    parser.add_argument('--error_nodes', default="error_nodes.json")
    parser.add_argument('--ignored_wires')

    args = parser.parse_args()

    processes = min(multiprocessing.cpu_count(), 10)
    print('{} Running {} processes'.format(datetime.datetime.now(), processes))
    pool = multiprocessing.Pool(processes=processes)

    # Fixed: original passed a stray extra argument to this format call.
    print('{} Reading raw data index'.format(datetime.datetime.now()))
    _, nodes = prjxray.lib.read_root_csv(args.raw_node_root)

    print('{} Reading raw_node_data'.format(datetime.datetime.now()))

    raw_node_data = []
    with progressbar.ProgressBar(max_value=len(nodes)) as bar:
        # Fixed: guard so the final bar.update below cannot raise NameError
        # when the node list is empty.
        idx = -1
        for idx, node in enumerate(pool.imap_unordered(
                read_json5,
                nodes,
                chunksize=20,
        )):
            bar.update(idx)
            raw_node_data.append(
                (node['node'], tuple(wire['wire'] for wire in node['wires'])))
        bar.update(idx + 1)

    print('{} Creating connections'.format(datetime.datetime.now()))
    generated_nodes = make_connections(args.db_root)

    print('{} Verifying connections'.format(datetime.datetime.now()))
    error_nodes = []
    prjxray.lib.verify_nodes(raw_node_data, generated_nodes, error_nodes)

    if len(error_nodes) > 0:
        # Fixed: always bind ignored_wires so the check_errors call below
        # cannot hit an unbound local.
        ignored_wires = []
        if args.ignored_wires:
            with open(args.ignored_wires, 'r') as f:
                ignored_wires = [l.strip() for l in f.readlines()]

        print('{} Found {} errors, writing errors to {}'.format(
            datetime.datetime.now(),
            len(error_nodes),
            args.error_nodes,
        ))
        with open(args.error_nodes, 'w') as f:
            json.dump(error_nodes, f, indent=2)

        # Without an ignore list, any error is fatal.
        if not args.ignored_wires:
            sys.exit(1)

        if not prjxray.lib.check_errors(error_nodes, ignored_wires):
            print('{} Errors were not ignored via ignored_wires {}'.format(
                datetime.datetime.now(),
                args.ignored_wires,
            ))
            sys.exit(1)
        else:
            print('{} All errors were via ignored_wires {}'.format(
                datetime.datetime.now(),
                args.ignored_wires,
            ))


if __name__ == '__main__':
    main()