From 85e14f81a158e1c7d5d751955ce5d37287d0f726 Mon Sep 17 00:00:00 2001 From: Keith Rothman <537074+litghost@users.noreply.github.com> Date: Wed, 19 Sep 2018 14:49:15 -0700 Subject: [PATCH 1/5] Generate tile types, site types, tilegrid, tileconn for entire part. Signed-off-by: Keith Rothman <537074+litghost@users.noreply.github.com> --- .gitignore | 5 + fuzzers/072-ordered_wires/Makefile | 26 + fuzzers/072-ordered_wires/generate.sh | 5 + fuzzers/072-ordered_wires/generate.tcl | 19 + fuzzers/073-get_counts/Makefile | 26 + fuzzers/073-get_counts/generate.sh | 5 + fuzzers/073-get_counts/generate.tcl | 27 + fuzzers/074-dump_all/Makefile | 30 + fuzzers/074-dump_all/analyze_errors.py | 41 ++ fuzzers/074-dump_all/cleanup_site_pins.py | 121 ++++ fuzzers/074-dump_all/create_node_tree.py | 261 ++++++++ fuzzers/074-dump_all/generate.sh | 7 + fuzzers/074-dump_all/generate.tcl | 137 ++++ fuzzers/074-dump_all/generate_after_dump.sh | 13 + fuzzers/074-dump_all/generate_grid.py | 664 +++++++++++++++++++ fuzzers/074-dump_all/generate_ignore_list.py | 33 + fuzzers/074-dump_all/ignored_wires.txt | 110 +++ fuzzers/074-dump_all/reduce_site_types.py | 55 ++ fuzzers/074-dump_all/reduce_tile_types.py | 323 +++++++++ fuzzers/Makefile | 3 + prjxray/__init__.py | 0 prjxray/connections.py | 66 ++ prjxray/db.py | 96 +++ prjxray/grid.py | 51 ++ prjxray/lib.py | 142 ++++ prjxray/tile.py | 85 +++ requirements.txt | 2 + tools/quick_test.py | 30 + tools/verify_tile_connections.py | 119 ++++ 29 files changed, 2502 insertions(+) create mode 100644 fuzzers/072-ordered_wires/Makefile create mode 100644 fuzzers/072-ordered_wires/generate.sh create mode 100644 fuzzers/072-ordered_wires/generate.tcl create mode 100644 fuzzers/073-get_counts/Makefile create mode 100644 fuzzers/073-get_counts/generate.sh create mode 100644 fuzzers/073-get_counts/generate.tcl create mode 100644 fuzzers/074-dump_all/Makefile create mode 100644 fuzzers/074-dump_all/analyze_errors.py create mode 100644 
fuzzers/074-dump_all/cleanup_site_pins.py create mode 100644 fuzzers/074-dump_all/create_node_tree.py create mode 100644 fuzzers/074-dump_all/generate.sh create mode 100644 fuzzers/074-dump_all/generate.tcl create mode 100755 fuzzers/074-dump_all/generate_after_dump.sh create mode 100644 fuzzers/074-dump_all/generate_grid.py create mode 100644 fuzzers/074-dump_all/generate_ignore_list.py create mode 100644 fuzzers/074-dump_all/ignored_wires.txt create mode 100644 fuzzers/074-dump_all/reduce_site_types.py create mode 100644 fuzzers/074-dump_all/reduce_tile_types.py create mode 100644 prjxray/__init__.py create mode 100644 prjxray/connections.py create mode 100644 prjxray/db.py create mode 100644 prjxray/grid.py create mode 100644 prjxray/lib.py create mode 100644 prjxray/tile.py create mode 100644 tools/quick_test.py create mode 100644 tools/verify_tile_connections.py diff --git a/.gitignore b/.gitignore index 28f10758..1e7f24db 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,8 @@ # Ignore database directories _except_ for their settings database/*/* !database/*/settings.sh +**/specimen_* +**/output +run.ok +__pycache__ +*.pyc diff --git a/fuzzers/072-ordered_wires/Makefile b/fuzzers/072-ordered_wires/Makefile new file mode 100644 index 00000000..a46dd96e --- /dev/null +++ b/fuzzers/072-ordered_wires/Makefile @@ -0,0 +1,26 @@ + +N := 1 +SPECIMENS := $(addprefix specimen_,$(shell seq -f '%03.0f' $(N))) +SPECIMENS_OK := $(addsuffix /OK,$(SPECIMENS)) + +database: $(SPECIMENS_OK) + true + +pushdb: + true + +$(SPECIMENS_OK): + bash generate.sh $(subst /OK,,$@) + touch $@ + +run: + $(MAKE) clean + $(MAKE) database + $(MAKE) pushdb + touch run.ok + +clean: + rm -rf specimen_[0-9][0-9][0-9]/ run.ok + +.PHONY: database pushdb run clean + diff --git a/fuzzers/072-ordered_wires/generate.sh b/fuzzers/072-ordered_wires/generate.sh new file mode 100644 index 00000000..41f7366f --- /dev/null +++ b/fuzzers/072-ordered_wires/generate.sh @@ -0,0 +1,5 @@ +#!/bin/bash -x + +source 
${XRAY_GENHEADER} + +vivado -mode batch -source ../generate.tcl diff --git a/fuzzers/072-ordered_wires/generate.tcl b/fuzzers/072-ordered_wires/generate.tcl new file mode 100644 index 00000000..bcc0e568 --- /dev/null +++ b/fuzzers/072-ordered_wires/generate.tcl @@ -0,0 +1,19 @@ +create_project -force -part $::env(XRAY_PART) design design +set_property design_mode PinPlanning [current_fileset] +open_io_design -name io_1 + +set downhill_fp [open downhill_wires.txt w] +set uphill_fp [open uphill_wires.txt w] +#set_param tcl.collectionResultDisplayLimit 0 +foreach pip [get_pips] { + foreach downhill_node [get_nodes -downhill -of_object $pip] { + set ordered_downhill_wires [get_wires -from $pip -of_object $downhill_node] + puts $downhill_fp "$pip $downhill_node $ordered_downhill_wires" + } + foreach uphill_node [get_nodes -uphill -of_object $pip] { + set ordered_uphill_wires [get_wires -to $pip -of_object $uphill_node] + puts $uphill_fp "$pip $uphill_node $ordered_uphill_wires" + } +} +close $downhill_fp +close $uphill_fp diff --git a/fuzzers/073-get_counts/Makefile b/fuzzers/073-get_counts/Makefile new file mode 100644 index 00000000..da03acc1 --- /dev/null +++ b/fuzzers/073-get_counts/Makefile @@ -0,0 +1,26 @@ + +N := 1 +SPECIMENS := $(addprefix specimen_,$(shell seq -f '%03.0f' $(N))) +SPECIMENS_OK := $(addsuffix /OK,$(SPECIMENS)) + +database: $(SPECIMENS_OK) + true + +pushdb: + cp specimen_001/*.csv ${XRAY_DATABASE_DIR}/$(XRAY_DATABASE)/ + +$(SPECIMENS_OK): + bash generate.sh $(subst /OK,,$@) + touch $@ + +run: + $(MAKE) clean + $(MAKE) database + $(MAKE) pushdb + touch run.ok + +clean: + rm -rf specimen_[0-9][0-9][0-9]/ run.ok + +.PHONY: database pushdb run clean + diff --git a/fuzzers/073-get_counts/generate.sh b/fuzzers/073-get_counts/generate.sh new file mode 100644 index 00000000..41f7366f --- /dev/null +++ b/fuzzers/073-get_counts/generate.sh @@ -0,0 +1,5 @@ +#!/bin/bash -x + +source ${XRAY_GENHEADER} + +vivado -mode batch -source ../generate.tcl diff --git 
a/fuzzers/073-get_counts/generate.tcl b/fuzzers/073-get_counts/generate.tcl new file mode 100644 index 00000000..881d8eb7 --- /dev/null +++ b/fuzzers/073-get_counts/generate.tcl @@ -0,0 +1,27 @@ +# This script dumps the count of each major object type for sanity checking. +# +# For large parts, this may take a while, hence why it is a separate generate +# step. + +create_project -force -part $::env(XRAY_PART) design design +set_property design_mode PinPlanning [current_fileset] +open_io_design -name io_1 + +set fp [open element_counts.csv w] +puts $fp "type,count" +puts $fp "tiles,[llength [get_tiles]]" +set sites [get_sites] +set num_site_pins 0 +set num_site_pips 0 +puts $fp "sites,[llength $sites]" +foreach site $sites { + set num_site_pins [expr $num_site_pins + [llength [get_site_pins -of_objects $site]]] + set num_site_pips [expr $num_site_pips + [llength [get_site_pips -of_objects $site]]] +} +puts $fp "site_pins,$num_site_pins" +puts $fp "site_pips,$num_site_pips" +puts $fp "pips,[llength [get_pips]]" +puts $fp "package_pins,[llength [get_package_pins]]" +puts $fp "nodes,[llength [get_nodes]]" +puts $fp "wires,[llength [get_wires]]" +close $fp diff --git a/fuzzers/074-dump_all/Makefile b/fuzzers/074-dump_all/Makefile new file mode 100644 index 00000000..e1a3f60a --- /dev/null +++ b/fuzzers/074-dump_all/Makefile @@ -0,0 +1,30 @@ + +N := 1 +SPECIMENS := $(addprefix specimen_,$(shell seq -f '%03.0f' $(N))) +SPECIMENS_OK := $(addsuffix /OK,$(SPECIMENS)) + +database: $(SPECIMENS_OK) + true + +pushdb: + cp output/tile_type_*.json ${XRAY_DATABASE_DIR}/$(XRAY_DATABASE)/ + rm ${XRAY_DATABASE_DIR}/$(XRAY_DATABASE)/tile_type_*_site_type_*.json + cp output/site_type_*.json ${XRAY_DATABASE_DIR}/$(XRAY_DATABASE)/ + cp output/tileconn.json ${XRAY_DATABASE_DIR}/$(XRAY_DATABASE)/ + cp output/tilegrid.json ${XRAY_DATABASE_DIR}/$(XRAY_DATABASE)/ + +$(SPECIMENS_OK): + bash generate.sh $(subst /OK,,$@) + touch $@ + +run: + $(MAKE) clean + $(MAKE) database + $(MAKE) pushdb + 
touch run.ok + +clean: + rm -rf specimen_[0-9][0-9][0-9]/ output/ run.ok + +.PHONY: database pushdb run clean + diff --git a/fuzzers/074-dump_all/analyze_errors.py b/fuzzers/074-dump_all/analyze_errors.py new file mode 100644 index 00000000..f145204c --- /dev/null +++ b/fuzzers/074-dump_all/analyze_errors.py @@ -0,0 +1,41 @@ +import json +with open('output/error_nodes.json') as f: + flat_error_nodes = json.load(f) + +error_nodes = {} +for node, raw_node, generated_nodes in flat_error_nodes: + if node not in error_nodes: + error_nodes[node] = { + 'raw_node': set(raw_node), + 'generated_nodes': set(), + } + + + assert error_nodes[node]['raw_node'] == set(raw_node) + error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes))) + +for node, error in error_nodes.items(): + combined_generated_nodes = set() + for generated_node in error['generated_nodes']: + combined_generated_nodes |= set(generated_node) + + assert error['raw_node'] == combined_generated_nodes, (node, error) + + good_node = max(error['generated_nodes'], key=lambda x: len(x)) + bad_nodes = error['generated_nodes'] - set((good_node,)) + + if max(len(generated_node) for generated_node in bad_nodes) > 1: + assert False, node + else: + not_pcie = False + for generated_node in bad_nodes: + for wire in generated_node: + if not wire.startswith('PCIE'): + not_pcie = True + if not_pcie: + #print(node, good_node, map(tuple, bad_nodes)) + print(repr((node, tuple(map(tuple, bad_nodes))))) + pass + else: + #print(repr((node, map(tuple, bad_nodes)))) + pass diff --git a/fuzzers/074-dump_all/cleanup_site_pins.py b/fuzzers/074-dump_all/cleanup_site_pins.py new file mode 100644 index 00000000..bf22869b --- /dev/null +++ b/fuzzers/074-dump_all/cleanup_site_pins.py @@ -0,0 +1,121 @@ +""" Tool to cleanup site pins JSON dumps. + +This tool has two behaviors. This first is to rename site names from global +coordinates to site local coordinates. The second is remove the tile prefix +from node names. 
+ +For example CLBLM_L_X8Y149 contains two sites named SLICE_X10Y149 and +SLICE_X11Y149. SLICE_X10Y149 becomes X0Y0 and SLICE_X11Y149 becomes X1Y0. +""" + +from __future__ import print_function +import json +import json5 +import re +import sys +import copy + +# All site names appear to follow the pattern _XY. +# Generally speaking, only the tile relatively coordinates are required to +# assemble arch defs, so we re-origin the coordinates to be relative to the tile +# (e.g. start at X0Y0) and discard the prefix from the name. +SITE_COORDINATE_PATTERN = re.compile('^(.+)_X([0-9]+)Y([0-9]+)$') + + +def find_origin_coordinate(sites): + """ Find the coordinates of each site within the tile, and then subtract the + smallest coordinate to re-origin them all to be relative to the tile. + """ + + if len(sites) == 0: + return 0, 0 + + def inner_(): + for site in sites: + coordinate = SITE_COORDINATE_PATTERN.match(site['name']) + assert coordinate is not None, site + + x_coord = int(coordinate.group(2)) + y_coord = int(coordinate.group(3)) + yield x_coord, y_coord + + x_coords, y_coords = zip(*inner_()) + min_x_coord = min(x_coords) + min_y_coord = min(y_coords) + + return min_x_coord, min_y_coord + + +def create_site_pin_to_wire_maps(tile_name, nodes): + """ Create a map from site_pin names to nodes. + + Create a mapping from site pins to tile local wires. For each node that is + attached to a site pin, there should only be 1 tile local wire. + + """ + + # Remove tile prefix (e.g. CLBLM_L_X8Y149/) from node names. + # Routing resources will not have the prefix. 
+ tile_prefix = tile_name + '/' + site_pin_to_wires = {} + + for node in nodes: + if len(node['site_pins']) == 0: + continue + + wire_names = [ + wire for wire in node['wires'] if wire.startswith(tile_prefix) + ] + assert len(wire_names) == 1, (node, tile_prefix) + + for site_pin in node["site_pins"]: + assert site_pin not in site_pin_to_wires + site_pin_to_wires[site_pin] = wire_names[0] + + return site_pin_to_wires + + +def main(): + site_pins = json5.load(sys.stdin) + + output_site_pins = {} + output_site_pins["tile_type"] = site_pins["tile_type"] + output_site_pins["sites"] = copy.deepcopy(site_pins["sites"]) + + site_pin_to_wires = create_site_pin_to_wire_maps( + site_pins['tile_name'], site_pins['nodes']) + min_x_coord, min_y_coord = find_origin_coordinate(site_pins['sites']) + + for site in output_site_pins['sites']: + orig_site_name = site['name'] + coordinate = SITE_COORDINATE_PATTERN.match(orig_site_name) + + x_coord = int(coordinate.group(2)) + y_coord = int(coordinate.group(3)) + site['name'] = 'X{}Y{}'.format( + x_coord - min_x_coord, y_coord - min_y_coord) + site['prefix'] = coordinate.group(1) + site['x_coord'] = x_coord - min_x_coord + site['y_coord'] = y_coord - min_y_coord + + for site_pin in site['site_pins']: + assert site_pin['name'].startswith(orig_site_name + '/') + if site_pin['name'] in site_pin_to_wires: + site_pin['wire'] = site_pin_to_wires[site_pin['name']] + else: + print( + ( + '***WARNING***: Site pin {} for tile type {} is not connected, ' + 'make sure all instaces of this tile type has this site_pin ' + 'disconnected.').format( + site_pin['name'], site_pins['tile_type']), + file=sys.stderr) + + site_pin['name'] = site_pin['name'][len(orig_site_name) + 1:] + + json.dump(output_site_pins, sys.stdout, indent=2) + sys.stdout.write('\n') + + +if __name__ == "__main__": + main() diff --git a/fuzzers/074-dump_all/create_node_tree.py b/fuzzers/074-dump_all/create_node_tree.py new file mode 100644 index 00000000..94c3ca72 --- /dev/null +++ 
b/fuzzers/074-dump_all/create_node_tree.py @@ -0,0 +1,261 @@ +import argparse +import datetime +import progressbar +import json +import os.path +import prjxray.lib +import pickle +import collections + +def build_node_index(fname): + node_index = {} + with open(fname, 'rb') as f: + f.seek(0, 2) + bytes = f.tell() + f.seek(0, 0) + with progressbar.ProgressBar(max_value=bytes) as bar: + end_of_line = 0 + for l in f: + parts = l.decode('utf8').split(' ') + pip, node = parts[0:2] + + if node not in node_index: + node_index[node] = [] + + node_index[node].append(end_of_line) + end_of_line = f.tell() + bar.update(end_of_line) + + return node_index + +def read_node(expected_node, wire_file, node_index): + with open(wire_file, 'rb') as f: + for index in node_index: + f.seek(index, 0) + + parts = f.readline().decode('utf8').strip().split(' ') + + pip, node = parts[0:2] + wires = parts[2:] + + assert node == expected_node, repr((node, expected_node, index)) + + yield wires + +def generate_edges(graph, root, graph_nodes): + """ Starting from root, generate an edge in dir and insert into graph. + + If the tree forks, simply insert a joins to indicate the split. 
+ + """ + edge = [root] + prev_root = None + + while True: + outbound_edges = graph_nodes[root] + outbound_edges -= set((prev_root,)) + if len(outbound_edges) > 1: + graph['edges'].append(edge) + if root not in graph['joins']: + graph['joins'][root] = set() + graph['joins'][root] |= outbound_edges + + for element in graph_nodes[root]: + if element not in graph['joins']: + graph['joins'][element] = set() + graph['joins'][element].add(root) + + break + else: + if len(outbound_edges) == 0: + graph['edges'].append(edge) + break + + next_root = tuple(outbound_edges)[0] + edge.append(next_root) + prev_root, root = root, next_root + +def create_ordered_wires_for_node(node, wires_in_node, downhill, uphill): + if len(wires_in_node) <= 2: + return {'edges': [wires_in_node], 'joins': {}} + + downhill = set(tuple(l) for l in downhill) + uphill = set(tuple(l) for l in uphill) + + roots = set() + all_wires = set() + + for wire in downhill: + if len(wire) > 0: + roots |= set((wire[0], wire[-1])) + all_wires |= set(wire) + + for wire in uphill: + if len(wire) > 0: + roots |= set((wire[0], wire[-1])) + all_wires |= set(wire) + + assert len(wires_in_node) >= len(all_wires) + + if len(all_wires) <= 2: + return {'edges': tuple(all_wires), 'joins': {}} + + graph_nodes = dict((wire, set()) for wire in all_wires) + + for wire in all_wires: + for down in downhill: + try: + idx = down.index(wire) + if idx+1 < len(down): + graph_nodes[wire].add(down[idx+1]) + if idx-1 >= 0: + graph_nodes[wire].add(down[idx-1]) + except ValueError: + continue + + for up in uphill: + try: + idx = up.index(wire) + if idx+1 < len(up): + graph_nodes[wire].add(up[idx+1]) + if idx-1 >= 0: + graph_nodes[wire].add(up[idx-1]) + except ValueError: + continue + + graph = {'edges': [], 'joins': {}} + + while len(roots) > 0: + root = roots.pop() + + if len(graph_nodes[root]) > 0: + generate_edges(graph, root, graph_nodes) + + # Dedup identical edges. 
+ final_edges = set() + + for edge in graph['edges']: + edge1 = tuple(edge) + edge2 = tuple(edge[::-1]) + + if edge1 > edge2: + final_edges.add((edge2, edge1)) + else: + final_edges.add((edge1, edge2)) + + edges = [edge[0] for edge in final_edges] + + element_index = {} + for edge in edges: + for idx, element in enumerate(edge): + if element not in element_index: + element_index[element] = [] + element_index[element].append((idx, edge)) + + new_edges = [] + for edge in edges: + starts = element_index[edge[0]] + ends = element_index[edge[-1]] + + found_any = False + for start in starts: + start_idx, other_edge = start + if other_edge is edge: + continue + + + for end in ends: + if other_edge is not end[1]: + continue + + found_any = True + end_idx, _ = end + # check if the interior elements are the same. + if start_idx > end_idx: + step = -1 + else: + step = 1 + + other_edge_slice = slice(start_idx, end_idx+step if end_idx+step >= 0 else None, step) + if edge != other_edge[other_edge_slice]: + new_edges.append(edge) + + if not found_any: + new_edges.append(edge) + + output = { + 'edges': new_edges, + 'joins': dict((key, tuple(value)) + for key, value in graph['joins'].items()), + 'wires': wires_in_node, + } + + all_wires_in_output = set() + for edge in output['edges']: + all_wires_in_output |= set(edge) + + for element in output['joins']: + all_wires_in_output.add(element) + + return output + +def main(): + parser = argparse.ArgumentParser(description="") + parser.add_argument('--dump_all_root_dir', required=True) + parser.add_argument('--ordered_wires_root_dir', required=True) + parser.add_argument('--output_dir', required=True) + + args = parser.parse_args() + + downhill_wires = os.path.join(args.ordered_wires_root_dir, 'downhill_wires.txt') + uphill_wires = os.path.join(args.ordered_wires_root_dir, 'uphill_wires.txt') + + assert os.path.exists(downhill_wires) + assert os.path.exists(uphill_wires) + + print('{} Reading root.csv'.format(datetime.datetime.now())) + 
tiles, nodes = prjxray.lib.read_root_csv(args.dump_all_root_dir) + + print('{} Loading node<->wire mapping'.format(datetime.datetime.now())) + node_lookup = prjxray.lib.NodeLookup() + node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle') + if os.path.exists(node_lookup_file): + node_lookup.load_from_file(node_lookup_file) + else: + node_lookup.load_from_root_csv(nodes) + node_lookup.save_to_file(node_lookup_file) + + wire_index_file = os.path.join(args.output_dir, 'wire_index.pickle') + if os.path.exists(wire_index_file): + print('{} Reading wire<->node index'.format(datetime.datetime.now())) + with open(wire_index_file, 'rb') as f: + wire_index = pickle.load(f) + + downhill_wire_node_index = wire_index['downhill'] + uphill_wire_node_index = wire_index['uphill'] + else: + print('{} Creating wire<->node index'.format(datetime.datetime.now())) + downhill_wire_node_index = build_node_index(downhill_wires) + uphill_wire_node_index = build_node_index(uphill_wires) + + with open(wire_index_file, 'wb') as f: + pickle.dump({ + 'downhill': downhill_wire_node_index, + 'uphill': uphill_wire_node_index, + }, f) + + print('{} Creating node tree'.format(datetime.datetime.now())) + nodes = collections.OrderedDict() + for node in progressbar.progressbar(sorted(node_lookup.nodes)): + nodes[node] = create_ordered_wires_for_node( + node, + tuple(wire['wire'] for wire in node_lookup.nodes[node]), + tuple(read_node(node, downhill_wires, downhill_wire_node_index[node] if node in downhill_wire_node_index else [])), + tuple(read_node(node, uphill_wires, uphill_wire_node_index[node] if node in uphill_wire_node_index else []))) + + print('{} Writing node tree'.format(datetime.datetime.now())) + with open(os.path.join(args.output_dir, 'node_tree.json'), 'w') as f: + json.dump(nodes, f, indent=2) + +if __name__ == '__main__': + main() diff --git a/fuzzers/074-dump_all/generate.sh b/fuzzers/074-dump_all/generate.sh new file mode 100644 index 00000000..9dbd8f3c --- /dev/null +++ 
b/fuzzers/074-dump_all/generate.sh @@ -0,0 +1,7 @@ +#!/bin/bash -x + +source ${XRAY_GENHEADER} + +vivado -mode batch -source ../generate.tcl + +cd .. && ./generate_after_dump.sh diff --git a/fuzzers/074-dump_all/generate.tcl b/fuzzers/074-dump_all/generate.tcl new file mode 100644 index 00000000..3aeaf90f --- /dev/null +++ b/fuzzers/074-dump_all/generate.tcl @@ -0,0 +1,137 @@ +create_project -force -part $::env(XRAY_PART) design design +set_property design_mode PinPlanning [current_fileset] +open_io_design -name io_1 + +set root_fp [open root.csv w] +puts $root_fp "filetype,subtype,filename" +foreach tile [get_tiles] { + set fname tile_$tile.json5 + set tile_type [get_property TYPE $tile] + puts $root_fp "tile,$tile_type,$fname" + + set fp [open $fname w] + puts $fp "\{" + puts $fp "\t\"tile\": \"$tile\"," + # tile properties: + # CLASS COLUMN DEVICE_ID FIRST_SITE_ID GRID_POINT_X GRID_POINT_Y INDEX + # INT_TILE_X INT_TILE_Y IS_CENTER_TILE IS_DCM_TILE IS_GT_CLOCK_SITE_TILE + # IS_GT_SITE_TILE NAME NUM_ARCS NUM_SITES ROW SLR_REGION_ID + # TILE_PATTERN_IDX TILE_TYPE TILE_TYPE_INDEX TILE_X TILE_Y TYPE + puts $fp "\t\"type\": \"$tile_type\"," + puts $fp "\t\"x\": [get_property GRID_POINT_X $tile]," + puts $fp "\t\"y\": [get_property GRID_POINT_Y $tile]," + puts $fp "\t\"sites\": \[" + foreach site [get_sites -of_objects $tile] { + # site properties: + # ALTERNATE_SITE_TYPES CLASS CLOCK_REGION IS_BONDED IS_CLOCK_BUFFER + # IS_CLOCK_PAD IS_GLOBAL_CLOCK_BUFFER IS_GLOBAL_CLOCK_PAD IS_PAD + # IS_REGIONAL_CLOCK_BUFFER IS_REGIONAL_CLOCK_PAD IS_RESERVED IS_TEST + # IS_USED MANUAL_ROUTING NAME NUM_ARCS NUM_BELS NUM_INPUTS NUM_OUTPUTS + # NUM_PINS PRIMITIVE_COUNT PROHIBIT PROHIBIT_FROM_PERSIST RPM_X RPM_Y + # SITE_PIPS SITE_TYPE + + puts $fp "\t\t\{" + puts $fp "\t\t\t\"site\":\"$site\"," + puts $fp "\t\t\t\"type\":\"[get_property SITE_TYPE $site]\"," + puts $fp "\t\t\t\"site_pins\": \[" + foreach site_pin [get_site_pins -of_objects $site] { + # site_pin properties: + # CLASS 
DIRECTION INDEX INDEX_IN_BUS INDEX_IN_SITE INDEX_IN_TILE IS_BAD + # IS_INPUT IS_OUTPUT IS_PART_OF_BUS IS_TEST IS_USED NAME SITE_ID + # SPEED_INDEX + puts $fp "\t\t\t\{" + puts $fp "\t\t\t\t\"site_pin\":\"$site_pin\"," + puts $fp "\t\t\t\t\"index_in_site\":\"[get_property INDEX_IN_SITE $site_pin]\"," + puts $fp "\t\t\t\t\"direction\":\"[get_property DIRECTION $site_pin]\"," + set site_pin_node [get_nodes -of_objects $site_pin] + if {[llength $site_pin_node] == 0} { + puts $fp "\t\t\t\t\"node\":null," + } else { + puts $fp "\t\t\t\t\"node\":\"$site_pin_node\"," + } + puts $fp "\t\t\t\}," + } + puts $fp "\t\t\t\]," + puts $fp "\t\t\t\"site_pips\": \[" + foreach site_pip [get_site_pips -of_objects $site] { + puts $fp "\t\t\t\{" + # site_pips properties: + # CLASS FROM_PIN IS_FIXED IS_USED NAME SITE TO_PIN + puts $fp "\t\t\t\t\"site_pip\":\"$site_pip\"," + puts $fp "\t\t\t\t\"to_pin\":\"[get_property TO_PIN $site_pip]\"," + puts $fp "\t\t\t\t\"from_pin\":\"[get_property FROM_PIN $site_pip]\"," + puts $fp "\t\t\t\}," + } + puts $fp "\t\t\t\]," + + puts $fp "\t\t\t\"package_pins\": \[" + foreach package_pin [get_package_pins -of_objects $site] { + puts $fp "\t\t\t\t\{" + puts $fp "\t\t\t\t\t\"package_pin\":\"$package_pin\"," + puts $fp "\t\t\t\t\}," + } + puts $fp "\t\t\t\]," + + puts $fp "\t\t\}," + } + puts $fp "\t\]," + puts $fp "\t\"pips\": \[" + foreach pip [get_pips -of_objects $tile] { + # pip properties: + # CAN_INVERT CLASS IS_BUFFERED_2_0 IS_BUFFERED_2_1 IS_DIRECTIONAL + # IS_EXCLUDED_PIP IS_FIXED_INVERSION IS_INVERTED IS_PSEUDO IS_SITE_PIP + # IS_TEST_PIP NAME SPEED_INDEX TILE + puts $fp "\t\t\{" + puts $fp "\t\t\t\"pip\":\"$pip\"," + puts $fp "\t\t\t\"src_wire\":\"[get_wires -uphill -of_objects $pip]\"," + puts $fp "\t\t\t\"dst_wire\":\"[get_wires -downhill -of_objects $pip]\"," + puts $fp "\t\t\t\"is_pseudo\":\"[get_property IS_PSEUDO $pip]\"," + puts $fp "\t\t\t\"is_directional\":\"[get_property IS_DIRECTIONAL $pip]\"," + puts $fp 
"\t\t\t\"can_invert\":\"[get_property CAN_INVERT $pip]\"," + puts $fp "\t\t\}," + } + puts $fp "\t\]," + + puts $fp "\t\"wires\": \[" + foreach wire [get_wires -of_objects $tile] { + # wire properties: + # CLASS COST_CODE ID_IN_TILE_TYPE IS_CONNECTED IS_INPUT_PIN IS_OUTPUT_PIN + # IS_PART_OF_BUS NAME NUM_DOWNHILL_PIPS NUM_INTERSECTS NUM_PIPS + # NUM_TILE_PORTS NUM_UPHILL_PIPS SPEED_INDEX TILE_NAME TILE_PATTERN_OFFSET + puts $fp "\t\t\{" + puts $fp "\t\t\t\"wire\":\"$wire\"," + puts $fp "\t\t\}," + } + puts $fp "\t\]," + puts $fp "\}" + close $fp +} + +foreach node [get_nodes] { + file mkdir [file dirname $node] + set fname $node.json5 + puts $root_fp "node,,$fname" + + set fp [open $fname w] + # node properties: + # BASE_CLOCK_REGION CLASS COST_CODE COST_CODE_NAME IS_BAD IS_COMPLETE + # IS_GND IS_INPUT_PIN IS_OUTPUT_PIN IS_PIN IS_VCC NAME NUM_WIRES PIN_WIRE + # SPEED_CLASS + puts $fp "\{" + puts $fp "\t\"node\": \"$node\"," + puts $fp "\t\"wires\": \[" + foreach wire [get_wires -of_objects $node] { + # wire properties: + # CLASS COST_CODE ID_IN_TILE_TYPE IS_CONNECTED IS_INPUT_PIN IS_OUTPUT_PIN + # IS_PART_OF_BUS NAME NUM_DOWNHILL_PIPS NUM_INTERSECTS NUM_PIPS + # NUM_TILE_PORTS NUM_UPHILL_PIPS SPEED_INDEX TILE_NAME TILE_PATTERN_OFFSET + puts $fp "\t\t\{" + puts $fp "\t\t\t\"wire\":\"$wire\"," + puts $fp "\t\t\}," + } + puts $fp "\t\]" + puts $fp "\}" + close $fp +} + +close $root_fp diff --git a/fuzzers/074-dump_all/generate_after_dump.sh b/fuzzers/074-dump_all/generate_after_dump.sh new file mode 100755 index 00000000..a3226f52 --- /dev/null +++ b/fuzzers/074-dump_all/generate_after_dump.sh @@ -0,0 +1,13 @@ +#!/bin/bash -xe + +rm -r output +mkdir -p output +python3 reduce_tile_types.py \ + --root_dir specimen_001/ \ + --output_dir output +python3 create_node_tree.py \ + --dump_all_root_dir specimen_001/ \ + --ordered_wires_root_dir ../072-ordered_wires/specimen_001/ \ + --output_dir output +python3 reduce_site_types.py --output_dir output +python3 generate_grid.py 
--root_dir specimen_001/ --output_dir output diff --git a/fuzzers/074-dump_all/generate_grid.py b/fuzzers/074-dump_all/generate_grid.py new file mode 100644 index 00000000..b1e6d717 --- /dev/null +++ b/fuzzers/074-dump_all/generate_grid.py @@ -0,0 +1,664 @@ +""" Generate grid from database dump """ + +from __future__ import print_function +import argparse +import prjxray.lib +import pyjson5 as json5 +import multiprocessing +import progressbar +import os.path +import json +import datetime +import pickle +import inspect +import sys + +def get_tile_grid_info(fname): + with open(fname, 'r') as f: + tile = json5.load(f) + + return { + tile['tile']: { + 'type': tile['type'], + 'grid_x': tile['x'], + 'grid_y': tile['y'], + 'sites': dict( + (site['site'], site['type']) for site in tile['sites'] + ), + 'wires': set( + wire['wire'] for wire in tile['wires'], + ) + }, + } + +def read_json5(fname): + with open(fname, 'r') as f: + return json5.load(f) + +def generate_tilesizes(grid): + """ ***BROKEN DO NOT USE*** """ + assert False + + tilesizes = {} + tiles = grid['tiles'] + coord_to_tile = create_coord_to_tile(tiles) + + for tile in grid['tiles']: + tilesizes[grid['tiles'][tile]['type']] = { + 'grid_x_size': 1, + 'grid_y_size': None, + } + + x, y = zip(*coord_to_tile.keys()) + min_x = min(x) + max_x = max(x) + min_y = min(y) + max_y = max(y) + + for x in range(min_x, max_x+1): + tiles_slice = [(y, tiles[coord_to_tile[(x, y)]]['type']) for y in range(min_y, max_y+1) if tiles[coord_to_tile[(x, y)]]['type'] != 'NULL'] + + for (y1, tile_type), (y2, _) in zip(tiles_slice[::-1], tiles_slice[-2::-1]): + grid_y_size = y1-y2 + if tilesizes[tile_type]['grid_y_size'] is None: + tilesizes[tile_type]['grid_y_size'] = grid_y_size + else: + tilesizes[tile_type]['grid_y_size'] = min(tilesizes[tile_type]['grid_y_size'], grid_y_size) + + for tile_type in tilesizes: + if tilesizes[tile_type]['grid_y_size'] is None: + tilesizes[tile_type]['grid_y_size'] = 1 + + return tilesizes + +def 
is_edge_shared(edge1, edge2): + """ Returns true if edge1 or edge2 overlap + + >>> is_edge_shared((0, 1), (0, 1)) + True + >>> is_edge_shared((0, 2), (0, 1)) + True + >>> is_edge_shared((0, 1), (0, 2)) + True + >>> is_edge_shared((1, 2), (0, 3)) + True + >>> is_edge_shared((0, 3), (1, 2)) + True + >>> is_edge_shared((1, 2), (0, 2)) + True + >>> is_edge_shared((0, 2), (1, 2)) + True + >>> is_edge_shared((0, 2), (1, 3)) + True + >>> is_edge_shared((1, 3), (0, 2)) + True + >>> is_edge_shared((0, 1), (1, 2)) + False + >>> is_edge_shared((1, 2), (0, 1)) + False + >>> is_edge_shared((0, 1), (2, 3)) + False + >>> is_edge_shared((2, 3), (0, 1)) + False + """ + assert edge1[0] < edge1[1], edge1 + assert edge2[0] < edge2[1], edge2 + + if edge1[0] <= edge2[0]: + return edge2[0] < edge1[1] + else: + return edge1[0] < edge2[1] + +def share_edge(a, b): + """ Returns true if box defined by a and b share any edge. + + Box is defined as (x-min, y-min, x-max, y-max). + + >>> share_edge((0, 0, 1, 1), (1, 0, 2, 1)) + True + >>> share_edge((1, 0, 2, 1), (0, 0, 1, 1)) + True + >>> share_edge((0, 0, 1, 1), (0, 1, 1, 2)) + True + >>> share_edge((0, 1, 1, 2), (0, 0, 1, 1)) + True + >>> share_edge((0, 0, 1, 3), (1, 0, 2, 1)) + True + >>> share_edge((1, 0, 2, 1), (0, 0, 1, 3)) + True + >>> share_edge((0, 0, 3, 1), (0, 1, 1, 2)) + True + >>> share_edge((0, 1, 1, 2), (0, 0, 3, 1)) + True + >>> share_edge((0, 0, 1, 1), (1, 1, 2, 2)) + False + >>> share_edge((1, 1, 2, 2), (0, 0, 1, 1)) + False + >>> share_edge((0, 0, 1, 3), (1, 3, 2, 4)) + False + >>> share_edge((0, 0, 1, 3), (1, 2, 2, 4)) + True + """ + + a_x_min, a_y_min, a_x_max, a_y_max = a + b_x_min, b_y_min, b_x_max, b_y_max = b + + if a_x_min == b_x_max or a_x_max == b_x_min: + return is_edge_shared((a_y_min, a_y_max), (b_y_min, b_y_max)) + if a_y_min == b_y_max or a_y_max == b_y_min: + return is_edge_shared((a_x_min, a_x_max), (b_x_min, b_x_max)) + +def next_wire_in_dimension(wire1, tile1, wire2, tile2, tiles, x_wires, y_wires, wire_map, 
wires_in_node): + """ next_wire_in_dimension returns true if tile1 and tile2 are in the same + row and column, and must be adjacent. + """ + tile1_info = tiles[tile1] + tile2_info = tiles[tile2] + + tile1_x = tile1_info['grid_x'] + tile2_x = tile2_info['grid_x'] + tile1_y = tile1_info['grid_y'] + tile2_y = tile2_info['grid_y'] + + # All wires are in the same row or column or if each wire lies in its own + # row or column. + if len(y_wires) == 1 or len(x_wires) == len(wires_in_node) or abs(tile1_y-tile2_y) == 0: + ordered_wires = sorted(x_wires.keys()) + + idx1 = ordered_wires.index(tile1_x) + idx2 = ordered_wires.index(tile2_x) + + if len(x_wires[tile1_x]) == 1 and len(x_wires[tile2_x]) == 1: + return abs(idx1-idx2) == 1 + + if len(x_wires) == 1 or len(y_wires) == len(wires_in_node) or abs(tile1_x-tile2_x) == 0: + ordered_wires = sorted(y_wires.keys()) + + idx1 = ordered_wires.index(tile1_y) + idx2 = ordered_wires.index(tile2_y) + + if len(y_wires[tile1_y]) == 1 and len(y_wires[tile2_y]) == 1: + return abs(idx1-idx2) == 1 + + return None + +def only_wire(tile1, tile2, tiles, x_wires, y_wires): + """ only_wire returns true if tile1 and tile2 only have 1 wire in their respective x or y dimension. 
+ """ + tile1_info = tiles[tile1] + tile2_info = tiles[tile2] + + tile1_x = tile1_info['grid_x'] + tile2_x = tile2_info['grid_x'] + + tiles_x_adjacent = abs(tile1_x-tile2_x) == 1 + if tiles_x_adjacent and len(x_wires[tile1_x]) == 1 and len(x_wires[tile2_x]) == 1: + return True + + tile1_y = tile1_info['grid_y'] + tile2_y = tile2_info['grid_y'] + + tiles_y_adjacent = abs(tile1_y-tile2_y) == 1 + if tiles_y_adjacent and len(y_wires[tile1_y]) == 1 and len(y_wires[tile2_y]) == 1: + return True + + return None + +def is_directly_connected(node, node_tree, wire1, wire2): + if 'wires' in node_tree: + node_tree_wires = node_tree['wires'] + else: + if len(node_tree['edges']) == 1 and len(node_tree['joins']) == 0: + node_tree_wires = node_tree['edges'][0] + else: + return None + + if wire1 not in node_tree_wires: + return None + if wire2 not in node_tree_wires: + return None + + # Is there an edge that has wire1 next to wire2? + for edge in node_tree['edges']: + idx1 = None + idx2 = None + try: + idx1 = edge.index(wire1) + except ValueError: + pass + + try: + idx2 = edge.index(wire2) + except ValueError: + pass + + if idx1 is not None and idx2 is not None: + return abs(idx1 - idx2) == 1 + + if idx1 is not None and (idx1 != 0 and idx1 != len(edge)-1): + return False + + if idx2 is not None and (idx2 != 0 and idx2 != len(edge)-1): + return False + + # Is there a join of nodes between wire1 and wire2? + if wire1 in node_tree['joins']: + return wire2 in node_tree['joins'][wire1] + + if wire2 in node_tree['joins']: + assert wire1 not in node_tree['joins'][wire2] + + return None + +def is_connected(wire1, tile1, wire2, tile2, node, wires_in_tiles, wire_map, node_tree, tiles, x_wires, y_wires, wires_in_node): + """ Check if two wires are directly connected. 
""" + + next_wire_in_dim = next_wire_in_dimension(wire1, tile1, wire2, tile2, tiles, + x_wires, y_wires, + wire_map, wires_in_node) + if next_wire_in_dim is not None: + return next_wire_in_dim + + # Because there are multiple possible wire connections between these two + # tiles, consult the node_tree to determine if the two wires are actually connected. + # + # Warning: The node_tree is incomplete because it is not know how to extract + # ordered wire information from the node. + # + # Example node CLK_BUFG_REBUF_X60Y142/CLK_BUFG_REBUF_R_CK_GCLK0_BOT + # It does not appear to be possible to get ordered wire connection information + # for the first two wires connected to PIP + # CLK_BUFG_REBUF_X60Y117/CLK_BUFG_REBUF.CLK_BUFG_REBUF_R_CK_GCLK0_BOT<<->>CLK_BUFG_REBUF_R_CK_GCLK0_TOP + # + # However, it happens to be that theses wires are the only wires in their + # tiles, so the earlier "only wires in tile" check will pass. + + connected = is_directly_connected(node['node'], node_tree[node['node']], wire1, wire2) + if connected is not None: + return connected + + is_only_wire = only_wire(tile1, tile2, tiles, x_wires, y_wires) + if is_only_wire is not None: + return is_only_wire + + # The node_tree didn't specify these wires, and the wires are not + # unambiguously connected. 
+ return False + +def process_node(tileconn, key_history, node, wire_map, node_tree, tiles): + wires = [wire['wire'] for wire in node['wires']] + + wires_in_tiles = {} + x_wires = {} + y_wires = {} + for wire in wires: + wire_info = wire_map[wire] + + if wire_info['tile'] not in wires_in_tiles: + wires_in_tiles[wire_info['tile']] = [] + wires_in_tiles[wire_info['tile']].append(wire) + + + grid_x = tiles[wire_info['tile']]['grid_x'] + if grid_x not in x_wires: + x_wires[grid_x] = [] + x_wires[grid_x].append(wire) + + grid_y = tiles[wire_info['tile']]['grid_y'] + if grid_y not in y_wires: + y_wires[grid_y] = [] + y_wires[grid_y].append(wire) + + if len(wires) == 2: + wire1 = wires[0] + wire_info1 = wire_map[wire1] + wire2 = wires[1] + wire_info2 = wire_map[wire2] + update_tile_conn(tileconn, key_history, wire1, wire_info1, wire2, wire_info2, tiles) + return + + for idx, wire1 in enumerate(wires): + wire_info1 = wire_map[wire1] + for wire2 in wires[idx+1:]: + wire_info2 = wire_map[wire2] + + if not is_connected( + wire1, wire_info1['tile'], + wire2, wire_info2['tile'], + node, wires_in_tiles, wire_map, node_tree, tiles, x_wires, y_wires, wires): + continue + + update_tile_conn(tileconn, key_history, wire1, wire_info1, wire2, wire_info2, tiles) + +def update_tile_conn(tileconn, key_history, wirename1, wire1, wirename2, wire2, tiles): + # Ensure that (wire1, wire2) is sorted, so we can easy check if a connection + # already exists. 
+ + tile1 = tiles[wire1['tile']] + tile2 = tiles[wire2['tile']] + if ( + (wire1['type'], wire1['shortname'], tile1['grid_x'], tile1['grid_y']) > + (wire2['type'], wire2['shortname'], tile2['grid_x'], tile2['grid_y']) + ): + wire1, tile1, wire2, tile2 = wire2, tile2, wire1, tile1 + + tileconn.append({ + "grid_deltas": [ + tile2['grid_x'] - tile1['grid_x'], + tile2['grid_y'] - tile1['grid_y'], + ], + "tile_types": [ + tile1['type'], + tile2['type'], + ], + "wire_pair": [ + wire1['shortname'], + wire2['shortname'], + ], + }) + +def flatten_tile_conn(tileconn): + """ Convert tileconn that is key'd to identify specific wire pairs between tiles + key (tile1_type, wire1_name, tile2_type, wire2_name) to flat tile connect list + that relates tile types and relative coordinates and a full list of wires to + connect. """ + flat_tileconn = {} + + for conn in tileconn: + key = (tuple(conn['tile_types']), tuple(conn['grid_deltas'])) + + if key not in flat_tileconn: + flat_tileconn[key] = { + 'tile_types': conn['tile_types'], + 'grid_deltas': conn['grid_deltas'], + 'wire_pairs': set() + } + + flat_tileconn[key]['wire_pairs'].add(tuple(conn['wire_pair'])) + + def inner(): + for output in flat_tileconn.values(): + yield { + 'tile_types': output['tile_types'], + 'grid_deltas': output['grid_deltas'], + 'wire_pairs': tuple(output['wire_pairs']), + } + + return tuple(inner()) + +def is_tile_type(tiles, coord_to_tile, coord, tile_type): + if coord not in coord_to_tile: + return False + + target_tile = tiles[coord_to_tile[coord]] + return target_tile['type'] == tile_type + +def get_connections(wire, wire_info, conn, idx, coord_to_tile, tiles): + """ Yields (tile_coord, wire) for each wire that should be connected to specified wire. 
""" + pair = conn['wire_pairs'][idx] + wire_tile_type = wire_info['type'] + tile_types = conn['tile_types'] + shortname = wire_info['shortname'] + grid_deltas = conn['grid_deltas'] + + wire1 = tile_types[0] == wire_tile_type and shortname == pair[0] + wire2 = tile_types[1] == wire_tile_type and shortname == pair[1] + assert wire1 or wire2, (wire, conn) + + tile_of_wire = wire_info['tile'] + start_coord_x = tiles[tile_of_wire]['grid_x'] + start_coord_y = tiles[tile_of_wire]['grid_y'] + if wire1: + target_coord_x = start_coord_x + grid_deltas[0] + target_coord_y = start_coord_y + grid_deltas[1] + target_tile_type = tile_types[1] + + target_wire = pair[1] + target_tile = (target_coord_x, target_coord_y) + + if is_tile_type(tiles, coord_to_tile, target_tile, target_tile_type): + yield target_tile, target_wire + + if wire2: + target_coord_x = start_coord_x - grid_deltas[0] + target_coord_y = start_coord_y - grid_deltas[1] + target_tile_type = tile_types[0] + + target_wire = pair[0] + target_tile = (target_coord_x, target_coord_y) + + if is_tile_type(tiles, coord_to_tile, target_tile, target_tile_type): + yield target_tile, target_wire + +def make_connection(wire_nodes, wire1, wire2): + if wire_nodes[wire1] is wire_nodes[wire2]: + assert wire1 in wire_nodes[wire1] + assert wire2 in wire_nodes[wire2] + return + + new_node = wire_nodes[wire1] | wire_nodes[wire2] + + for wire in new_node: + wire_nodes[wire] = new_node + +def create_coord_to_tile(tiles): + coord_to_tile = {} + for tile, tileinfo in tiles.items(): + coord_to_tile[(tileinfo['grid_x'], tileinfo['grid_y'])] = tile + + return coord_to_tile + +def connect_wires(tiles, tileconn, wire_map): + """ Connect individual wires into groups of wires called nodes. """ + + # Initialize all nodes to originally only contain the wire by itself. 
+ wire_nodes = {} + for wire in wire_map: + wire_nodes[wire] = set([wire]) + + wire_connection_map = {} + for conn in tileconn: + for idx, (wire1, wire2) in enumerate(conn['wire_pairs']): + key1 = (conn['tile_types'][0], wire1) + if key1 not in wire_connection_map: + wire_connection_map[key1] = [] + wire_connection_map[key1].append((conn, idx)) + + key2 = (conn['tile_types'][1], wire2) + if key2 not in wire_connection_map: + wire_connection_map[key2] = [] + wire_connection_map[key2].append((conn, idx)) + + coord_to_tile = create_coord_to_tile(tiles) + + for wire, wire_info in progressbar.progressbar(wire_map.items()): + key = (wire_info['type'], wire_info['shortname']) + if key not in wire_connection_map: + continue + + for conn, idx in wire_connection_map[key]: + for target_tile, target_wire in get_connections(wire, wire_info, conn, idx, coord_to_tile, tiles): + + full_wire_name = coord_to_tile[target_tile] + '/' + target_wire + assert wire_map[full_wire_name]['shortname'] == target_wire, ( + target_tile, target_wire, wire, conn + ) + assert wire_map[full_wire_name]['tile'] == coord_to_tile[target_tile], ( + wire_map[full_wire_name]['tile'], coord_to_tile[target_tile] + ) + + make_connection(wire_nodes, wire, full_wire_name) + + # Find unique nodes + nodes = {} + for node in wire_nodes.values(): + nodes[id(node)] = node + + # Flatten to list of lists. 
+ return tuple(tuple(node) for node in nodes.values()) + + +def generate_tilegrid(pool, tiles): + wire_map = {} + + grid = { + 'segments': {}, + 'tiles': {}, + } + + num_tiles = 0 + for tile_type in tiles: + num_tiles += len(tiles[tile_type]) + + idx = 0 + with progressbar.ProgressBar(max_value=num_tiles) as bar: + for tile_type in tiles: + for tile in pool.imap_unordered( + get_tile_grid_info, + tiles[tile_type], + chunksize = 20, + ): + bar.update(idx) + + assert len(tile) == 1, tile + tilename = tuple(tile.keys())[0] + + for wire in tile[tilename]['wires']: + assert wire not in wire_map, (wire, wire_map) + assert wire.startswith(tilename + '/'), (wire, tilename) + + wire_map[wire] = { + 'tile': tilename, + 'type': tile[tilename]['type'], + 'shortname': wire[len(tilename)+1:], + } + + del tile[tilename]['wires'] + grid['tiles'].update(tile) + + idx += 1 + bar.update(idx) + + return grid, wire_map + +def generate_tileconn(pool, node_tree, nodes, wire_map, grid): + tileconn = [] + key_history = {} + raw_node_data = [] + with progressbar.ProgressBar(max_value=len(nodes)) as bar: + for idx, node in enumerate(pool.imap_unordered( + read_json5, + nodes, + chunksize = 20, + )): + bar.update(idx) + raw_node_data.append(node) + process_node(tileconn, key_history, node, wire_map, node_tree, grid['tiles']) + bar.update(idx+1) + + tileconn = flatten_tile_conn(tileconn) + + return tileconn, raw_node_data + +def main(): + parser = argparse.ArgumentParser(description="Reduces raw database dump into prototype tiles, grid, and connections.") + parser.add_argument('--root_dir', required=True) + parser.add_argument('--output_dir', required=True) + parser.add_argument('--verify_only', action='store_true') + + args = parser.parse_args() + + tiles, nodes = prjxray.lib.read_root_csv(args.root_dir) + + processes = min(multiprocessing.cpu_count(), 10) + print('{} Running {} processes'.format(datetime.datetime.now(), processes)) + pool = multiprocessing.Pool(processes=processes) + + 
node_tree_file = os.path.join(args.output_dir, 'node_tree.json') + + tilegrid_file = os.path.join(args.output_dir, 'tilegrid.json') + tileconn_file = os.path.join(args.output_dir, 'tileconn.json') + wire_map_file = os.path.join(args.output_dir, 'wiremap.pickle') + + if not args.verify_only: + print('{} Creating tile map'.format(datetime.datetime.now())) + grid, wire_map = generate_tilegrid(pool, tiles) + + with open(tilegrid_file, 'w') as f: + json.dump(grid, f, indent=2) + + with open(wire_map_file, 'wb') as f: + pickle.dump(wire_map, f) + + print('{} Reading node tree'.format(datetime.datetime.now())) + with open(node_tree_file) as f: + node_tree = json.load(f) + + print('{} Creating tile connections'.format(datetime.datetime.now())) + tileconn, raw_node_data = generate_tileconn(pool, node_tree, nodes, wire_map, grid) + + print('{} Writing tileconn'.format(datetime.datetime.now())) + with open(tileconn_file, 'w') as f: + json.dump(tileconn, f, indent=2) + else: + print('{} Reading tilegrid'.format(datetime.datetime.now())) + with open(tilegrid_file) as f: + grid = json.load(f) + + with open(wire_map_file, 'rb') as f: + wire_map = pickle.load(f) + + print('{} Reading raw_node_data'.format(datetime.datetime.now())) + raw_node_data = [] + with progressbar.ProgressBar(max_value=len(nodes)) as bar: + for idx, node in enumerate(pool.imap_unordered( + read_json5, + nodes, + chunksize = 20, + )): + bar.update(idx) + raw_node_data.append(node) + bar.update(idx+1) + + print('{} Reading tileconn'.format(datetime.datetime.now())) + with open(tileconn_file) as f: + tileconn = json.load(f) + + wire_nodes_file = os.path.join(args.output_dir, 'wire_nodes.pickle') + if os.path.exists(wire_nodes_file) and args.verify_only: + with open(wire_nodes_file, 'rb') as f: + wire_nodes = pickle.load(f) + else: + print("{} Connecting wires to verify tileconn".format(datetime.datetime.now())) + wire_nodes = connect_wires(grid['tiles'], tileconn, wire_map) + with open(wire_nodes_file, 'wb') as 
f: + pickle.dump(wire_nodes, f) + + print('{} Verifing tileconn'.format(datetime.datetime.now())) + error_nodes = [] + prjxray.lib.verify_nodes([ + (node['node'], tuple(wire['wire'] for wire in node['wires'])) + for node in raw_node_data + ], wire_nodes, error_nodes) + + if len(error_nodes) > 0: + error_nodes_file = os.path.join(args.output_dir, 'error_nodes.json') + with open(error_nodes_file, 'w') as f: + json.dump(error_nodes, f, indent=2) + + ignored_wires = [] + path_to_file = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + ignored_wires_file = os.path.join(path_to_file, 'ignored_wires.txt') + if os.path.exists(ignored_wires_file): + with open(ignored_wires_file) as f: + ignored_wires = set(l.strip() for l in f) + + if not prjxray.lib.check_errors(error_nodes, ignored_wires): + print('{} errors detected, see {} for details.'.format(len(error_nodes), error_nodes_file)) + sys.exit(1) + else: + print('{} errors ignored because of {}\nSee {} for details.'.format( + len(error_nodes), ignored_wires_file, error_nodes_file)) + +if __name__ == '__main__': + main() diff --git a/fuzzers/074-dump_all/generate_ignore_list.py b/fuzzers/074-dump_all/generate_ignore_list.py new file mode 100644 index 00000000..30b837d9 --- /dev/null +++ b/fuzzers/074-dump_all/generate_ignore_list.py @@ -0,0 +1,33 @@ +import json + +with open('output/error_nodes.json') as f: + flat_error_nodes = json.load(f) + +error_nodes = {} +for node, raw_node, generated_nodes in flat_error_nodes: + if node not in error_nodes: + error_nodes[node] = { + 'raw_node': set(raw_node), + 'generated_nodes': set(), + } + + + assert error_nodes[node]['raw_node'] == set(raw_node) + error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes))) + +for node, error in error_nodes.items(): + combined_generated_nodes = set() + for generated_node in error['generated_nodes']: + combined_generated_nodes |= set(generated_node) + + assert error['raw_node'] == combined_generated_nodes, 
(node, error) + + good_node = max(error['generated_nodes'], key=lambda x: len(x)) + bad_nodes = error['generated_nodes'] - set((good_node,)) + + if max(len(generated_node) for generated_node in bad_nodes) > 1: + assert False, node + else: + for generated_node in bad_nodes: + for wire in generated_node: + print(wire) diff --git a/fuzzers/074-dump_all/ignored_wires.txt b/fuzzers/074-dump_all/ignored_wires.txt new file mode 100644 index 00000000..533e5072 --- /dev/null +++ b/fuzzers/074-dump_all/ignored_wires.txt @@ -0,0 +1,110 @@ +LIOI3_X0Y141/LIOI_I2GCLK_TOP1 +CMT_TOP_R_UPPER_B_X8Y135/CMT_PHASER_UP_DQS_TO_PHASER_D +LIOI3_X0Y145/LIOI_I2GCLK_BOT1 +LIOI3_TBYTESRC_X0Y19/LIOI_I2GCLK_TOP1 +RIOI3_TBYTESRC_X43Y7/RIOI_I2GCLK_BOT1 +LIOI3_TBYTESRC_X0Y43/LIOI_I2GCLK_TOP1 +LIOI3_TBYTESRC_X0Y7/LIOI_I2GCLK_BOT1 +LIOI3_TBYTETERM_X0Y13/LIOI_I2GCLK_TOP1 +RIOI3_X43Y29/RIOI_I2GCLK_TOP1 +RIOI3_X43Y33/RIOI_I2GCLK_BOT1 +LIOI3_X0Y33/LIOI_I2GCLK_BOT1 +LIOI3_X0Y29/LIOI_I2GCLK_TOP1 +LIOI3_TBYTESRC_X0Y31/LIOI_I2GCLK_TOP1 +RIOI3_TBYTESRC_X43Y19/RIOI_I2GCLK_TOP1 +LIOI3_TBYTESRC_X0Y43/LIOI_I2GCLK_BOT1 +LIOI3_TBYTETERM_X0Y63/LIOI_I2GCLK_TOP1 +RIOI3_TBYTETERM_X43Y37/RIOI_I2GCLK_TOP1 +LIOI3_TBYTETERM_X0Y113/LIOI_I2GCLK_TOP1 +LIOI3_TBYTESRC_X0Y69/LIOI_I2GCLK_TOP1 +RIOI3_X43Y17/RIOI_I2GCLK_TOP1 +RIOI3_X43Y21/RIOI_I2GCLK_BOT1 +LIOI3_TBYTESRC_X0Y57/LIOI_I2GCLK_BOT1 +RIOI3_TBYTETERM_X43Y13/RIOI_I2GCLK_TOP1 +LIOI3_TBYTETERM_X0Y37/LIOI_I2GCLK_TOP1 +LIOI3_X0Y9/LIOI_I2GCLK_BOT1 +CMT_TOP_R_LOWER_T_X8Y18/CMT_PHASER_DOWN_DQS_TO_PHASER_A +LIOI3_X0Y5/LIOI_I2GCLK_TOP1 +RIOI3_TBYTESRC_X43Y57/RIOI_I2GCLK_BOT1 +RIOI3_TBYTESRC_X43Y31/RIOI_I2GCLK_BOT1 +LIOI3_TBYTETERM_X0Y87/LIOI_I2GCLK_BOT1 +LIOI3_TBYTESRC_X0Y81/LIOI_I2GCLK_BOT1 +RIOI3_TBYTESRC_X43Y43/RIOI_I2GCLK_BOT1 +LIOI3_TBYTESRC_X0Y93/LIOI_I2GCLK_BOT1 +RIOI3_TBYTESRC_X43Y69/RIOI_I2GCLK_BOT1 +LIOI3_TBYTETERM_X0Y13/LIOI_I2GCLK_BOT1 +RIOI3_TBYTESRC_X43Y31/RIOI_I2GCLK_TOP1 +LIOI3_TBYTETERM_X0Y63/LIOI_I2GCLK_BOT1 +LIOI3_TBYTESRC_X0Y143/LIOI_I2GCLK_BOT1 
+LIOI3_X0Y91/LIOI_I2GCLK_TOP1 +LIOI3_X0Y95/LIOI_I2GCLK_BOT1 +CMT_TOP_R_UPPER_B_X8Y83/CMT_PHASER_UP_DQS_TO_PHASER_D +LIOI3_TBYTETERM_X0Y137/LIOI_I2GCLK_BOT1 +LIOI3_TBYTESRC_X0Y57/LIOI_I2GCLK_TOP1 +LIOI3_TBYTETERM_X0Y87/LIOI_I2GCLK_TOP1 +CMT_TOP_R_LOWER_T_X8Y70/CMT_PHASER_DOWN_DQS_TO_PHASER_A +LIOI3_X0Y59/LIOI_I2GCLK_BOT1 +LIOI3_X0Y55/LIOI_I2GCLK_TOP1 +LIOI3_TBYTESRC_X0Y131/LIOI_I2GCLK_TOP1 +RIOI3_TBYTESRC_X43Y69/RIOI_I2GCLK_TOP1 +LIOI3_TBYTESRC_X0Y143/LIOI_I2GCLK_TOP1 +LIOI3_TBYTESRC_X0Y7/LIOI_I2GCLK_TOP1 +RIOI3_X43Y67/RIOI_I2GCLK_TOP1 +RIOI3_X43Y71/RIOI_I2GCLK_BOT1 +LIOI3_TBYTETERM_X0Y37/LIOI_I2GCLK_BOT1 +LIOI3_TBYTESRC_X0Y93/LIOI_I2GCLK_TOP1 +RIOI3_TBYTESRC_X43Y57/RIOI_I2GCLK_TOP1 +RIOI3_TBYTETERM_X43Y63/RIOI_I2GCLK_BOT1 +LIOI3_TBYTETERM_X0Y113/LIOI_I2GCLK_BOT1 +RIOI3_TBYTETERM_X43Y87/RIOI_I2GCLK_BOT1 +LIOI3_TBYTETERM_X0Y137/LIOI_I2GCLK_TOP1 +LIOI3_TBYTESRC_X0Y107/LIOI_I2GCLK_BOT1 +RIOI3_TBYTETERM_X43Y13/RIOI_I2GCLK_BOT1 +RIOI3_TBYTETERM_X43Y87/RIOI_I2GCLK_TOP1 +LIOI3_X0Y45/LIOI_I2GCLK_BOT1 +CMT_TOP_R_UPPER_B_X8Y31/CMT_PHASER_UP_DQS_TO_PHASER_D +LIOI3_X0Y41/LIOI_I2GCLK_TOP1 +LIOI3_TBYTESRC_X0Y19/LIOI_I2GCLK_BOT1 +LIOI3_TBYTESRC_X0Y69/LIOI_I2GCLK_BOT1 +RIOI3_X43Y79/RIOI_I2GCLK_TOP1 +RIOI3_X43Y83/RIOI_I2GCLK_BOT1 +RIOI3_X43Y45/RIOI_I2GCLK_BOT1 +RIOI3_X43Y41/RIOI_I2GCLK_TOP1 +CMT_TOP_L_UPPER_B_X106Y31/CMT_PHASER_UP_DQS_TO_PHASER_D +RIOI3_TBYTESRC_X43Y19/RIOI_I2GCLK_BOT1 +LIOI3_X0Y71/LIOI_I2GCLK_BOT1 +LIOI3_X0Y67/LIOI_I2GCLK_TOP1 +LIOI3_X0Y129/LIOI_I2GCLK_TOP1 +LIOI3_X0Y133/LIOI_I2GCLK_BOT1 +RIOI3_TBYTETERM_X43Y37/RIOI_I2GCLK_BOT1 +LIOI3_TBYTESRC_X0Y131/LIOI_I2GCLK_BOT1 +RIOI3_X43Y59/RIOI_I2GCLK_BOT1 +CMT_TOP_L_LOWER_T_X106Y70/CMT_PHASER_DOWN_DQS_TO_PHASER_A +RIOI3_X43Y55/RIOI_I2GCLK_TOP1 +LIOI3_X0Y105/LIOI_I2GCLK_TOP1 +LIOI3_X0Y109/LIOI_I2GCLK_BOT1 +CMT_TOP_R_LOWER_T_X8Y122/CMT_PHASER_DOWN_DQS_TO_PHASER_A +RIOI3_TBYTESRC_X43Y93/RIOI_I2GCLK_TOP1 +LIOI3_TBYTESRC_X0Y31/LIOI_I2GCLK_BOT1 +LIOI3_X0Y17/LIOI_I2GCLK_TOP1 +LIOI3_X0Y21/LIOI_I2GCLK_BOT1 
+LIOI3_TBYTESRC_X0Y81/LIOI_I2GCLK_TOP1 +LIOI3_TBYTESRC_X0Y119/LIOI_I2GCLK_BOT1 +LIOI3_TBYTESRC_X0Y107/LIOI_I2GCLK_TOP1 +RIOI3_TBYTETERM_X43Y63/RIOI_I2GCLK_TOP1 +LIOI3_X0Y83/LIOI_I2GCLK_BOT1 +LIOI3_X0Y79/LIOI_I2GCLK_TOP1 +RIOI3_TBYTESRC_X43Y7/RIOI_I2GCLK_TOP1 +RIOI3_X43Y95/RIOI_I2GCLK_BOT1 +CMT_TOP_L_UPPER_B_X106Y83/CMT_PHASER_UP_DQS_TO_PHASER_D +RIOI3_X43Y91/RIOI_I2GCLK_TOP1 +RIOI3_TBYTESRC_X43Y43/RIOI_I2GCLK_TOP1 +CMT_TOP_L_LOWER_T_X106Y18/CMT_PHASER_DOWN_DQS_TO_PHASER_A +RIOI3_X43Y9/RIOI_I2GCLK_BOT1 +RIOI3_X43Y5/RIOI_I2GCLK_TOP1 +RIOI3_TBYTESRC_X43Y81/RIOI_I2GCLK_BOT1 +LIOI3_TBYTESRC_X0Y119/LIOI_I2GCLK_TOP1 +RIOI3_TBYTESRC_X43Y93/RIOI_I2GCLK_BOT1 +RIOI3_TBYTESRC_X43Y81/RIOI_I2GCLK_TOP1 +LIOI3_X0Y121/LIOI_I2GCLK_BOT1 +LIOI3_X0Y117/LIOI_I2GCLK_TOP1 diff --git a/fuzzers/074-dump_all/reduce_site_types.py b/fuzzers/074-dump_all/reduce_site_types.py new file mode 100644 index 00000000..b869c695 --- /dev/null +++ b/fuzzers/074-dump_all/reduce_site_types.py @@ -0,0 +1,55 @@ +""" Reduce sites types to prototypes that are always correct. + +reduce_tile_types.py generates per tile type site types. reduce_site_types.py +takes all site types across all tiles and creates generic site types that are +valid for all tile types. 
+ +""" + +import argparse +import prjxray.lib +import os +import os.path +import re +import json + +def main(): + parser = argparse.ArgumentParser(description="Reduces per tile site types to generic site types.") + parser.add_argument('--output_dir', required=True) + + args = parser.parse_args() + + SITE_TYPE = re.compile('^tile_type_(.+)_site_type_(.+)\.json$') + site_types = {} + for path in os.listdir(args.output_dir): + match = SITE_TYPE.fullmatch(path) + if match is None: + continue + + site_type = match.group(2) + if site_type not in site_types: + site_types[site_type] = [] + + site_types[site_type].append(path) + + for site_type in site_types: + proto_site_type = None + for instance in site_types[site_type]: + with open(os.path.join(args.output_dir, instance)) as f: + instance_site_type = json.load(f) + + if proto_site_type is None: + proto_site_type = instance_site_type + else: + prjxray.lib.compare_prototype_site( + proto_site_type, + instance_site_type, + ) + + with open(os.path.join(args.output_dir, + 'site_type_{}.json'.format(site_type)), 'w') as f: + json.dump(proto_site_type, f, indent=2) + +if __name__ == '__main__': + main() + diff --git a/fuzzers/074-dump_all/reduce_tile_types.py b/fuzzers/074-dump_all/reduce_tile_types.py new file mode 100644 index 00000000..d51528ea --- /dev/null +++ b/fuzzers/074-dump_all/reduce_tile_types.py @@ -0,0 +1,323 @@ +""" Reduce tile types to prototypes that are always correct. + +The dump-all generate.tcl dumps all instances of each tile type. Some tiles +are missing wires. reduce_tile_types.py generates the superset tile that +encompases all tiles of that type. If it is not possible to generate a super +set tile, an error will be generated. 
+ +""" + +import argparse +import prjxray.lib +import datetime +import os.path +import json +import pyjson5 as json5 +import progressbar +import multiprocessing +import os +import functools +import re + +def check_and_strip_prefix(name, prefix): + assert name.startswith(prefix), repr((name, prefix)) + return name[len(prefix):] + +def flatten_site_pins(tile, site, site_pins, site_pin_node_to_wires): + def inner(): + for site_pin in site_pins: + wires = tuple(site_pin_node_to_wires(tile, site_pin['node'])) + + if len(wires) == 0: + yield (check_and_strip_prefix(site_pin['site_pin'], site+'/'), None) + continue + + assert len(wires) == 1, repr(wires) + + yield (check_and_strip_prefix(site_pin['site_pin'], site+'/'), wires[0]) + + return dict(inner()) + + +# All site names appear to follow the pattern _XY. +# Generally speaking, only the tile relatively coordinates are required to +# assemble arch defs, so we re-origin the coordinates to be relative to the tile +# (e.g. start at X0Y0) and discard the prefix from the name. +SITE_COORDINATE_PATTERN = re.compile('^(.+)_X([0-9]+)Y([0-9]+)$') + +def find_origin_coordinate(sites): + """ Find the coordinates of each site within the tile, and then subtract the + smallest coordinate to re-origin them all to be relative to the tile. 
+ """ + + if len(sites) == 0: + return 0, 0 + + def inner_(): + for site in sites: + coordinate = SITE_COORDINATE_PATTERN.match(site['site']) + assert coordinate is not None, site + + x_coord = int(coordinate.group(2)) + y_coord = int(coordinate.group(3)) + yield x_coord, y_coord + + x_coords, y_coords = zip(*inner_()) + min_x_coord = min(x_coords) + min_y_coord = min(y_coords) + + return min_x_coord, min_y_coord + +def get_sites(tile, site_pin_node_to_wires): + min_x_coord, min_y_coord = find_origin_coordinate(tile['sites']) + + for site in tile['sites']: + orig_site_name = site['site'] + coordinate = SITE_COORDINATE_PATTERN.match(orig_site_name) + + x_coord = int(coordinate.group(2)) + y_coord = int(coordinate.group(3)) + + yield ( + { + 'name': 'X{}Y{}'.format(x_coord - min_x_coord, y_coord - min_y_coord), + 'prefix': coordinate.group(1), + 'x_coord': x_coord - min_x_coord, + 'y_coord': y_coord - min_y_coord, + 'type': site['type'], + 'site_pins': dict(flatten_site_pins( + tile['tile'], + site['site'], site['site_pins'], site_pin_node_to_wires)), + } + ) + + +def compare_sites_and_update(tile, sites, new_sites): + for site_a, site_b in zip(sites, new_sites): + assert site_a['type'] == site_b['type'] + assert site_a['site_pins'].keys() == site_b['site_pins'].keys() + + for site_pin in site_a['site_pins']: + if site_a['site_pins'][site_pin] is not None and site_b['site_pins'][site_pin] is not None: + assert site_a['site_pins'][site_pin] == site_b['site_pins'][site_pin] + elif site_a['site_pins'][site_pin] is None and site_b['site_pins'][site_pin] is not None: + site_a['site_pins'][site_pin] = site_b['site_pins'][site_pin] + + +def get_prototype_site(site): + proto = {} + proto['type'] = site['type'] + proto['site_pins'] = {} + proto['site_pips'] = {} + for site_pin in site['site_pins']: + name = check_and_strip_prefix(site_pin['site_pin'], site['site'] + '/') + + proto['site_pins'][name] = { + 'direction': site_pin['direction'], + 'index_in_site': 
site_pin['index_in_site'], + } + + for site_pip in site['site_pips']: + name = check_and_strip_prefix(site_pip['site_pip'], site['site'] + '/') + + proto['site_pips'][name] = { + 'to_pin': site_pip['to_pin'], + 'from_pin': site_pip['from_pin'], + } + + return proto + +def get_pips(tile, pips): + proto_pips = {} + + for pip in pips: + name = check_and_strip_prefix(pip['pip'], tile + '/') + + proto_pips[name] = { + 'src_wire': check_and_strip_prefix(pip['src_wire'], tile + '/') + if pip['src_wire'] is not None else None, + 'dst_wire': check_and_strip_prefix(pip['dst_wire'], tile + '/') + if pip['dst_wire'] is not None else None, + 'is_pseudo': pip['is_pseudo'], + 'is_directional': pip['is_directional'], + 'can_invert': pip['can_invert'], + } + + return proto_pips + +def compare_and_update_pips(pips, new_pips): + # Pip names are always the same, but sometimes the src_wire or dst_wire + # may be missing. + + assert pips.keys() == new_pips.keys(), repr((pips.keys(), new_pips.keys())) + for name in pips: + if pips[name]['src_wire'] is not None and new_pips[name]['src_wire'] is not None: + assert pips[name]['src_wire'] == new_pips[name]['src_wire'], repr(( + pips[name]['src_wire'], + new_pips[name]['src_wire'], + )) + elif pips[name]['src_wire'] is None and new_pips[name]['src_wire'] is not None: + pips[name]['src_wire'] = new_pips[name]['src_wire'] + + if pips[name]['dst_wire'] is not None and new_pips[name]['dst_wire'] is not None: + assert pips[name]['dst_wire'] == new_pips[name]['dst_wire'], repr(( + pips[name]['dst_wire'], + new_pips[name]['dst_wire'], + )) + elif pips[name]['dst_wire'] is None and new_pips[name]['dst_wire'] is not None: + pips[name]['dst_wire'] = new_pips[name]['dst_wire'] + + for k in ['is_pseudo', 'is_directional', 'can_invert']: + assert pips[name][k] == new_pips[name][k], (k, pips[name][k], new_pips[name][k]) + +def check_wires(wires, sites, pips): + """ Verify that the wires generates from nodes are a superset of wires in + sites and pips """ + 
if sites is not None: + for site in sites: + for wire_to_site_pin in site['site_pins'].values(): + if wire_to_site_pin is not None: + assert wire_to_site_pin in wires, repr((wire_to_site_pin, wires)) + + if pips is not None: + for pip in pips.values(): + if pip['src_wire'] is not None: + assert pip['src_wire'] in wires, repr((pip['src_wire'], wires)) + if pip['dst_wire'] is not None: + assert pip['dst_wire'] in wires, repr((pip['dst_wire'], wires)) + +def read_json5(fname, nodes): + node_lookup = prjxray.lib.NodeLookup() + node_lookup.load_from_nodes(nodes) + + #print('{} Reading {} (in pid {})'.format(datetime.datetime.now(), fname, os.getpid())) + with open(fname) as f: + tile = json5.load(f) + + #print('{} Done reading {}'.format(datetime.datetime.now(), fname)) + def get_site_types(): + for site in tile['sites']: + yield get_prototype_site(site) + + site_types = tuple(get_site_types()) + sites = tuple(get_sites(tile, node_lookup.site_pin_node_to_wires)) + pips = get_pips(tile['tile'], tile['pips']) + def inner(): + for wire in tile['wires']: + assert wire['wire'].startswith(tile['tile'] + '/') + yield wire['wire'][len(tile['tile'])+1:] + + wires = set(inner()) + wires_from_nodes = set(node_lookup.wires_for_tile(tile['tile'])) + assert len(wires_from_nodes - wires) == 0, repr((wires, wires_from_nodes)) + + return fname, tile, site_types, sites, pips, wires + +def reduce_tile(pool, site_types, tile_type, tile_instances, node_lookup): + sites = None + pips = None + wires = set() + + with progressbar.ProgressBar(max_value=len(tile_instances)) as bar: + chunksize = 20 + if len(tile_instances) < chunksize*2: + iter = map(lambda file: read_json5(file, node_lookup.nodes), tile_instances) + else: + print('{} Using pool.imap_unordered'.format(datetime.datetime.now())) + iter = pool.imap_unordered( + functools.partial(read_json5, nodes=node_lookup.nodes), + tile_instances, + chunksize=chunksize, + ) + + for idx, (fname, tile, new_site_types, new_sites, new_pips, 
new_wires) in enumerate(iter): + bar.update(idx) + + assert tile['type'] == tile_type, repr((tile['tile'], tile_type)) + + for site_type in new_site_types: + if site_type['type'] in site_types: + prjxray.lib.compare_prototype_site(site_type, site_types[site_type['type']]) + else: + site_types[site_type['type']] = site_type + + # Sites are expect to always be the same + if sites is None: + sites = new_sites + else: + compare_sites_and_update(tile['tile'], sites, new_sites) + + if pips is None: + pips = new_pips + else: + compare_and_update_pips(pips, new_pips) + + wires |= new_wires + + bar.update(idx+1) + + check_wires(wires, sites, pips) + + return { + 'tile_type': tile_type, + 'sites': sites, + 'pips': pips, + 'wires': tuple(wires), + } + + +def main(): + parser = argparse.ArgumentParser(description="Reduces raw database dump into prototype tiles, grid, and connections.") + parser.add_argument('--root_dir', required=True) + parser.add_argument('--output_dir', required=True) + parser.add_argument('--ignore_cache', action='store_true') + + args = parser.parse_args() + + print('{} Reading root.csv'.format(datetime.datetime.now())) + tiles, nodes = prjxray.lib.read_root_csv(args.root_dir) + + print('{} Loading node<->wire mapping'.format(datetime.datetime.now())) + node_lookup = prjxray.lib.NodeLookup() + node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle') + if os.path.exists(node_lookup_file) and not args.ignore_cache: + node_lookup.load_from_file(node_lookup_file) + else: + node_lookup.load_from_root_csv(nodes) + node_lookup.save_to_file(node_lookup_file) + + site_types = {} + + processes = min(multiprocessing.cpu_count(), 10) + print('Running {} processes'.format(processes)) + pool = multiprocessing.Pool(processes=processes) + + for tile_type in sorted(tiles.keys()): + #for tile_type in ['CLBLL_L', 'CLBLL_R', 'CLBLM_L', 'CLBLM_R', 'INT_L', 'INT_L']: + tile_type_file = os.path.join(args.output_dir, 'tile_type_{}.json'.format(tile_type)) + site_types = 
{} + if os.path.exists(tile_type_file): + print('{} Skip reduced tile for {}'.format(datetime.datetime.now(), tile_type)) + continue + print('{} Generating reduced tile for {}'.format(datetime.datetime.now(), tile_type)) + reduced_tile = reduce_tile( + pool, + site_types, + tile_type, tiles[tile_type], + node_lookup) + for site_type in site_types: + with open(os.path.join( + args.output_dir, + 'tile_type_{}_site_type_{}.json'.format( + tile_type, + site_types[site_type]['type'] + )), 'w') as f: + json.dump(site_types[site_type], f, indent=2) + + with open(tile_type_file, 'w') as f: + json.dump(reduced_tile, f, indent=2) + + +if __name__ == '__main__': + main() + diff --git a/fuzzers/Makefile b/fuzzers/Makefile index cc84552a..d388990d 100644 --- a/fuzzers/Makefile +++ b/fuzzers/Makefile @@ -29,3 +29,6 @@ $(eval $(call fuzzer,057-bipips,056-rempips)) $(eval $(call fuzzer,058-hclkpips,056-rempips)) $(eval $(call fuzzer,070-tileconn,005-tilegrid)) $(eval $(call fuzzer,071-ppips,057-bipips 058-hclkpips)) +$(eval $(call fuzzer,072-ordered_wires,)) +$(eval $(call fuzzer,073-get_counts,)) +$(eval $(call fuzzer,074-dump_all,072-ordered_wires)) diff --git a/prjxray/__init__.py b/prjxray/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/prjxray/connections.py b/prjxray/connections.py new file mode 100644 index 00000000..493759f2 --- /dev/null +++ b/prjxray/connections.py @@ -0,0 +1,66 @@ +from collections import namedtuple + +WireInGrid = namedtuple('WireInGrid', 'tile grid_x grid_y wire') +Connection = namedtuple('Connection', 'wire_a wire_b') + +class Connections(object): + def __init__(self, tilegrid, tileconn, tile_wires): + self.grid = tilegrid['tiles'] + self.tile_wires = tile_wires + self.coord_to_tile = {} + self.coord_to_tile_type = {} + + for tile, tile_info in self.grid.items(): + self.coord_to_tile[(tile_info['grid_x'], tile_info['grid_y'])] = tile + self.coord_to_tile_type[(tile_info['grid_x'], tile_info['grid_y'])] = tile_info['type'] + + # 
Make sure we have tile type info for every tile in the grid. + assert tile_info['type'] in self.tile_wires, (tile_info['type'], self.tile_wires.keys()) + + self.potential_connections = {} + + for conn in tileconn: + grid_deltas = conn['grid_deltas'] + tile_types = conn['tile_types'] + + for pairs in conn['wire_pairs']: + key = (tile_types[0], pairs[0]) + if key not in self.potential_connections: + self.potential_connections[key] = [] + self.potential_connections[key].append(( + grid_deltas, tile_types[1], pairs[1] + )) + + def all_possible_connections_from(self, wire_in_grid): + tile_type = self.coord_to_tile_type[(wire_in_grid.grid_x, wire_in_grid.grid_y)] + + key = (tile_type, wire_in_grid.wire) + + if key not in self.potential_connections: + return + + for relative_coord, target_tile_type, target_wire in ( + self.potential_connections[key]): + rel_x, rel_y = relative_coord + target_coord = (wire_in_grid.grid_x+rel_x, wire_in_grid.grid_y+rel_y) + + if target_coord in self.coord_to_tile_type: + if self.coord_to_tile_type[target_coord] == target_tile_type: + yield Connection(wire_in_grid, WireInGrid( + tile = self.coord_to_tile[target_coord], + grid_x = target_coord[0], + grid_y = target_coord[1], + wire = target_wire)) + + def get_connections(self): + """ Yields Connection objects that represent all connections present in + the grid based on tileconn """ + for tile, tile_info in self.grid.items(): + for wire in self.tile_wires[tile_info['type']]: + wire_in_grid = WireInGrid( + tile = tile, + grid_x = tile_info['grid_x'], + grid_y = tile_info['grid_y'], + wire = wire) + for potential_connection in self.all_possible_connections_from(wire_in_grid): + yield potential_connection diff --git a/prjxray/db.py b/prjxray/db.py new file mode 100644 index 00000000..2f194c93 --- /dev/null +++ b/prjxray/db.py @@ -0,0 +1,96 @@ +import os.path +import json +from prjxray import grid +from prjxray import tile +from prjxray import connections + +def 
get_available_databases(prjxray_root): + """ Return set of available directory to databases given the root directory + of prjxray-db + """ + db_types = set() + for d in os.listdir(prjxray_root): + if d.startswith("."): + continue + + dpath = os.path.join(prjxray_root, d) + + if os.path.exists(os.path.join(dpath, "settings.sh")): + db_types.add(dpath) + + return db_types + +class Database(object): + def __init__(self, db_root): + """ Create project x-ray Database at given db_root. + + db_root: Path to directory containing settings.sh, *.db, tilegrid.json and + tileconn.json + + """ + self.db_root = db_root + self.tilegrid = None + self.tileconn = None + self.tile_types = None + + self.tile_types = {} + for f in os.listdir(self.db_root): + if f.endswith('.json') and f.startswith('tile_type_'): + tile_type = f[len('tile_type_'):-len('.json')].lower() + + segbits = os.path.join(self.db_root, 'segbits_{}.db'.format(tile_type)) + if not os.path.isfile(segbits): + segbits = None + + mask = os.path.join(self.db_root, 'mask_{}.db'.format(tile_type)) + if not os.path.isfile(mask): + mask = None + + tile_type_file = os.path.join(self.db_root, 'tile_type_{}.json'.format(tile_type.upper())) + if not os.path.isfile(tile_type_file): + tile_type_file = None + + self.tile_types[tile_type.upper()] = tile.TileDbs( + segbits = segbits, + mask = mask, + tile_type = tile_type_file, + ) + + def get_tile_types(self): + """ Return list of tile types """ + return self.tile_types.keys() + + def get_tile_type(self, tile_type): + """ Return Tile object for given tilename. """ + return tile.Tile(tile_type, self.tile_types[tile_type]) + + def _read_tilegrid(self): + """ Read tilegrid database if not already read. """ + if not self.tilegrid: + with open(os.path.join(self.db_root, 'tilegrid.json')) as f: + self.tilegrid = json.load(f) + + def _read_tileconn(self): + """ Read tileconn database if not already read. 
""" + if not self.tileconn: + with open(os.path.join(self.db_root, 'tileconn.json')) as f: + self.tileconn = json.load(f) + + def grid(self): + """ Return Grid object for database. """ + self._read_tilegrid() + return grid.Grid(self.tilegrid) + + def _read_tile_types(self): + for tile_type, db in self.tile_types.items(): + with open(db.tile_type) as f: + self.tile_types[tile_type] = json.load(f) + + def connections(self): + self._read_tilegrid() + self._read_tileconn() + self._read_tile_types() + + tile_wires = dict((tile_type, db['wires']) + for tile_type, db in self.tile_types.items()) + return connections.Connections(self.tilegrid, self.tileconn, tile_wires) diff --git a/prjxray/grid.py b/prjxray/grid.py new file mode 100644 index 00000000..36f47025 --- /dev/null +++ b/prjxray/grid.py @@ -0,0 +1,51 @@ +from collections import namedtuple + +GridLoc = namedtuple('GridLoc', 'grid_x grid_y') +GridInfo = namedtuple('GridInfo', 'segment sites tile_type') + +class Grid(object): + """ Object that represents grid for a given database. + + Provides methods to inspect grid by name or location. Also provides mapping + of segment offsets for particular grid locations and their tile types. + """ + def __init__(self, tilegrid): + self.tilegrid = tilegrid + self.loc = {} + self.tileinfo = {} + + for tile in self.tilegrid['tiles']: + tileinfo = self.tilegrid['tiles'][tile] + grid_loc = GridLoc(tileinfo['grid_x'], tileinfo['grid_y']) + self.loc[grid_loc] = tile + self.tileinfo[tile] = GridInfo( + segment = tileinfo['segment'] if 'segment' in tileinfo else None, + sites = tileinfo['sites'], + tile_type = tileinfo['type']) + + x, y = zip(*self.loc.keys()) + self._dims = (min(x), max(x), min(y), max(y)) + + def tile_locations(self): + """ Return list of tile locations. """ + return self.loc.keys() + + def dims(self): + """ Returns (x_min, x_max, y_min, y_max) for given Grid. 
""" + return self._dims + + def is_populated(self, grid_loc): + return grid_loc in self.loc + + def loc_of_tilename(self, tilename): + tileinfo = self.tilegrid['tiles'][tilename] + return GridLoc(tileinfo['grid_x'], tileinfo['grid_y']) + + def tilename_at_loc(self, grid_loc): + return self.loc[grid_loc] + + def gridinfo_at_loc(self, grid_loc): + return self.tileinfo[self.loc[grid_loc]] + + def gridinfo_at_tilename(self, tilename): + return self.tileinfo[tilename] diff --git a/prjxray/lib.py b/prjxray/lib.py new file mode 100644 index 00000000..d2a64d47 --- /dev/null +++ b/prjxray/lib.py @@ -0,0 +1,142 @@ +import os.path +import csv +import pickle +import pyjson5 as json5 +import progressbar + +def read_root_csv(root_dir): + """ Reads root.csv from raw db directory. + + This should only be used during database generation. + + """ + tiles = {} + nodes = [] + + with open(os.path.join(root_dir, 'root.csv')) as f: + for d in csv.DictReader(f): + if d['filetype'] == 'tile': + if d['subtype'] not in tiles: + tiles[d['subtype']] = [] + + tiles[d['subtype']].append(os.path.join(root_dir, d['filename'])) + elif d['filetype'] == 'node': + nodes.append(os.path.join(root_dir, d['filename'])) + + return tiles, nodes + +def verify_nodes(raw_nodes, nodes, error_nodes): + """ Compares raw_nodes with generated_nodes and adds errors to error_nodes. + + Args: + raw_nodes - Iterable of (node name, iterable of wires in node). + nodes - Iterable of iterable of wires in nodes. + error_nodes - List to be appended to when an error occurs. Elements will + be 3 tuple of raw node name, raw node, and generated node + that did not match. 
+ + """ + wire_nodes = {} + for node in nodes: + node_set = set(node) + for wire in node: + wire_nodes[wire] = node_set + + for node, raw_node_wires in raw_nodes: + raw_node_set = set(raw_node_wires) + + for wire in sorted(raw_node_set): + if wire not in wire_nodes: + if set((wire,)) != raw_node_set: + error_nodes.append((node, tuple(raw_node_set), (wire,))) + elif wire_nodes[wire] != raw_node_set: + error_nodes.append((node, tuple(raw_node_set), tuple(wire_nodes[wire]))) + +def check_errors(flat_error_nodes, ignored_wires): + """ Check if error_nodes has errors that are not covered in ignored_wires. + + Args: + flat_error_nodes - List of error_nodes generated from verify_nodes. + ignored_wires - List of wires that should be ignored if they were generated. + + """ + + error_nodes = {} + for node, raw_node, generated_nodes in flat_error_nodes: + if node not in error_nodes: + error_nodes[node] = { + 'raw_node': set(raw_node), + 'generated_nodes': set(), + } + + # Make sure all raw nodes are the same. + assert error_nodes[node]['raw_node'] == set(raw_node) + + error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes))) + + for node, error in error_nodes.items(): + combined_generated_nodes = set() + for generated_node in error['generated_nodes']: + combined_generated_nodes |= set(generated_node) + + # Make sure there are not extra wires in nodes. 
+ assert error['raw_node'] == combined_generated_nodes, (node, error) + + good_node = max(error['generated_nodes'], key=lambda x: len(x)) + bad_nodes = error['generated_nodes'] - set((good_node,)) + + # Max sure only single wires are stranded + assert max(len(generated_node) for generated_node in bad_nodes) == 1 + + for generate_node in bad_nodes: + for wire in generate_node: + if wire not in ignored_wires: + return False + + return True + +class NodeLookup(object): + def __init__(self): + self.nodes = {} + + def load_from_nodes(self, nodes): + self.nodes = nodes + + def load_from_root_csv(self, nodes): + for node in progressbar.progressbar(nodes): + with open(node) as f: + node_wires = json5.load(f) + assert node_wires['node'] not in self.nodes + self.nodes[node_wires['node']] = node_wires['wires'] + + def load_from_file(self, fname): + with open(fname, 'rb') as f: + self.nodes = pickle.load(f) + + def save_to_file(self, fname): + with open(fname, 'wb') as f: + pickle.dump(self.nodes, f) + + def site_pin_node_to_wires(self, tile, node): + if node is None: + return + + node_wires = self.nodes[node] + + for wire in node_wires: + if wire['wire'].startswith(tile + '/'): + yield wire['wire'][len(tile)+1:] + + def wires_for_tile(self, tile): + for node in self.nodes.values(): + for wire in node: + if wire['wire'].startswith(tile + '/'): + yield wire['wire'][len(tile)+1:] + +def compare_prototype_site(proto_a, proto_b): + """ Compare two proto site type. + + Will assert if prototypes are not equivalent. 
+ + """ + assert proto_a == proto_b, repr((proto_a, proto_b)) diff --git a/prjxray/tile.py b/prjxray/tile.py new file mode 100644 index 00000000..cbccc18a --- /dev/null +++ b/prjxray/tile.py @@ -0,0 +1,85 @@ +from collections import namedtuple +import json + +""" Database files available for a tile """ +TileDbs = namedtuple('TileDbs', 'segbits mask tile_type') + +Pip = namedtuple('Pip', 'net_to net_from can_invert is_directional is_pseudo') + +""" Site - Represents an instance of a site within a tile. + +name - Name of site within tile, instance specific. +prefix - Prefix of site naming in Xilinx parlance. +type - What type of slice this instance presents. +pins - Instaces of site pins within this site and tile. This is an tuple of + SitePin tuples, and is specific to this instance of the site within + the tile. + +""" +Site = namedtuple('Site', 'name x y type site_pins') + +""" SitePin - Tuple representing a site pin within a tile. + +Sites are generic based on type, however sites are instanced +within a tile 1 or more times. The SitePin contains both site type generic +information and tile type specific information. + +name - Site type specific name. This name is expected to be the same for all + sites of the same type. +direction - Direction of this site pin. This direction is expected to be the + same for all sites of the same type. +wire - Wire name within the tile. This name is site instance specific. + +""" +SitePin = namedtuple('SitePin', 'name wire direction') + +class Tile(object): + """ Provides abstration of a tile in the database. 
""" + def __init__(self, tilename, tile_dbs): + self.tilename = tilename + self.tilename_upper = self.tilename.upper() + self.tile_dbs = tile_dbs + + self.wires = None + self.sites = None + self.pips = None + + def yield_sites(sites): + for site in sites: + yield Site( + name = None, + type = site['type'], + x = None, + y = None, + site_pins = site['site_pins'], + ) + + def yield_pips(pips): + for pip in pips: + yield Pip( + net_to = pip['dst_wire'], + net_from = pip['src_wire'], + can_invert = bool(int(pip['can_invert'])), + is_directional = bool(int(pip['is_directional'])), + is_pseudo = bool(int(pip['is_pseudo'])), + ) + + with open(self.tile_dbs.tile_type) as f: + tile_type = json.load(f) + assert self.tilename_upper == tile_type['tile_type'] + self.wires = tile_type['wires'] + self.sites = tuple(yield_sites(tile_type['sites'])) + self.pips = tuple(yield_pips(tile_type['pips'])) + + def get_wires(self): + """Returns a set of wire names present in this tile.""" + return self.wires + + def get_sites(self): + """ Returns tuple of Site namedtuple's present in this tile. """ + return self.sites + + def get_pips(self): + """ Returns tuple of Pip namedtuple's representing the PIPs in this tile. + """ + return self.pips diff --git a/requirements.txt b/requirements.txt index ab61583a..aa6413ef 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,4 @@ futures yapf +pyjson5 +progressbar2 diff --git a/tools/quick_test.py b/tools/quick_test.py new file mode 100644 index 00000000..9596d50f --- /dev/null +++ b/tools/quick_test.py @@ -0,0 +1,30 @@ +from __future__ import print_function +import prjxray.db +import argparse + +def quick_test(db_root): + db = prjxray.db.Database(db_root) + g = db.grid() + +# Verify that we have some tile information for every tile in grid. 
+ tile_types_in_grid = set(g.gridinfo_at_loc(loc).tile_type for loc in g.tile_locations()) + tile_types_in_db = set(db.get_tile_types()) + assert len(tile_types_in_grid - tile_types_in_db) == 0 + +# Verify that all tile types can be loaded. + for tile_type in db.get_tile_types(): + tile = db.get_tile_type(tile_type) + tile.get_wires() + tile.get_sites() + tile.get_pips() + +def main(): + parser = argparse.ArgumentParser(description="Runs a sanity check on a prjxray database.") + parser.add_argument('--db_root', required=True) + + args = parser.parse_args() + + quick_test(args.db_root) + +if __name__ == '__main__': + main() diff --git a/tools/verify_tile_connections.py b/tools/verify_tile_connections.py new file mode 100644 index 00000000..e9262be5 --- /dev/null +++ b/tools/verify_tile_connections.py @@ -0,0 +1,119 @@ +from __future__ import print_function +import prjxray.db +import prjxray.lib +import argparse +import datetime +import progressbar +import multiprocessing +import pyjson5 as json5 +import json +import sys + +def full_wire_name(wire_in_grid): + return '{}/{}'.format(wire_in_grid.tile, wire_in_grid.wire) + +def make_connection(wires, connection): + wire_a = full_wire_name(connection.wire_a) + wire_b = full_wire_name(connection.wire_b) + + if wire_a not in wires: + wires[wire_a] = set((wire_a,)) + + if wire_b not in wires: + wires[wire_b] = set((wire_b,)) + + wire_a_set = wires[wire_a] + wire_b_set = wires[wire_b] + + if wire_a_set is wire_b_set: + return + + wire_a_set |= wire_b_set + + for wire in wire_a_set: + wires[wire] = wire_a_set + +def make_connections(db_root): + db = prjxray.db.Database(db_root) + c = db.connections() + + wires = {} + for connection in c.get_connections(): + make_connection(wires, connection) + + nodes = {} + + for wire_node in wires.values(): + nodes[id(wire_node)] = wire_node + + return nodes.values() + +def read_json5(fname): + with open(fname, 'r') as f: + return json5.load(f) + +def main(): + parser = 
argparse.ArgumentParser(description="Tests database against raw node list.") + parser.add_argument('--db_root', required=True) + parser.add_argument('--raw_node_root', required=True) + parser.add_argument('--error_nodes', default="error_nodes.json") + parser.add_argument('--ignored_wires') + + args = parser.parse_args() + + processes = min(multiprocessing.cpu_count(), 10) + + print('{} Running {} processes'.format(datetime.datetime.now(), processes)) + pool = multiprocessing.Pool(processes=processes) + print('{} Reading raw data index'.format(datetime.datetime.now(), processes)) + _, nodes = prjxray.lib.read_root_csv(args.raw_node_root) + print('{} Reading raw_node_data'.format(datetime.datetime.now())) + raw_node_data = [] + with progressbar.ProgressBar(max_value=len(nodes)) as bar: + for idx, node in enumerate(pool.imap_unordered( + read_json5, + nodes, + chunksize = 20, + )): + bar.update(idx) + raw_node_data.append((node['node'], tuple(wire['wire'] for wire in node['wires']))) + bar.update(idx+1) + + print('{} Creating connections'.format(datetime.datetime.now())) + generated_nodes = make_connections(args.db_root) + + print('{} Verifying connections'.format(datetime.datetime.now())) + error_nodes = [] + prjxray.lib.verify_nodes(raw_node_data, generated_nodes, error_nodes) + + if len(error_nodes) > 0: + if args.ignored_wires: + with open(args.ignored_wires, 'r') as f: + ignored_wires = [l.strip() for l in f.readlines()] + + print('{} Found {} errors, writing errors to {}'.format( + datetime.datetime.now(), + len(error_nodes), + args.error_nodes, + )) + + with open(args.error_nodes, 'w') as f: + json.dump(error_nodes, f, indent=2) + + if not args.ignored_wires: + sys.exit(1) + + if not prjxray.lib.check_errors(error_nodes, ignored_wires): + print('{} Errors were not ignored via ignored_wires {}'.format( + datetime.datetime.now(), + args.ignored_wires, + )) + sys.exit(1) + else: + print('{} All errors were via ignored_wires {}'.format( + datetime.datetime.now(), + 
args.ignored_wires, + )) + +if __name__ == '__main__': + main() From c4a62fb315b3a7819cae2a4259f54599d2e343d1 Mon Sep 17 00:00:00 2001 From: Keith Rothman <537074+litghost@users.noreply.github.com> Date: Thu, 27 Sep 2018 08:53:39 -0700 Subject: [PATCH 2/5] Run make format. Signed-off-by: Keith Rothman <537074+litghost@users.noreply.github.com> --- fuzzers/074-dump_all/analyze_errors.py | 57 +- fuzzers/074-dump_all/create_node_tree.py | 405 ++++---- fuzzers/074-dump_all/generate_grid.py | 952 ++++++++++--------- fuzzers/074-dump_all/generate_ignore_list.py | 41 +- fuzzers/074-dump_all/reduce_site_types.py | 63 +- fuzzers/074-dump_all/reduce_tile_types.py | 454 +++++---- prjxray/connections.py | 108 ++- prjxray/db.py | 136 +-- prjxray/grid.py | 66 +- prjxray/lib.py | 172 ++-- prjxray/tile.py | 87 +- tools/quick_test.py | 39 +- tools/verify_tile_connections.py | 166 ++-- 13 files changed, 1427 insertions(+), 1319 deletions(-) diff --git a/fuzzers/074-dump_all/analyze_errors.py b/fuzzers/074-dump_all/analyze_errors.py index f145204c..137267d8 100644 --- a/fuzzers/074-dump_all/analyze_errors.py +++ b/fuzzers/074-dump_all/analyze_errors.py @@ -1,41 +1,40 @@ import json with open('output/error_nodes.json') as f: - flat_error_nodes = json.load(f) + flat_error_nodes = json.load(f) error_nodes = {} for node, raw_node, generated_nodes in flat_error_nodes: - if node not in error_nodes: - error_nodes[node] = { - 'raw_node': set(raw_node), - 'generated_nodes': set(), - } + if node not in error_nodes: + error_nodes[node] = { + 'raw_node': set(raw_node), + 'generated_nodes': set(), + } - - assert error_nodes[node]['raw_node'] == set(raw_node) - error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes))) + assert error_nodes[node]['raw_node'] == set(raw_node) + error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes))) for node, error in error_nodes.items(): - combined_generated_nodes = set() - for generated_node in error['generated_nodes']: - 
combined_generated_nodes |= set(generated_node) + combined_generated_nodes = set() + for generated_node in error['generated_nodes']: + combined_generated_nodes |= set(generated_node) - assert error['raw_node'] == combined_generated_nodes, (node, error) + assert error['raw_node'] == combined_generated_nodes, (node, error) - good_node = max(error['generated_nodes'], key=lambda x: len(x)) - bad_nodes = error['generated_nodes'] - set((good_node,)) + good_node = max(error['generated_nodes'], key=lambda x: len(x)) + bad_nodes = error['generated_nodes'] - set((good_node, )) - if max(len(generated_node) for generated_node in bad_nodes) > 1: - assert False, node - else: - not_pcie = False - for generated_node in bad_nodes: - for wire in generated_node: - if not wire.startswith('PCIE'): - not_pcie = True - if not_pcie: - #print(node, good_node, map(tuple, bad_nodes)) - print(repr((node, tuple(map(tuple, bad_nodes))))) - pass + if max(len(generated_node) for generated_node in bad_nodes) > 1: + assert False, node else: - #print(repr((node, map(tuple, bad_nodes)))) - pass + not_pcie = False + for generated_node in bad_nodes: + for wire in generated_node: + if not wire.startswith('PCIE'): + not_pcie = True + if not_pcie: + #print(node, good_node, map(tuple, bad_nodes)) + print(repr((node, tuple(map(tuple, bad_nodes))))) + pass + else: + #print(repr((node, map(tuple, bad_nodes)))) + pass diff --git a/fuzzers/074-dump_all/create_node_tree.py b/fuzzers/074-dump_all/create_node_tree.py index 94c3ca72..0721d6b7 100644 --- a/fuzzers/074-dump_all/create_node_tree.py +++ b/fuzzers/074-dump_all/create_node_tree.py @@ -7,255 +7,272 @@ import prjxray.lib import pickle import collections + def build_node_index(fname): - node_index = {} - with open(fname, 'rb') as f: - f.seek(0, 2) - bytes = f.tell() - f.seek(0, 0) - with progressbar.ProgressBar(max_value=bytes) as bar: - end_of_line = 0 - for l in f: - parts = l.decode('utf8').split(' ') - pip, node = parts[0:2] + node_index = {} + with 
open(fname, 'rb') as f: + f.seek(0, 2) + bytes = f.tell() + f.seek(0, 0) + with progressbar.ProgressBar(max_value=bytes) as bar: + end_of_line = 0 + for l in f: + parts = l.decode('utf8').split(' ') + pip, node = parts[0:2] - if node not in node_index: - node_index[node] = [] + if node not in node_index: + node_index[node] = [] - node_index[node].append(end_of_line) - end_of_line = f.tell() - bar.update(end_of_line) + node_index[node].append(end_of_line) + end_of_line = f.tell() + bar.update(end_of_line) + + return node_index - return node_index def read_node(expected_node, wire_file, node_index): - with open(wire_file, 'rb') as f: - for index in node_index: - f.seek(index, 0) + with open(wire_file, 'rb') as f: + for index in node_index: + f.seek(index, 0) - parts = f.readline().decode('utf8').strip().split(' ') + parts = f.readline().decode('utf8').strip().split(' ') - pip, node = parts[0:2] - wires = parts[2:] + pip, node = parts[0:2] + wires = parts[2:] - assert node == expected_node, repr((node, expected_node, index)) + assert node == expected_node, repr((node, expected_node, index)) + + yield wires - yield wires def generate_edges(graph, root, graph_nodes): - """ Starting from root, generate an edge in dir and insert into graph. + """ Starting from root, generate an edge in dir and insert into graph. If the tree forks, simply insert a joins to indicate the split. 
""" - edge = [root] - prev_root = None + edge = [root] + prev_root = None - while True: - outbound_edges = graph_nodes[root] - outbound_edges -= set((prev_root,)) - if len(outbound_edges) > 1: - graph['edges'].append(edge) - if root not in graph['joins']: - graph['joins'][root] = set() - graph['joins'][root] |= outbound_edges + while True: + outbound_edges = graph_nodes[root] + outbound_edges -= set((prev_root, )) + if len(outbound_edges) > 1: + graph['edges'].append(edge) + if root not in graph['joins']: + graph['joins'][root] = set() + graph['joins'][root] |= outbound_edges - for element in graph_nodes[root]: - if element not in graph['joins']: - graph['joins'][element] = set() - graph['joins'][element].add(root) + for element in graph_nodes[root]: + if element not in graph['joins']: + graph['joins'][element] = set() + graph['joins'][element].add(root) - break - else: - if len(outbound_edges) == 0: - graph['edges'].append(edge) - break + break + else: + if len(outbound_edges) == 0: + graph['edges'].append(edge) + break + + next_root = tuple(outbound_edges)[0] + edge.append(next_root) + prev_root, root = root, next_root - next_root = tuple(outbound_edges)[0] - edge.append(next_root) - prev_root, root = root, next_root def create_ordered_wires_for_node(node, wires_in_node, downhill, uphill): - if len(wires_in_node) <= 2: - return {'edges': [wires_in_node], 'joins': {}} + if len(wires_in_node) <= 2: + return {'edges': [wires_in_node], 'joins': {}} - downhill = set(tuple(l) for l in downhill) - uphill = set(tuple(l) for l in uphill) + downhill = set(tuple(l) for l in downhill) + uphill = set(tuple(l) for l in uphill) - roots = set() - all_wires = set() + roots = set() + all_wires = set() - for wire in downhill: - if len(wire) > 0: - roots |= set((wire[0], wire[-1])) - all_wires |= set(wire) + for wire in downhill: + if len(wire) > 0: + roots |= set((wire[0], wire[-1])) + all_wires |= set(wire) - for wire in uphill: - if len(wire) > 0: - roots |= set((wire[0], 
wire[-1])) - all_wires |= set(wire) + for wire in uphill: + if len(wire) > 0: + roots |= set((wire[0], wire[-1])) + all_wires |= set(wire) - assert len(wires_in_node) >= len(all_wires) + assert len(wires_in_node) >= len(all_wires) - if len(all_wires) <= 2: - return {'edges': tuple(all_wires), 'joins': {}} + if len(all_wires) <= 2: + return {'edges': tuple(all_wires), 'joins': {}} - graph_nodes = dict((wire, set()) for wire in all_wires) + graph_nodes = dict((wire, set()) for wire in all_wires) - for wire in all_wires: - for down in downhill: - try: - idx = down.index(wire) - if idx+1 < len(down): - graph_nodes[wire].add(down[idx+1]) - if idx-1 >= 0: - graph_nodes[wire].add(down[idx-1]) - except ValueError: - continue + for wire in all_wires: + for down in downhill: + try: + idx = down.index(wire) + if idx + 1 < len(down): + graph_nodes[wire].add(down[idx + 1]) + if idx - 1 >= 0: + graph_nodes[wire].add(down[idx - 1]) + except ValueError: + continue - for up in uphill: - try: - idx = up.index(wire) - if idx+1 < len(up): - graph_nodes[wire].add(up[idx+1]) - if idx-1 >= 0: - graph_nodes[wire].add(up[idx-1]) - except ValueError: - continue + for up in uphill: + try: + idx = up.index(wire) + if idx + 1 < len(up): + graph_nodes[wire].add(up[idx + 1]) + if idx - 1 >= 0: + graph_nodes[wire].add(up[idx - 1]) + except ValueError: + continue - graph = {'edges': [], 'joins': {}} + graph = {'edges': [], 'joins': {}} - while len(roots) > 0: - root = roots.pop() + while len(roots) > 0: + root = roots.pop() - if len(graph_nodes[root]) > 0: - generate_edges(graph, root, graph_nodes) + if len(graph_nodes[root]) > 0: + generate_edges(graph, root, graph_nodes) - # Dedup identical edges. - final_edges = set() + # Dedup identical edges. 
+ final_edges = set() - for edge in graph['edges']: - edge1 = tuple(edge) - edge2 = tuple(edge[::-1]) + for edge in graph['edges']: + edge1 = tuple(edge) + edge2 = tuple(edge[::-1]) - if edge1 > edge2: - final_edges.add((edge2, edge1)) - else: - final_edges.add((edge1, edge2)) - - edges = [edge[0] for edge in final_edges] - - element_index = {} - for edge in edges: - for idx, element in enumerate(edge): - if element not in element_index: - element_index[element] = [] - element_index[element].append((idx, edge)) - - new_edges = [] - for edge in edges: - starts = element_index[edge[0]] - ends = element_index[edge[-1]] - - found_any = False - for start in starts: - start_idx, other_edge = start - if other_edge is edge: - continue - - - for end in ends: - if other_edge is not end[1]: - continue - - found_any = True - end_idx, _ = end - # check if the interior elements are the same. - if start_idx > end_idx: - step = -1 + if edge1 > edge2: + final_edges.add((edge2, edge1)) else: - step = 1 + final_edges.add((edge1, edge2)) - other_edge_slice = slice(start_idx, end_idx+step if end_idx+step >= 0 else None, step) - if edge != other_edge[other_edge_slice]: - new_edges.append(edge) + edges = [edge[0] for edge in final_edges] - if not found_any: - new_edges.append(edge) + element_index = {} + for edge in edges: + for idx, element in enumerate(edge): + if element not in element_index: + element_index[element] = [] + element_index[element].append((idx, edge)) - output = { - 'edges': new_edges, - 'joins': dict((key, tuple(value)) - for key, value in graph['joins'].items()), - 'wires': wires_in_node, - } + new_edges = [] + for edge in edges: + starts = element_index[edge[0]] + ends = element_index[edge[-1]] - all_wires_in_output = set() - for edge in output['edges']: - all_wires_in_output |= set(edge) + found_any = False + for start in starts: + start_idx, other_edge = start + if other_edge is edge: + continue - for element in output['joins']: - all_wires_in_output.add(element) + 
for end in ends: + if other_edge is not end[1]: + continue + + found_any = True + end_idx, _ = end + # check if the interior elements are the same. + if start_idx > end_idx: + step = -1 + else: + step = 1 + + other_edge_slice = slice( + start_idx, end_idx + step if end_idx + step >= 0 else None, + step) + if edge != other_edge[other_edge_slice]: + new_edges.append(edge) + + if not found_any: + new_edges.append(edge) + + output = { + 'edges': + new_edges, + 'joins': + dict((key, tuple(value)) for key, value in graph['joins'].items()), + 'wires': + wires_in_node, + } + + all_wires_in_output = set() + for edge in output['edges']: + all_wires_in_output |= set(edge) + + for element in output['joins']: + all_wires_in_output.add(element) + + return output - return output def main(): - parser = argparse.ArgumentParser(description="") - parser.add_argument('--dump_all_root_dir', required=True) - parser.add_argument('--ordered_wires_root_dir', required=True) - parser.add_argument('--output_dir', required=True) + parser = argparse.ArgumentParser(description="") + parser.add_argument('--dump_all_root_dir', required=True) + parser.add_argument('--ordered_wires_root_dir', required=True) + parser.add_argument('--output_dir', required=True) - args = parser.parse_args() + args = parser.parse_args() - downhill_wires = os.path.join(args.ordered_wires_root_dir, 'downhill_wires.txt') - uphill_wires = os.path.join(args.ordered_wires_root_dir, 'uphill_wires.txt') + downhill_wires = os.path.join( + args.ordered_wires_root_dir, 'downhill_wires.txt') + uphill_wires = os.path.join( + args.ordered_wires_root_dir, 'uphill_wires.txt') - assert os.path.exists(downhill_wires) - assert os.path.exists(uphill_wires) + assert os.path.exists(downhill_wires) + assert os.path.exists(uphill_wires) - print('{} Reading root.csv'.format(datetime.datetime.now())) - tiles, nodes = prjxray.lib.read_root_csv(args.dump_all_root_dir) + print('{} Reading root.csv'.format(datetime.datetime.now())) + tiles, nodes = 
prjxray.lib.read_root_csv(args.dump_all_root_dir) - print('{} Loading node<->wire mapping'.format(datetime.datetime.now())) - node_lookup = prjxray.lib.NodeLookup() - node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle') - if os.path.exists(node_lookup_file): - node_lookup.load_from_file(node_lookup_file) - else: - node_lookup.load_from_root_csv(nodes) - node_lookup.save_to_file(node_lookup_file) + print('{} Loading node<->wire mapping'.format(datetime.datetime.now())) + node_lookup = prjxray.lib.NodeLookup() + node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle') + if os.path.exists(node_lookup_file): + node_lookup.load_from_file(node_lookup_file) + else: + node_lookup.load_from_root_csv(nodes) + node_lookup.save_to_file(node_lookup_file) - wire_index_file = os.path.join(args.output_dir, 'wire_index.pickle') - if os.path.exists(wire_index_file): - print('{} Reading wire<->node index'.format(datetime.datetime.now())) - with open(wire_index_file, 'rb') as f: - wire_index = pickle.load(f) + wire_index_file = os.path.join(args.output_dir, 'wire_index.pickle') + if os.path.exists(wire_index_file): + print('{} Reading wire<->node index'.format(datetime.datetime.now())) + with open(wire_index_file, 'rb') as f: + wire_index = pickle.load(f) - downhill_wire_node_index = wire_index['downhill'] - uphill_wire_node_index = wire_index['uphill'] - else: - print('{} Creating wire<->node index'.format(datetime.datetime.now())) - downhill_wire_node_index = build_node_index(downhill_wires) - uphill_wire_node_index = build_node_index(uphill_wires) + downhill_wire_node_index = wire_index['downhill'] + uphill_wire_node_index = wire_index['uphill'] + else: + print('{} Creating wire<->node index'.format(datetime.datetime.now())) + downhill_wire_node_index = build_node_index(downhill_wires) + uphill_wire_node_index = build_node_index(uphill_wires) - with open(wire_index_file, 'wb') as f: - pickle.dump({ - 'downhill': downhill_wire_node_index, - 'uphill': 
uphill_wire_node_index, - }, f) + with open(wire_index_file, 'wb') as f: + pickle.dump( + { + 'downhill': downhill_wire_node_index, + 'uphill': uphill_wire_node_index, + }, f) - print('{} Creating node tree'.format(datetime.datetime.now())) - nodes = collections.OrderedDict() - for node in progressbar.progressbar(sorted(node_lookup.nodes)): - nodes[node] = create_ordered_wires_for_node( - node, - tuple(wire['wire'] for wire in node_lookup.nodes[node]), - tuple(read_node(node, downhill_wires, downhill_wire_node_index[node] if node in downhill_wire_node_index else [])), - tuple(read_node(node, uphill_wires, uphill_wire_node_index[node] if node in uphill_wire_node_index else []))) + print('{} Creating node tree'.format(datetime.datetime.now())) + nodes = collections.OrderedDict() + for node in progressbar.progressbar(sorted(node_lookup.nodes)): + nodes[node] = create_ordered_wires_for_node( + node, tuple(wire['wire'] for wire in node_lookup.nodes[node]), + tuple( + read_node( + node, downhill_wires, downhill_wire_node_index[node] + if node in downhill_wire_node_index else [])), + tuple( + read_node( + node, uphill_wires, uphill_wire_node_index[node] + if node in uphill_wire_node_index else []))) + + print('{} Writing node tree'.format(datetime.datetime.now())) + with open(os.path.join(args.output_dir, 'node_tree.json'), 'w') as f: + json.dump(nodes, f, indent=2) - print('{} Writing node tree'.format(datetime.datetime.now())) - with open(os.path.join(args.output_dir, 'node_tree.json'), 'w') as f: - json.dump(nodes, f, indent=2) if __name__ == '__main__': - main() + main() diff --git a/fuzzers/074-dump_all/generate_grid.py b/fuzzers/074-dump_all/generate_grid.py index b1e6d717..c1fd2468 100644 --- a/fuzzers/074-dump_all/generate_grid.py +++ b/fuzzers/074-dump_all/generate_grid.py @@ -13,66 +13,30 @@ import pickle import inspect import sys + def get_tile_grid_info(fname): - with open(fname, 'r') as f: - tile = json5.load(f) + with open(fname, 'r') as f: + tile = 
json5.load(f) - return { - tile['tile']: { - 'type': tile['type'], - 'grid_x': tile['x'], - 'grid_y': tile['y'], - 'sites': dict( - (site['site'], site['type']) for site in tile['sites'] - ), - 'wires': set( - wire['wire'] for wire in tile['wires'], - ) - }, - } - -def read_json5(fname): - with open(fname, 'r') as f: - return json5.load(f) - -def generate_tilesizes(grid): - """ ***BROKEN DO NOT USE*** """ - assert False - - tilesizes = {} - tiles = grid['tiles'] - coord_to_tile = create_coord_to_tile(tiles) - - for tile in grid['tiles']: - tilesizes[grid['tiles'][tile]['type']] = { - 'grid_x_size': 1, - 'grid_y_size': None, + return { + tile['tile']: { + 'type': tile['type'], + 'grid_x': tile['x'], + 'grid_y': tile['y'], + 'sites': dict( + (site['site'], site['type']) for site in tile['sites']), + 'wires': set((wire['wire'] for wire in tile['wires'])) + }, } - x, y = zip(*coord_to_tile.keys()) - min_x = min(x) - max_x = max(x) - min_y = min(y) - max_y = max(y) - for x in range(min_x, max_x+1): - tiles_slice = [(y, tiles[coord_to_tile[(x, y)]]['type']) for y in range(min_y, max_y+1) if tiles[coord_to_tile[(x, y)]]['type'] != 'NULL'] +def read_json5(fname): + with open(fname, 'r') as f: + return json5.load(f) - for (y1, tile_type), (y2, _) in zip(tiles_slice[::-1], tiles_slice[-2::-1]): - grid_y_size = y1-y2 - if tilesizes[tile_type]['grid_y_size'] is None: - tilesizes[tile_type]['grid_y_size'] = grid_y_size - else: - tilesizes[tile_type]['grid_y_size'] = min(tilesizes[tile_type]['grid_y_size'], grid_y_size) - - for tile_type in tilesizes: - if tilesizes[tile_type]['grid_y_size'] is None: - tilesizes[tile_type]['grid_y_size'] = 1 - - return tilesizes def is_edge_shared(edge1, edge2): - """ Returns true if edge1 or edge2 overlap + """ Returns true if edge1 or edge2 overlap >>> is_edge_shared((0, 1), (0, 1)) True @@ -101,16 +65,17 @@ def is_edge_shared(edge1, edge2): >>> is_edge_shared((2, 3), (0, 1)) False """ - assert edge1[0] < edge1[1], edge1 - assert edge2[0] < 
edge2[1], edge2 + assert edge1[0] < edge1[1], edge1 + assert edge2[0] < edge2[1], edge2 + + if edge1[0] <= edge2[0]: + return edge2[0] < edge1[1] + else: + return edge1[0] < edge2[1] - if edge1[0] <= edge2[0]: - return edge2[0] < edge1[1] - else: - return edge1[0] < edge2[1] def share_edge(a, b): - """ Returns true if box defined by a and b share any edge. + """ Returns true if box defined by a and b share any edge. Box is defined as (x-min, y-min, x-max, y-max). @@ -140,525 +105,566 @@ def share_edge(a, b): True """ - a_x_min, a_y_min, a_x_max, a_y_max = a - b_x_min, b_y_min, b_x_max, b_y_max = b + a_x_min, a_y_min, a_x_max, a_y_max = a + b_x_min, b_y_min, b_x_max, b_y_max = b - if a_x_min == b_x_max or a_x_max == b_x_min: - return is_edge_shared((a_y_min, a_y_max), (b_y_min, b_y_max)) - if a_y_min == b_y_max or a_y_max == b_y_min: - return is_edge_shared((a_x_min, a_x_max), (b_x_min, b_x_max)) + if a_x_min == b_x_max or a_x_max == b_x_min: + return is_edge_shared((a_y_min, a_y_max), (b_y_min, b_y_max)) + if a_y_min == b_y_max or a_y_max == b_y_min: + return is_edge_shared((a_x_min, a_x_max), (b_x_min, b_x_max)) -def next_wire_in_dimension(wire1, tile1, wire2, tile2, tiles, x_wires, y_wires, wire_map, wires_in_node): - """ next_wire_in_dimension returns true if tile1 and tile2 are in the same + +def next_wire_in_dimension( + wire1, tile1, wire2, tile2, tiles, x_wires, y_wires, wire_map, + wires_in_node): + """ next_wire_in_dimension returns true if tile1 and tile2 are in the same row and column, and must be adjcent. 
""" - tile1_info = tiles[tile1] - tile2_info = tiles[tile2] + tile1_info = tiles[tile1] + tile2_info = tiles[tile2] - tile1_x = tile1_info['grid_x'] - tile2_x = tile2_info['grid_x'] - tile1_y = tile1_info['grid_y'] - tile2_y = tile2_info['grid_y'] + tile1_x = tile1_info['grid_x'] + tile2_x = tile2_info['grid_x'] + tile1_y = tile1_info['grid_y'] + tile2_y = tile2_info['grid_y'] - # All wires are in the same row or column or if the each wire lies in its own - # row or column. - if len(y_wires) == 1 or len(x_wires) == len(wires_in_node) or abs(tile1_y-tile2_y) == 0: - ordered_wires = sorted(x_wires.keys()) + # All wires are in the same row or column or if the each wire lies in its own + # row or column. + if len(y_wires) == 1 or len(x_wires) == len(wires_in_node) or abs( + tile1_y - tile2_y) == 0: + ordered_wires = sorted(x_wires.keys()) - idx1 = ordered_wires.index(tile1_x) - idx2 = ordered_wires.index(tile2_x) + idx1 = ordered_wires.index(tile1_x) + idx2 = ordered_wires.index(tile2_x) - if len(x_wires[tile1_x]) == 1 and len(x_wires[tile2_x]) == 1: - return abs(idx1-idx2) == 1 + if len(x_wires[tile1_x]) == 1 and len(x_wires[tile2_x]) == 1: + return abs(idx1 - idx2) == 1 - if len(x_wires) == 1 or len(y_wires) == len(wires_in_node) or abs(tile1_x-tile2_x) == 0: - ordered_wires = sorted(y_wires.keys()) + if len(x_wires) == 1 or len(y_wires) == len(wires_in_node) or abs( + tile1_x - tile2_x) == 0: + ordered_wires = sorted(y_wires.keys()) - idx1 = ordered_wires.index(tile1_y) - idx2 = ordered_wires.index(tile2_y) + idx1 = ordered_wires.index(tile1_y) + idx2 = ordered_wires.index(tile2_y) - if len(y_wires[tile1_y]) == 1 and len(y_wires[tile2_y]) == 1: - return abs(idx1-idx2) == 1 + if len(y_wires[tile1_y]) == 1 and len(y_wires[tile2_y]) == 1: + return abs(idx1 - idx2) == 1 + + return None - return None def only_wire(tile1, tile2, tiles, x_wires, y_wires): - """ only_wire returns true if tile1 and tile2 only have 1 wire in their respective x or y dimension. 
+ """ only_wire returns true if tile1 and tile2 only have 1 wire in their respective x or y dimension. """ - tile1_info = tiles[tile1] - tile2_info = tiles[tile2] + tile1_info = tiles[tile1] + tile2_info = tiles[tile2] - tile1_x = tile1_info['grid_x'] - tile2_x = tile2_info['grid_x'] + tile1_x = tile1_info['grid_x'] + tile2_x = tile2_info['grid_x'] - tiles_x_adjacent = abs(tile1_x-tile2_x) == 1 - if tiles_x_adjacent and len(x_wires[tile1_x]) == 1 and len(x_wires[tile2_x]) == 1: - return True + tiles_x_adjacent = abs(tile1_x - tile2_x) == 1 + if tiles_x_adjacent and len(x_wires[tile1_x]) == 1 and len( + x_wires[tile2_x]) == 1: + return True - tile1_y = tile1_info['grid_y'] - tile2_y = tile2_info['grid_y'] + tile1_y = tile1_info['grid_y'] + tile2_y = tile2_info['grid_y'] - tiles_y_adjacent = abs(tile1_y-tile2_y) == 1 - if tiles_y_adjacent and len(y_wires[tile1_y]) == 1 and len(y_wires[tile2_y]) == 1: - return True + tiles_y_adjacent = abs(tile1_y - tile2_y) == 1 + if tiles_y_adjacent and len(y_wires[tile1_y]) == 1 and len( + y_wires[tile2_y]) == 1: + return True + + return None - return None def is_directly_connected(node, node_tree, wire1, wire2): - if 'wires' in node_tree: - node_tree_wires = node_tree['wires'] - else: - if len(node_tree['edges']) == 1 and len(node_tree['joins']) == 0: - node_tree_wires = node_tree['edges'][0] + if 'wires' in node_tree: + node_tree_wires = node_tree['wires'] else: - return None + if len(node_tree['edges']) == 1 and len(node_tree['joins']) == 0: + node_tree_wires = node_tree['edges'][0] + else: + return None + + if wire1 not in node_tree_wires: + return None + if wire2 not in node_tree_wires: + return None + + # Is there than edge that has wire1 next to wire2? 
+ for edge in node_tree['edges']: + idx1 = None + idx2 = None + try: + idx1 = edge.index(wire1) + except ValueError: + pass + + try: + idx2 = edge.index(wire2) + except ValueError: + pass + + if idx1 is not None and idx2 is not None: + return abs(idx1 - idx2) == 1 + + if idx1 is not None and (idx1 != 0 and idx1 != len(edge) - 1): + return False + + if idx2 is not None and (idx2 != 0 and idx2 != len(edge) - 1): + return False + + # Is there a join of nodes between wire1 and wire2? + if wire1 in node_tree['joins']: + return wire2 in node_tree['joins'][wire1] + + if wire2 in node_tree['joins']: + assert wire1 not in node_tree['joins'][wire2] - if wire1 not in node_tree_wires: - return None - if wire2 not in node_tree_wires: return None - # Is there than edge that has wire1 next to wire2? - for edge in node_tree['edges']: - idx1 = None - idx2 = None - try: - idx1 = edge.index(wire1) - except ValueError: - pass - try: - idx2 = edge.index(wire2) - except ValueError: - pass +def is_connected( + wire1, tile1, wire2, tile2, node, wires_in_tiles, wire_map, node_tree, + tiles, x_wires, y_wires, wires_in_node): + """ Check if two wires are directly connected. """ - if idx1 is not None and idx2 is not None: - return abs(idx1 - idx2) == 1 + next_wire_in_dim = next_wire_in_dimension( + wire1, tile1, wire2, tile2, tiles, x_wires, y_wires, wire_map, + wires_in_node) + if next_wire_in_dim is not None: + return next_wire_in_dim - if idx1 is not None and (idx1 != 0 and idx1 != len(edge)-1): - return False + # Because there are multiple possible wire connections between these two + # tiles, consult the node_tree to determine if the two wires are actually connected. + # + # Warning: The node_tree is incomplete because it is not know how to extract + # ordered wire information from the node. 
+ # + # Example node CLK_BUFG_REBUF_X60Y142/CLK_BUFG_REBUF_R_CK_GCLK0_BOT + # It does not appear to be possible to get ordered wire connection information + # for the first two wires connected to PIP + # CLK_BUFG_REBUF_X60Y117/CLK_BUFG_REBUF.CLK_BUFG_REBUF_R_CK_GCLK0_BOT<<->>CLK_BUFG_REBUF_R_CK_GCLK0_TOP + # + # However, it happens to be that theses wires are the only wires in their + # tiles, so the earlier "only wires in tile" check will pass. - if idx2 is not None and (idx2 != 0 and idx2 != len(edge)-1): - return False + connected = is_directly_connected( + node['node'], node_tree[node['node']], wire1, wire2) + if connected is not None: + return connected - # Is there a join of nodes between wire1 and wire2? - if wire1 in node_tree['joins']: - return wire2 in node_tree['joins'][wire1] + is_only_wire = only_wire(tile1, tile2, tiles, x_wires, y_wires) + if is_only_wire is not None: + return is_only_wire - if wire2 in node_tree['joins']: - assert wire1 not in node_tree['joins'][wire2] + # The node_tree didn't specify these wires, and the wires are not + # unambiguously connected. + return False - return None - -def is_connected(wire1, tile1, wire2, tile2, node, wires_in_tiles, wire_map, node_tree, tiles, x_wires, y_wires, wires_in_node): - """ Check if two wires are directly connected. """ - - next_wire_in_dim = next_wire_in_dimension(wire1, tile1, wire2, tile2, tiles, - x_wires, y_wires, - wire_map, wires_in_node) - if next_wire_in_dim is not None: - return next_wire_in_dim - - # Because there are multiple possible wire connections between these two - # tiles, consult the node_tree to determine if the two wires are actually connected. - # - # Warning: The node_tree is incomplete because it is not know how to extract - # ordered wire information from the node. 
- # - # Example node CLK_BUFG_REBUF_X60Y142/CLK_BUFG_REBUF_R_CK_GCLK0_BOT - # It does not appear to be possible to get ordered wire connection information - # for the first two wires connected to PIP - # CLK_BUFG_REBUF_X60Y117/CLK_BUFG_REBUF.CLK_BUFG_REBUF_R_CK_GCLK0_BOT<<->>CLK_BUFG_REBUF_R_CK_GCLK0_TOP - # - # However, it happens to be that theses wires are the only wires in their - # tiles, so the earlier "only wires in tile" check will pass. - - connected = is_directly_connected(node['node'], node_tree[node['node']], wire1, wire2) - if connected is not None: - return connected - - is_only_wire = only_wire(tile1, tile2, tiles, x_wires, y_wires) - if is_only_wire is not None: - return is_only_wire - - # The node_tree didn't specify these wires, and the wires are not - # unambiguously connected. - return False def process_node(tileconn, key_history, node, wire_map, node_tree, tiles): - wires = [wire['wire'] for wire in node['wires']] + wires = [wire['wire'] for wire in node['wires']] - wires_in_tiles = {} - x_wires = {} - y_wires = {} - for wire in wires: - wire_info = wire_map[wire] + wires_in_tiles = {} + x_wires = {} + y_wires = {} + for wire in wires: + wire_info = wire_map[wire] - if wire_info['tile'] not in wires_in_tiles: - wires_in_tiles[wire_info['tile']] = [] - wires_in_tiles[wire_info['tile']].append(wire) + if wire_info['tile'] not in wires_in_tiles: + wires_in_tiles[wire_info['tile']] = [] + wires_in_tiles[wire_info['tile']].append(wire) + + grid_x = tiles[wire_info['tile']]['grid_x'] + if grid_x not in x_wires: + x_wires[grid_x] = [] + x_wires[grid_x].append(wire) + + grid_y = tiles[wire_info['tile']]['grid_y'] + if grid_y not in y_wires: + y_wires[grid_y] = [] + y_wires[grid_y].append(wire) + + if len(wires) == 2: + wire1 = wires[0] + wire_info1 = wire_map[wire1] + wire2 = wires[1] + wire_info2 = wire_map[wire2] + update_tile_conn( + tileconn, key_history, wire1, wire_info1, wire2, wire_info2, tiles) + return + + for idx, wire1 in enumerate(wires): 
+ wire_info1 = wire_map[wire1] + for wire2 in wires[idx + 1:]: + wire_info2 = wire_map[wire2] + + if not is_connected(wire1, wire_info1['tile'], wire2, + wire_info2['tile'], node, wires_in_tiles, + wire_map, node_tree, tiles, x_wires, y_wires, + wires): + continue + + update_tile_conn( + tileconn, key_history, wire1, wire_info1, wire2, wire_info2, + tiles) - grid_x = tiles[wire_info['tile']]['grid_x'] - if grid_x not in x_wires: - x_wires[grid_x] = [] - x_wires[grid_x].append(wire) +def update_tile_conn( + tileconn, key_history, wirename1, wire1, wirename2, wire2, tiles): + # Ensure that (wire1, wire2) is sorted, so we can easy check if a connection + # already exists. - grid_y = tiles[wire_info['tile']]['grid_y'] - if grid_y not in y_wires: - y_wires[grid_y] = [] - y_wires[grid_y].append(wire) + tile1 = tiles[wire1['tile']] + tile2 = tiles[wire2['tile']] + if ((wire1['type'], wire1['shortname'], tile1['grid_x'], tile1['grid_y']) > + (wire2['type'], wire2['shortname'], tile2['grid_x'], tile2['grid_y'])): + wire1, tile1, wire2, tile2 = wire2, tile2, wire1, tile1 - if len(wires) == 2: - wire1 = wires[0] - wire_info1 = wire_map[wire1] - wire2 = wires[1] - wire_info2 = wire_map[wire2] - update_tile_conn(tileconn, key_history, wire1, wire_info1, wire2, wire_info2, tiles) - return + tileconn.append( + { + "grid_deltas": [ + tile2['grid_x'] - tile1['grid_x'], + tile2['grid_y'] - tile1['grid_y'], + ], + "tile_types": [ + tile1['type'], + tile2['type'], + ], + "wire_pair": [ + wire1['shortname'], + wire2['shortname'], + ], + }) - for idx, wire1 in enumerate(wires): - wire_info1 = wire_map[wire1] - for wire2 in wires[idx+1:]: - wire_info2 = wire_map[wire2] - - if not is_connected( - wire1, wire_info1['tile'], - wire2, wire_info2['tile'], - node, wires_in_tiles, wire_map, node_tree, tiles, x_wires, y_wires, wires): - continue - - update_tile_conn(tileconn, key_history, wire1, wire_info1, wire2, wire_info2, tiles) - -def update_tile_conn(tileconn, key_history, wirename1, 
wire1, wirename2, wire2, tiles): - # Ensure that (wire1, wire2) is sorted, so we can easy check if a connection - # already exists. - - tile1 = tiles[wire1['tile']] - tile2 = tiles[wire2['tile']] - if ( - (wire1['type'], wire1['shortname'], tile1['grid_x'], tile1['grid_y']) > - (wire2['type'], wire2['shortname'], tile2['grid_x'], tile2['grid_y']) - ): - wire1, tile1, wire2, tile2 = wire2, tile2, wire1, tile1 - - tileconn.append({ - "grid_deltas": [ - tile2['grid_x'] - tile1['grid_x'], - tile2['grid_y'] - tile1['grid_y'], - ], - "tile_types": [ - tile1['type'], - tile2['type'], - ], - "wire_pair": [ - wire1['shortname'], - wire2['shortname'], - ], - }) def flatten_tile_conn(tileconn): - """ Convert tileconn that is key'd to identify specific wire pairs between tiles + """ Convert tileconn that is key'd to identify specific wire pairs between tiles key (tile1_type, wire1_name, tile2_type, wire2_name) to flat tile connect list that relates tile types and relative coordinates and a full list of wires to connect. 
""" - flat_tileconn = {} + flat_tileconn = {} - for conn in tileconn: - key = (tuple(conn['tile_types']), tuple(conn['grid_deltas'])) + for conn in tileconn: + key = (tuple(conn['tile_types']), tuple(conn['grid_deltas'])) - if key not in flat_tileconn: - flat_tileconn[key] = { - 'tile_types': conn['tile_types'], - 'grid_deltas': conn['grid_deltas'], - 'wire_pairs': set() - } + if key not in flat_tileconn: + flat_tileconn[key] = { + 'tile_types': conn['tile_types'], + 'grid_deltas': conn['grid_deltas'], + 'wire_pairs': set() + } - flat_tileconn[key]['wire_pairs'].add(tuple(conn['wire_pair'])) + flat_tileconn[key]['wire_pairs'].add(tuple(conn['wire_pair'])) - def inner(): - for output in flat_tileconn.values(): - yield { - 'tile_types': output['tile_types'], - 'grid_deltas': output['grid_deltas'], - 'wire_pairs': tuple(output['wire_pairs']), - } + def inner(): + for output in flat_tileconn.values(): + yield { + 'tile_types': output['tile_types'], + 'grid_deltas': output['grid_deltas'], + 'wire_pairs': tuple(output['wire_pairs']), + } + + return tuple(inner()) - return tuple(inner()) def is_tile_type(tiles, coord_to_tile, coord, tile_type): - if coord not in coord_to_tile: - return False + if coord not in coord_to_tile: + return False + + target_tile = tiles[coord_to_tile[coord]] + return target_tile['type'] == tile_type - target_tile = tiles[coord_to_tile[coord]] - return target_tile['type'] == tile_type def get_connections(wire, wire_info, conn, idx, coord_to_tile, tiles): - """ Yields (tile_coord, wire) for each wire that should be connected to specified wire. """ - pair = conn['wire_pairs'][idx] - wire_tile_type = wire_info['type'] - tile_types = conn['tile_types'] - shortname = wire_info['shortname'] - grid_deltas = conn['grid_deltas'] + """ Yields (tile_coord, wire) for each wire that should be connected to specified wire. 
""" + pair = conn['wire_pairs'][idx] + wire_tile_type = wire_info['type'] + tile_types = conn['tile_types'] + shortname = wire_info['shortname'] + grid_deltas = conn['grid_deltas'] - wire1 = tile_types[0] == wire_tile_type and shortname == pair[0] - wire2 = tile_types[1] == wire_tile_type and shortname == pair[1] - assert wire1 or wire2, (wire, conn) + wire1 = tile_types[0] == wire_tile_type and shortname == pair[0] + wire2 = tile_types[1] == wire_tile_type and shortname == pair[1] + assert wire1 or wire2, (wire, conn) - tile_of_wire = wire_info['tile'] - start_coord_x = tiles[tile_of_wire]['grid_x'] - start_coord_y = tiles[tile_of_wire]['grid_y'] - if wire1: - target_coord_x = start_coord_x + grid_deltas[0] - target_coord_y = start_coord_y + grid_deltas[1] - target_tile_type = tile_types[1] + tile_of_wire = wire_info['tile'] + start_coord_x = tiles[tile_of_wire]['grid_x'] + start_coord_y = tiles[tile_of_wire]['grid_y'] + if wire1: + target_coord_x = start_coord_x + grid_deltas[0] + target_coord_y = start_coord_y + grid_deltas[1] + target_tile_type = tile_types[1] - target_wire = pair[1] - target_tile = (target_coord_x, target_coord_y) + target_wire = pair[1] + target_tile = (target_coord_x, target_coord_y) - if is_tile_type(tiles, coord_to_tile, target_tile, target_tile_type): - yield target_tile, target_wire + if is_tile_type(tiles, coord_to_tile, target_tile, target_tile_type): + yield target_tile, target_wire - if wire2: - target_coord_x = start_coord_x - grid_deltas[0] - target_coord_y = start_coord_y - grid_deltas[1] - target_tile_type = tile_types[0] + if wire2: + target_coord_x = start_coord_x - grid_deltas[0] + target_coord_y = start_coord_y - grid_deltas[1] + target_tile_type = tile_types[0] - target_wire = pair[0] - target_tile = (target_coord_x, target_coord_y) + target_wire = pair[0] + target_tile = (target_coord_x, target_coord_y) + + if is_tile_type(tiles, coord_to_tile, target_tile, target_tile_type): + yield target_tile, target_wire - if 
is_tile_type(tiles, coord_to_tile, target_tile, target_tile_type): - yield target_tile, target_wire def make_connection(wire_nodes, wire1, wire2): - if wire_nodes[wire1] is wire_nodes[wire2]: - assert wire1 in wire_nodes[wire1] - assert wire2 in wire_nodes[wire2] - return + if wire_nodes[wire1] is wire_nodes[wire2]: + assert wire1 in wire_nodes[wire1] + assert wire2 in wire_nodes[wire2] + return - new_node = wire_nodes[wire1] | wire_nodes[wire2] + new_node = wire_nodes[wire1] | wire_nodes[wire2] + + for wire in new_node: + wire_nodes[wire] = new_node - for wire in new_node: - wire_nodes[wire] = new_node def create_coord_to_tile(tiles): - coord_to_tile = {} - for tile, tileinfo in tiles.items(): - coord_to_tile[(tileinfo['grid_x'], tileinfo['grid_y'])] = tile + coord_to_tile = {} + for tile, tileinfo in tiles.items(): + coord_to_tile[(tileinfo['grid_x'], tileinfo['grid_y'])] = tile + + return coord_to_tile - return coord_to_tile def connect_wires(tiles, tileconn, wire_map): - """ Connect individual wires into groups of wires called nodes. """ + """ Connect individual wires into groups of wires called nodes. """ - # Initialize all nodes to originally only contain the wire by itself. - wire_nodes = {} - for wire in wire_map: - wire_nodes[wire] = set([wire]) + # Initialize all nodes to originally only contain the wire by itself. 
+ wire_nodes = {} + for wire in wire_map: + wire_nodes[wire] = set([wire]) - wire_connection_map = {} - for conn in tileconn: - for idx, (wire1, wire2) in enumerate(conn['wire_pairs']): - key1 = (conn['tile_types'][0], wire1) - if key1 not in wire_connection_map: - wire_connection_map[key1] = [] - wire_connection_map[key1].append((conn, idx)) + wire_connection_map = {} + for conn in tileconn: + for idx, (wire1, wire2) in enumerate(conn['wire_pairs']): + key1 = (conn['tile_types'][0], wire1) + if key1 not in wire_connection_map: + wire_connection_map[key1] = [] + wire_connection_map[key1].append((conn, idx)) - key2 = (conn['tile_types'][1], wire2) - if key2 not in wire_connection_map: - wire_connection_map[key2] = [] - wire_connection_map[key2].append((conn, idx)) + key2 = (conn['tile_types'][1], wire2) + if key2 not in wire_connection_map: + wire_connection_map[key2] = [] + wire_connection_map[key2].append((conn, idx)) - coord_to_tile = create_coord_to_tile(tiles) + coord_to_tile = create_coord_to_tile(tiles) - for wire, wire_info in progressbar.progressbar(wire_map.items()): - key = (wire_info['type'], wire_info['shortname']) - if key not in wire_connection_map: - continue + for wire, wire_info in progressbar.progressbar(wire_map.items()): + key = (wire_info['type'], wire_info['shortname']) + if key not in wire_connection_map: + continue - for conn, idx in wire_connection_map[key]: - for target_tile, target_wire in get_connections(wire, wire_info, conn, idx, coord_to_tile, tiles): + for conn, idx in wire_connection_map[key]: + for target_tile, target_wire in get_connections( + wire, wire_info, conn, idx, coord_to_tile, tiles): - full_wire_name = coord_to_tile[target_tile] + '/' + target_wire - assert wire_map[full_wire_name]['shortname'] == target_wire, ( - target_tile, target_wire, wire, conn - ) - assert wire_map[full_wire_name]['tile'] == coord_to_tile[target_tile], ( - wire_map[full_wire_name]['tile'], coord_to_tile[target_tile] - ) + full_wire_name = 
coord_to_tile[target_tile] + '/' + target_wire + assert wire_map[full_wire_name]['shortname'] == target_wire, ( + target_tile, target_wire, wire, conn) + assert wire_map[full_wire_name]['tile'] == coord_to_tile[ + target_tile], ( + wire_map[full_wire_name]['tile'], + coord_to_tile[target_tile]) - make_connection(wire_nodes, wire, full_wire_name) + make_connection(wire_nodes, wire, full_wire_name) - # Find unique nodes - nodes = {} - for node in wire_nodes.values(): - nodes[id(node)] = node + # Find unique nodes + nodes = {} + for node in wire_nodes.values(): + nodes[id(node)] = node - # Flatten to list of lists. - return tuple(tuple(node) for node in nodes.values()) + # Flatten to list of lists. + return tuple(tuple(node) for node in nodes.values()) def generate_tilegrid(pool, tiles): - wire_map = {} + wire_map = {} - grid = { - 'segments': {}, - 'tiles': {}, - } + grid = { + 'segments': {}, + 'tiles': {}, + } - num_tiles = 0 - for tile_type in tiles: - num_tiles += len(tiles[tile_type]) - - idx = 0 - with progressbar.ProgressBar(max_value=num_tiles) as bar: + num_tiles = 0 for tile_type in tiles: - for tile in pool.imap_unordered( - get_tile_grid_info, - tiles[tile_type], - chunksize = 20, - ): - bar.update(idx) + num_tiles += len(tiles[tile_type]) - assert len(tile) == 1, tile - tilename = tuple(tile.keys())[0] + idx = 0 + with progressbar.ProgressBar(max_value=num_tiles) as bar: + for tile_type in tiles: + for tile in pool.imap_unordered( + get_tile_grid_info, + tiles[tile_type], + chunksize=20, + ): + bar.update(idx) - for wire in tile[tilename]['wires']: - assert wire not in wire_map, (wire, wire_map) - assert wire.startswith(tilename + '/'), (wire, tilename) + assert len(tile) == 1, tile + tilename = tuple(tile.keys())[0] - wire_map[wire] = { - 'tile': tilename, - 'type': tile[tilename]['type'], - 'shortname': wire[len(tilename)+1:], - } + for wire in tile[tilename]['wires']: + assert wire not in wire_map, (wire, wire_map) + assert wire.startswith(tilename + 
'/'), (wire, tilename) - del tile[tilename]['wires'] - grid['tiles'].update(tile) + wire_map[wire] = { + 'tile': tilename, + 'type': tile[tilename]['type'], + 'shortname': wire[len(tilename) + 1:], + } - idx += 1 - bar.update(idx) + del tile[tilename]['wires'] + grid['tiles'].update(tile) + + idx += 1 + bar.update(idx) + + return grid, wire_map - return grid, wire_map def generate_tileconn(pool, node_tree, nodes, wire_map, grid): - tileconn = [] - key_history = {} - raw_node_data = [] - with progressbar.ProgressBar(max_value=len(nodes)) as bar: - for idx, node in enumerate(pool.imap_unordered( - read_json5, - nodes, - chunksize = 20, - )): - bar.update(idx) - raw_node_data.append(node) - process_node(tileconn, key_history, node, wire_map, node_tree, grid['tiles']) - bar.update(idx+1) - - tileconn = flatten_tile_conn(tileconn) - - return tileconn, raw_node_data - -def main(): - parser = argparse.ArgumentParser(description="Reduces raw database dump into prototype tiles, grid, and connections.") - parser.add_argument('--root_dir', required=True) - parser.add_argument('--output_dir', required=True) - parser.add_argument('--verify_only', action='store_true') - - args = parser.parse_args() - - tiles, nodes = prjxray.lib.read_root_csv(args.root_dir) - - processes = min(multiprocessing.cpu_count(), 10) - print('{} Running {} processes'.format(datetime.datetime.now(), processes)) - pool = multiprocessing.Pool(processes=processes) - - node_tree_file = os.path.join(args.output_dir, 'node_tree.json') - - tilegrid_file = os.path.join(args.output_dir, 'tilegrid.json') - tileconn_file = os.path.join(args.output_dir, 'tileconn.json') - wire_map_file = os.path.join(args.output_dir, 'wiremap.pickle') - - if not args.verify_only: - print('{} Creating tile map'.format(datetime.datetime.now())) - grid, wire_map = generate_tilegrid(pool, tiles) - - with open(tilegrid_file, 'w') as f: - json.dump(grid, f, indent=2) - - with open(wire_map_file, 'wb') as f: - pickle.dump(wire_map, f) - - 
print('{} Reading node tree'.format(datetime.datetime.now())) - with open(node_tree_file) as f: - node_tree = json.load(f) - - print('{} Creating tile connections'.format(datetime.datetime.now())) - tileconn, raw_node_data = generate_tileconn(pool, node_tree, nodes, wire_map, grid) - - print('{} Writing tileconn'.format(datetime.datetime.now())) - with open(tileconn_file, 'w') as f: - json.dump(tileconn, f, indent=2) - else: - print('{} Reading tilegrid'.format(datetime.datetime.now())) - with open(tilegrid_file) as f: - grid = json.load(f) - - with open(wire_map_file, 'rb') as f: - wire_map = pickle.load(f) - - print('{} Reading raw_node_data'.format(datetime.datetime.now())) + tileconn = [] + key_history = {} raw_node_data = [] with progressbar.ProgressBar(max_value=len(nodes)) as bar: - for idx, node in enumerate(pool.imap_unordered( - read_json5, - nodes, - chunksize = 20, - )): - bar.update(idx) - raw_node_data.append(node) - bar.update(idx+1) + for idx, node in enumerate(pool.imap_unordered( + read_json5, + nodes, + chunksize=20, + )): + bar.update(idx) + raw_node_data.append(node) + process_node( + tileconn, key_history, node, wire_map, node_tree, + grid['tiles']) + bar.update(idx + 1) - print('{} Reading tileconn'.format(datetime.datetime.now())) - with open(tileconn_file) as f: - tileconn = json.load(f) + tileconn = flatten_tile_conn(tileconn) - wire_nodes_file = os.path.join(args.output_dir, 'wire_nodes.pickle') - if os.path.exists(wire_nodes_file) and args.verify_only: - with open(wire_nodes_file, 'rb') as f: - wire_nodes = pickle.load(f) - else: - print("{} Connecting wires to verify tileconn".format(datetime.datetime.now())) - wire_nodes = connect_wires(grid['tiles'], tileconn, wire_map) - with open(wire_nodes_file, 'wb') as f: - pickle.dump(wire_nodes, f) + return tileconn, raw_node_data - print('{} Verifing tileconn'.format(datetime.datetime.now())) - error_nodes = [] - prjxray.lib.verify_nodes([ - (node['node'], tuple(wire['wire'] for wire in 
node['wires'])) - for node in raw_node_data - ], wire_nodes, error_nodes) - if len(error_nodes) > 0: - error_nodes_file = os.path.join(args.output_dir, 'error_nodes.json') - with open(error_nodes_file, 'w') as f: - json.dump(error_nodes, f, indent=2) +def main(): + parser = argparse.ArgumentParser( + description= + "Reduces raw database dump into prototype tiles, grid, and connections." + ) + parser.add_argument('--root_dir', required=True) + parser.add_argument('--output_dir', required=True) + parser.add_argument('--verify_only', action='store_true') - ignored_wires = [] - path_to_file = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) - ignored_wires_file = os.path.join(path_to_file, 'ignored_wires.txt') - if os.path.exists(ignored_wires_file): - with open(ignored_wires_file) as f: - ignored_wires = set(l.strip() for l in f) + args = parser.parse_args() - if not prjxray.lib.check_errors(error_nodes, ignored_wires): - print('{} errors detected, see {} for details.'.format(len(error_nodes), error_nodes_file)) - sys.exit(1) + tiles, nodes = prjxray.lib.read_root_csv(args.root_dir) + + processes = min(multiprocessing.cpu_count(), 10) + print('{} Running {} processes'.format(datetime.datetime.now(), processes)) + pool = multiprocessing.Pool(processes=processes) + + node_tree_file = os.path.join(args.output_dir, 'node_tree.json') + + tilegrid_file = os.path.join(args.output_dir, 'tilegrid.json') + tileconn_file = os.path.join(args.output_dir, 'tileconn.json') + wire_map_file = os.path.join(args.output_dir, 'wiremap.pickle') + + if not args.verify_only: + print('{} Creating tile map'.format(datetime.datetime.now())) + grid, wire_map = generate_tilegrid(pool, tiles) + + with open(tilegrid_file, 'w') as f: + json.dump(grid, f, indent=2) + + with open(wire_map_file, 'wb') as f: + pickle.dump(wire_map, f) + + print('{} Reading node tree'.format(datetime.datetime.now())) + with open(node_tree_file) as f: + node_tree = json.load(f) + + print('{} 
Creating tile connections'.format(datetime.datetime.now())) + tileconn, raw_node_data = generate_tileconn( + pool, node_tree, nodes, wire_map, grid) + + print('{} Writing tileconn'.format(datetime.datetime.now())) + with open(tileconn_file, 'w') as f: + json.dump(tileconn, f, indent=2) else: - print('{} errors ignored because of {}\nSee {} for details.'.format( - len(error_nodes), ignored_wires_file, error_nodes_file)) + print('{} Reading tilegrid'.format(datetime.datetime.now())) + with open(tilegrid_file) as f: + grid = json.load(f) + + with open(wire_map_file, 'rb') as f: + wire_map = pickle.load(f) + + print('{} Reading raw_node_data'.format(datetime.datetime.now())) + raw_node_data = [] + with progressbar.ProgressBar(max_value=len(nodes)) as bar: + for idx, node in enumerate(pool.imap_unordered( + read_json5, + nodes, + chunksize=20, + )): + bar.update(idx) + raw_node_data.append(node) + bar.update(idx + 1) + + print('{} Reading tileconn'.format(datetime.datetime.now())) + with open(tileconn_file) as f: + tileconn = json.load(f) + + wire_nodes_file = os.path.join(args.output_dir, 'wire_nodes.pickle') + if os.path.exists(wire_nodes_file) and args.verify_only: + with open(wire_nodes_file, 'rb') as f: + wire_nodes = pickle.load(f) + else: + print( + "{} Connecting wires to verify tileconn".format( + datetime.datetime.now())) + wire_nodes = connect_wires(grid['tiles'], tileconn, wire_map) + with open(wire_nodes_file, 'wb') as f: + pickle.dump(wire_nodes, f) + + print('{} Verifing tileconn'.format(datetime.datetime.now())) + error_nodes = [] + prjxray.lib.verify_nodes( + [ + (node['node'], tuple(wire['wire'] + for wire in node['wires'])) + for node in raw_node_data + ], wire_nodes, error_nodes) + + if len(error_nodes) > 0: + error_nodes_file = os.path.join(args.output_dir, 'error_nodes.json') + with open(error_nodes_file, 'w') as f: + json.dump(error_nodes, f, indent=2) + + ignored_wires = [] + path_to_file = os.path.dirname( + 
os.path.abspath(inspect.getfile(inspect.currentframe()))) + ignored_wires_file = os.path.join(path_to_file, 'ignored_wires.txt') + if os.path.exists(ignored_wires_file): + with open(ignored_wires_file) as f: + ignored_wires = set(l.strip() for l in f) + + if not prjxray.lib.check_errors(error_nodes, ignored_wires): + print( + '{} errors detected, see {} for details.'.format( + len(error_nodes), error_nodes_file)) + sys.exit(1) + else: + print( + '{} errors ignored because of {}\nSee {} for details.'.format( + len(error_nodes), ignored_wires_file, error_nodes_file)) + if __name__ == '__main__': - main() + main() diff --git a/fuzzers/074-dump_all/generate_ignore_list.py b/fuzzers/074-dump_all/generate_ignore_list.py index 30b837d9..5cd0be1c 100644 --- a/fuzzers/074-dump_all/generate_ignore_list.py +++ b/fuzzers/074-dump_all/generate_ignore_list.py @@ -1,33 +1,32 @@ import json with open('output/error_nodes.json') as f: - flat_error_nodes = json.load(f) + flat_error_nodes = json.load(f) error_nodes = {} for node, raw_node, generated_nodes in flat_error_nodes: - if node not in error_nodes: - error_nodes[node] = { - 'raw_node': set(raw_node), - 'generated_nodes': set(), - } + if node not in error_nodes: + error_nodes[node] = { + 'raw_node': set(raw_node), + 'generated_nodes': set(), + } - - assert error_nodes[node]['raw_node'] == set(raw_node) - error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes))) + assert error_nodes[node]['raw_node'] == set(raw_node) + error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes))) for node, error in error_nodes.items(): - combined_generated_nodes = set() - for generated_node in error['generated_nodes']: - combined_generated_nodes |= set(generated_node) + combined_generated_nodes = set() + for generated_node in error['generated_nodes']: + combined_generated_nodes |= set(generated_node) - assert error['raw_node'] == combined_generated_nodes, (node, error) + assert error['raw_node'] == 
combined_generated_nodes, (node, error) - good_node = max(error['generated_nodes'], key=lambda x: len(x)) - bad_nodes = error['generated_nodes'] - set((good_node,)) + good_node = max(error['generated_nodes'], key=lambda x: len(x)) + bad_nodes = error['generated_nodes'] - set((good_node, )) - if max(len(generated_node) for generated_node in bad_nodes) > 1: - assert False, node - else: - for generated_node in bad_nodes: - for wire in generated_node: - print(wire) + if max(len(generated_node) for generated_node in bad_nodes) > 1: + assert False, node + else: + for generated_node in bad_nodes: + for wire in generated_node: + print(wire) diff --git a/fuzzers/074-dump_all/reduce_site_types.py b/fuzzers/074-dump_all/reduce_site_types.py index b869c695..36f35a7b 100644 --- a/fuzzers/074-dump_all/reduce_site_types.py +++ b/fuzzers/074-dump_all/reduce_site_types.py @@ -13,43 +13,46 @@ import os.path import re import json + def main(): - parser = argparse.ArgumentParser(description="Reduces per tile site types to generic site types.") - parser.add_argument('--output_dir', required=True) + parser = argparse.ArgumentParser( + description="Reduces per tile site types to generic site types.") + parser.add_argument('--output_dir', required=True) - args = parser.parse_args() + args = parser.parse_args() - SITE_TYPE = re.compile('^tile_type_(.+)_site_type_(.+)\.json$') - site_types = {} - for path in os.listdir(args.output_dir): - match = SITE_TYPE.fullmatch(path) - if match is None: - continue + SITE_TYPE = re.compile('^tile_type_(.+)_site_type_(.+)\.json$') + site_types = {} + for path in os.listdir(args.output_dir): + match = SITE_TYPE.fullmatch(path) + if match is None: + continue - site_type = match.group(2) - if site_type not in site_types: - site_types[site_type] = [] + site_type = match.group(2) + if site_type not in site_types: + site_types[site_type] = [] - site_types[site_type].append(path) + site_types[site_type].append(path) - for site_type in site_types: - 
proto_site_type = None - for instance in site_types[site_type]: - with open(os.path.join(args.output_dir, instance)) as f: - instance_site_type = json.load(f) + for site_type in site_types: + proto_site_type = None + for instance in site_types[site_type]: + with open(os.path.join(args.output_dir, instance)) as f: + instance_site_type = json.load(f) - if proto_site_type is None: - proto_site_type = instance_site_type - else: - prjxray.lib.compare_prototype_site( - proto_site_type, - instance_site_type, - ) + if proto_site_type is None: + proto_site_type = instance_site_type + else: + prjxray.lib.compare_prototype_site( + proto_site_type, + instance_site_type, + ) + + with open(os.path.join(args.output_dir, + 'site_type_{}.json'.format(site_type)), + 'w') as f: + json.dump(proto_site_type, f, indent=2) - with open(os.path.join(args.output_dir, - 'site_type_{}.json'.format(site_type)), 'w') as f: - json.dump(proto_site_type, f, indent=2) if __name__ == '__main__': - main() - + main() diff --git a/fuzzers/074-dump_all/reduce_tile_types.py b/fuzzers/074-dump_all/reduce_tile_types.py index d51528ea..32754fb5 100644 --- a/fuzzers/074-dump_all/reduce_tile_types.py +++ b/fuzzers/074-dump_all/reduce_tile_types.py @@ -19,24 +19,30 @@ import os import functools import re + def check_and_strip_prefix(name, prefix): - assert name.startswith(prefix), repr((name, prefix)) - return name[len(prefix):] + assert name.startswith(prefix), repr((name, prefix)) + return name[len(prefix):] + def flatten_site_pins(tile, site, site_pins, site_pin_node_to_wires): - def inner(): - for site_pin in site_pins: - wires = tuple(site_pin_node_to_wires(tile, site_pin['node'])) + def inner(): + for site_pin in site_pins: + wires = tuple(site_pin_node_to_wires(tile, site_pin['node'])) - if len(wires) == 0: - yield (check_and_strip_prefix(site_pin['site_pin'], site+'/'), None) - continue + if len(wires) == 0: + yield ( + check_and_strip_prefix(site_pin['site_pin'], site + '/'), + None) + continue - 
assert len(wires) == 1, repr(wires) + assert len(wires) == 1, repr(wires) - yield (check_and_strip_prefix(site_pin['site_pin'], site+'/'), wires[0]) + yield ( + check_and_strip_prefix(site_pin['site_pin'], site + '/'), + wires[0]) - return dict(inner()) + return dict(inner()) # All site names appear to follow the pattern _XY. @@ -45,6 +51,7 @@ def flatten_site_pins(tile, site, site_pins, site_pin_node_to_wires): # (e.g. start at X0Y0) and discard the prefix from the name. SITE_COORDINATE_PATTERN = re.compile('^(.+)_X([0-9]+)Y([0-9]+)$') + def find_origin_coordinate(sites): """ Find the coordinates of each site within the tile, and then subtract the smallest coordinate to re-origin them all to be relative to the tile. @@ -68,256 +75,291 @@ def find_origin_coordinate(sites): return min_x_coord, min_y_coord + def get_sites(tile, site_pin_node_to_wires): - min_x_coord, min_y_coord = find_origin_coordinate(tile['sites']) + min_x_coord, min_y_coord = find_origin_coordinate(tile['sites']) - for site in tile['sites']: - orig_site_name = site['site'] - coordinate = SITE_COORDINATE_PATTERN.match(orig_site_name) + for site in tile['sites']: + orig_site_name = site['site'] + coordinate = SITE_COORDINATE_PATTERN.match(orig_site_name) - x_coord = int(coordinate.group(2)) - y_coord = int(coordinate.group(3)) + x_coord = int(coordinate.group(2)) + y_coord = int(coordinate.group(3)) - yield ( - { - 'name': 'X{}Y{}'.format(x_coord - min_x_coord, y_coord - min_y_coord), - 'prefix': coordinate.group(1), - 'x_coord': x_coord - min_x_coord, - 'y_coord': y_coord - min_y_coord, - 'type': site['type'], - 'site_pins': dict(flatten_site_pins( - tile['tile'], - site['site'], site['site_pins'], site_pin_node_to_wires)), - } - ) + yield ( + { + 'name': + 'X{}Y{}'.format(x_coord - min_x_coord, y_coord - min_y_coord), + 'prefix': + coordinate.group(1), + 'x_coord': + x_coord - min_x_coord, + 'y_coord': + y_coord - min_y_coord, + 'type': + site['type'], + 'site_pins': + dict( + flatten_site_pins( 
+ tile['tile'], site['site'], site['site_pins'], + site_pin_node_to_wires)), + }) def compare_sites_and_update(tile, sites, new_sites): - for site_a, site_b in zip(sites, new_sites): - assert site_a['type'] == site_b['type'] - assert site_a['site_pins'].keys() == site_b['site_pins'].keys() + for site_a, site_b in zip(sites, new_sites): + assert site_a['type'] == site_b['type'] + assert site_a['site_pins'].keys() == site_b['site_pins'].keys() - for site_pin in site_a['site_pins']: - if site_a['site_pins'][site_pin] is not None and site_b['site_pins'][site_pin] is not None: - assert site_a['site_pins'][site_pin] == site_b['site_pins'][site_pin] - elif site_a['site_pins'][site_pin] is None and site_b['site_pins'][site_pin] is not None: - site_a['site_pins'][site_pin] = site_b['site_pins'][site_pin] + for site_pin in site_a['site_pins']: + if site_a['site_pins'][site_pin] is not None and site_b[ + 'site_pins'][site_pin] is not None: + assert site_a['site_pins'][site_pin] == site_b['site_pins'][ + site_pin] + elif site_a['site_pins'][site_pin] is None and site_b['site_pins'][ + site_pin] is not None: + site_a['site_pins'][site_pin] = site_b['site_pins'][site_pin] def get_prototype_site(site): - proto = {} - proto['type'] = site['type'] - proto['site_pins'] = {} - proto['site_pips'] = {} - for site_pin in site['site_pins']: - name = check_and_strip_prefix(site_pin['site_pin'], site['site'] + '/') + proto = {} + proto['type'] = site['type'] + proto['site_pins'] = {} + proto['site_pips'] = {} + for site_pin in site['site_pins']: + name = check_and_strip_prefix(site_pin['site_pin'], site['site'] + '/') - proto['site_pins'][name] = { - 'direction': site_pin['direction'], - 'index_in_site': site_pin['index_in_site'], - } + proto['site_pins'][name] = { + 'direction': site_pin['direction'], + 'index_in_site': site_pin['index_in_site'], + } - for site_pip in site['site_pips']: - name = check_and_strip_prefix(site_pip['site_pip'], site['site'] + '/') + for site_pip in 
site['site_pips']: + name = check_and_strip_prefix(site_pip['site_pip'], site['site'] + '/') - proto['site_pips'][name] = { - 'to_pin': site_pip['to_pin'], - 'from_pin': site_pip['from_pin'], - } + proto['site_pips'][name] = { + 'to_pin': site_pip['to_pin'], + 'from_pin': site_pip['from_pin'], + } + + return proto - return proto def get_pips(tile, pips): - proto_pips = {} + proto_pips = {} - for pip in pips: - name = check_and_strip_prefix(pip['pip'], tile + '/') + for pip in pips: + name = check_and_strip_prefix(pip['pip'], tile + '/') - proto_pips[name] = { - 'src_wire': check_and_strip_prefix(pip['src_wire'], tile + '/') - if pip['src_wire'] is not None else None, - 'dst_wire': check_and_strip_prefix(pip['dst_wire'], tile + '/') - if pip['dst_wire'] is not None else None, - 'is_pseudo': pip['is_pseudo'], - 'is_directional': pip['is_directional'], - 'can_invert': pip['can_invert'], - } + proto_pips[name] = { + 'src_wire': + check_and_strip_prefix(pip['src_wire'], tile + '/') + if pip['src_wire'] is not None else None, + 'dst_wire': + check_and_strip_prefix(pip['dst_wire'], tile + '/') + if pip['dst_wire'] is not None else None, + 'is_pseudo': + pip['is_pseudo'], + 'is_directional': + pip['is_directional'], + 'can_invert': + pip['can_invert'], + } + + return proto_pips - return proto_pips def compare_and_update_pips(pips, new_pips): - # Pip names are always the same, but sometimes the src_wire or dst_wire - # may be missing. + # Pip names are always the same, but sometimes the src_wire or dst_wire + # may be missing. 
- assert pips.keys() == new_pips.keys(), repr((pips.keys(), new_pips.keys())) - for name in pips: - if pips[name]['src_wire'] is not None and new_pips[name]['src_wire'] is not None: - assert pips[name]['src_wire'] == new_pips[name]['src_wire'], repr(( - pips[name]['src_wire'], - new_pips[name]['src_wire'], - )) - elif pips[name]['src_wire'] is None and new_pips[name]['src_wire'] is not None: - pips[name]['src_wire'] = new_pips[name]['src_wire'] + assert pips.keys() == new_pips.keys(), repr((pips.keys(), new_pips.keys())) + for name in pips: + if pips[name]['src_wire'] is not None and new_pips[name][ + 'src_wire'] is not None: + assert pips[name]['src_wire'] == new_pips[name]['src_wire'], repr( + ( + pips[name]['src_wire'], + new_pips[name]['src_wire'], + )) + elif pips[name]['src_wire'] is None and new_pips[name][ + 'src_wire'] is not None: + pips[name]['src_wire'] = new_pips[name]['src_wire'] - if pips[name]['dst_wire'] is not None and new_pips[name]['dst_wire'] is not None: - assert pips[name]['dst_wire'] == new_pips[name]['dst_wire'], repr(( - pips[name]['dst_wire'], - new_pips[name]['dst_wire'], - )) - elif pips[name]['dst_wire'] is None and new_pips[name]['dst_wire'] is not None: - pips[name]['dst_wire'] = new_pips[name]['dst_wire'] + if pips[name]['dst_wire'] is not None and new_pips[name][ + 'dst_wire'] is not None: + assert pips[name]['dst_wire'] == new_pips[name]['dst_wire'], repr( + ( + pips[name]['dst_wire'], + new_pips[name]['dst_wire'], + )) + elif pips[name]['dst_wire'] is None and new_pips[name][ + 'dst_wire'] is not None: + pips[name]['dst_wire'] = new_pips[name]['dst_wire'] + + for k in ['is_pseudo', 'is_directional', 'can_invert']: + assert pips[name][k] == new_pips[name][k], ( + k, pips[name][k], new_pips[name][k]) - for k in ['is_pseudo', 'is_directional', 'can_invert']: - assert pips[name][k] == new_pips[name][k], (k, pips[name][k], new_pips[name][k]) def check_wires(wires, sites, pips): - """ Verify that the wires generates from nodes are a 
superset of wires in + """ Verify that the wires generates from nodes are a superset of wires in sites and pips """ - if sites is not None: - for site in sites: - for wire_to_site_pin in site['site_pins'].values(): - if wire_to_site_pin is not None: - assert wire_to_site_pin in wires, repr((wire_to_site_pin, wires)) + if sites is not None: + for site in sites: + for wire_to_site_pin in site['site_pins'].values(): + if wire_to_site_pin is not None: + assert wire_to_site_pin in wires, repr( + (wire_to_site_pin, wires)) + + if pips is not None: + for pip in pips.values(): + if pip['src_wire'] is not None: + assert pip['src_wire'] in wires, repr((pip['src_wire'], wires)) + if pip['dst_wire'] is not None: + assert pip['dst_wire'] in wires, repr((pip['dst_wire'], wires)) - if pips is not None: - for pip in pips.values(): - if pip['src_wire'] is not None: - assert pip['src_wire'] in wires, repr((pip['src_wire'], wires)) - if pip['dst_wire'] is not None: - assert pip['dst_wire'] in wires, repr((pip['dst_wire'], wires)) def read_json5(fname, nodes): - node_lookup = prjxray.lib.NodeLookup() - node_lookup.load_from_nodes(nodes) + node_lookup = prjxray.lib.NodeLookup() + node_lookup.load_from_nodes(nodes) - #print('{} Reading {} (in pid {})'.format(datetime.datetime.now(), fname, os.getpid())) - with open(fname) as f: - tile = json5.load(f) + #print('{} Reading {} (in pid {})'.format(datetime.datetime.now(), fname, os.getpid())) + with open(fname) as f: + tile = json5.load(f) - #print('{} Done reading {}'.format(datetime.datetime.now(), fname)) - def get_site_types(): - for site in tile['sites']: - yield get_prototype_site(site) + #print('{} Done reading {}'.format(datetime.datetime.now(), fname)) + def get_site_types(): + for site in tile['sites']: + yield get_prototype_site(site) - site_types = tuple(get_site_types()) - sites = tuple(get_sites(tile, node_lookup.site_pin_node_to_wires)) - pips = get_pips(tile['tile'], tile['pips']) - def inner(): - for wire in tile['wires']: 
- assert wire['wire'].startswith(tile['tile'] + '/') - yield wire['wire'][len(tile['tile'])+1:] + site_types = tuple(get_site_types()) + sites = tuple(get_sites(tile, node_lookup.site_pin_node_to_wires)) + pips = get_pips(tile['tile'], tile['pips']) - wires = set(inner()) - wires_from_nodes = set(node_lookup.wires_for_tile(tile['tile'])) - assert len(wires_from_nodes - wires) == 0, repr((wires, wires_from_nodes)) + def inner(): + for wire in tile['wires']: + assert wire['wire'].startswith(tile['tile'] + '/') + yield wire['wire'][len(tile['tile']) + 1:] + + wires = set(inner()) + wires_from_nodes = set(node_lookup.wires_for_tile(tile['tile'])) + assert len(wires_from_nodes - wires) == 0, repr((wires, wires_from_nodes)) + + return fname, tile, site_types, sites, pips, wires - return fname, tile, site_types, sites, pips, wires def reduce_tile(pool, site_types, tile_type, tile_instances, node_lookup): - sites = None - pips = None - wires = set() + sites = None + pips = None + wires = set() - with progressbar.ProgressBar(max_value=len(tile_instances)) as bar: - chunksize = 20 - if len(tile_instances) < chunksize*2: - iter = map(lambda file: read_json5(file, node_lookup.nodes), tile_instances) - else: - print('{} Using pool.imap_unordered'.format(datetime.datetime.now())) - iter = pool.imap_unordered( - functools.partial(read_json5, nodes=node_lookup.nodes), - tile_instances, - chunksize=chunksize, - ) - - for idx, (fname, tile, new_site_types, new_sites, new_pips, new_wires) in enumerate(iter): - bar.update(idx) - - assert tile['type'] == tile_type, repr((tile['tile'], tile_type)) - - for site_type in new_site_types: - if site_type['type'] in site_types: - prjxray.lib.compare_prototype_site(site_type, site_types[site_type['type']]) + with progressbar.ProgressBar(max_value=len(tile_instances)) as bar: + chunksize = 20 + if len(tile_instances) < chunksize * 2: + iter = map( + lambda file: read_json5(file, node_lookup.nodes), + tile_instances) else: - 
site_types[site_type['type']] = site_type + print( + '{} Using pool.imap_unordered'.format(datetime.datetime.now())) + iter = pool.imap_unordered( + functools.partial(read_json5, nodes=node_lookup.nodes), + tile_instances, + chunksize=chunksize, + ) - # Sites are expect to always be the same - if sites is None: - sites = new_sites - else: - compare_sites_and_update(tile['tile'], sites, new_sites) + for idx, (fname, tile, new_site_types, new_sites, new_pips, + new_wires) in enumerate(iter): + bar.update(idx) - if pips is None: - pips = new_pips - else: - compare_and_update_pips(pips, new_pips) + assert tile['type'] == tile_type, repr((tile['tile'], tile_type)) - wires |= new_wires + for site_type in new_site_types: + if site_type['type'] in site_types: + prjxray.lib.compare_prototype_site( + site_type, site_types[site_type['type']]) + else: + site_types[site_type['type']] = site_type - bar.update(idx+1) + # Sites are expect to always be the same + if sites is None: + sites = new_sites + else: + compare_sites_and_update(tile['tile'], sites, new_sites) - check_wires(wires, sites, pips) + if pips is None: + pips = new_pips + else: + compare_and_update_pips(pips, new_pips) - return { - 'tile_type': tile_type, - 'sites': sites, - 'pips': pips, - 'wires': tuple(wires), - } + wires |= new_wires + + bar.update(idx + 1) + + check_wires(wires, sites, pips) + + return { + 'tile_type': tile_type, + 'sites': sites, + 'pips': pips, + 'wires': tuple(wires), + } def main(): - parser = argparse.ArgumentParser(description="Reduces raw database dump into prototype tiles, grid, and connections.") - parser.add_argument('--root_dir', required=True) - parser.add_argument('--output_dir', required=True) - parser.add_argument('--ignore_cache', action='store_true') + parser = argparse.ArgumentParser( + description= + "Reduces raw database dump into prototype tiles, grid, and connections." 
+ ) + parser.add_argument('--root_dir', required=True) + parser.add_argument('--output_dir', required=True) + parser.add_argument('--ignore_cache', action='store_true') - args = parser.parse_args() + args = parser.parse_args() - print('{} Reading root.csv'.format(datetime.datetime.now())) - tiles, nodes = prjxray.lib.read_root_csv(args.root_dir) + print('{} Reading root.csv'.format(datetime.datetime.now())) + tiles, nodes = prjxray.lib.read_root_csv(args.root_dir) - print('{} Loading node<->wire mapping'.format(datetime.datetime.now())) - node_lookup = prjxray.lib.NodeLookup() - node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle') - if os.path.exists(node_lookup_file) and not args.ignore_cache: - node_lookup.load_from_file(node_lookup_file) - else: - node_lookup.load_from_root_csv(nodes) - node_lookup.save_to_file(node_lookup_file) + print('{} Loading node<->wire mapping'.format(datetime.datetime.now())) + node_lookup = prjxray.lib.NodeLookup() + node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle') + if os.path.exists(node_lookup_file) and not args.ignore_cache: + node_lookup.load_from_file(node_lookup_file) + else: + node_lookup.load_from_root_csv(nodes) + node_lookup.save_to_file(node_lookup_file) - site_types = {} - - processes = min(multiprocessing.cpu_count(), 10) - print('Running {} processes'.format(processes)) - pool = multiprocessing.Pool(processes=processes) - - for tile_type in sorted(tiles.keys()): - #for tile_type in ['CLBLL_L', 'CLBLL_R', 'CLBLM_L', 'CLBLM_R', 'INT_L', 'INT_L']: - tile_type_file = os.path.join(args.output_dir, 'tile_type_{}.json'.format(tile_type)) site_types = {} - if os.path.exists(tile_type_file): - print('{} Skip reduced tile for {}'.format(datetime.datetime.now(), tile_type)) - continue - print('{} Generating reduced tile for {}'.format(datetime.datetime.now(), tile_type)) - reduced_tile = reduce_tile( - pool, - site_types, - tile_type, tiles[tile_type], - node_lookup) - for site_type in site_types: - with 
open(os.path.join( - args.output_dir, - 'tile_type_{}_site_type_{}.json'.format( - tile_type, - site_types[site_type]['type'] - )), 'w') as f: - json.dump(site_types[site_type], f, indent=2) - with open(tile_type_file, 'w') as f: - json.dump(reduced_tile, f, indent=2) + processes = min(multiprocessing.cpu_count(), 10) + print('Running {} processes'.format(processes)) + pool = multiprocessing.Pool(processes=processes) + + for tile_type in sorted(tiles.keys()): + #for tile_type in ['CLBLL_L', 'CLBLL_R', 'CLBLM_L', 'CLBLM_R', 'INT_L', 'INT_L']: + tile_type_file = os.path.join( + args.output_dir, 'tile_type_{}.json'.format(tile_type)) + site_types = {} + if os.path.exists(tile_type_file): + print( + '{} Skip reduced tile for {}'.format( + datetime.datetime.now(), tile_type)) + continue + print( + '{} Generating reduced tile for {}'.format( + datetime.datetime.now(), tile_type)) + reduced_tile = reduce_tile( + pool, site_types, tile_type, tiles[tile_type], node_lookup) + for site_type in site_types: + with open(os.path.join( + args.output_dir, 'tile_type_{}_site_type_{}.json'.format( + tile_type, site_types[site_type]['type'])), 'w') as f: + json.dump(site_types[site_type], f, indent=2) + + with open(tile_type_file, 'w') as f: + json.dump(reduced_tile, f, indent=2) if __name__ == '__main__': - main() - + main() diff --git a/prjxray/connections.py b/prjxray/connections.py index 493759f2..14416fce 100644 --- a/prjxray/connections.py +++ b/prjxray/connections.py @@ -3,64 +3,72 @@ from collections import namedtuple WireInGrid = namedtuple('WireInGrid', 'tile grid_x grid_y wire') Connection = namedtuple('Connection', 'wire_a wire_b') + class Connections(object): - def __init__(self, tilegrid, tileconn, tile_wires): - self.grid = tilegrid['tiles'] - self.tile_wires = tile_wires - self.coord_to_tile = {} - self.coord_to_tile_type = {} + def __init__(self, tilegrid, tileconn, tile_wires): + self.grid = tilegrid['tiles'] + self.tile_wires = tile_wires + self.coord_to_tile = {} + 
self.coord_to_tile_type = {} - for tile, tile_info in self.grid.items(): - self.coord_to_tile[(tile_info['grid_x'], tile_info['grid_y'])] = tile - self.coord_to_tile_type[(tile_info['grid_x'], tile_info['grid_y'])] = tile_info['type'] + for tile, tile_info in self.grid.items(): + self.coord_to_tile[(tile_info['grid_x'], + tile_info['grid_y'])] = tile + self.coord_to_tile_type[(tile_info['grid_x'], + tile_info['grid_y'])] = tile_info['type'] - # Make sure we have tile type info for every tile in the grid. - assert tile_info['type'] in self.tile_wires, (tile_info['type'], self.tile_wires.keys()) + # Make sure we have tile type info for every tile in the grid. + assert tile_info['type'] in self.tile_wires, ( + tile_info['type'], self.tile_wires.keys()) - self.potential_connections = {} + self.potential_connections = {} - for conn in tileconn: - grid_deltas = conn['grid_deltas'] - tile_types = conn['tile_types'] + for conn in tileconn: + grid_deltas = conn['grid_deltas'] + tile_types = conn['tile_types'] + + for pairs in conn['wire_pairs']: + key = (tile_types[0], pairs[0]) + if key not in self.potential_connections: + self.potential_connections[key] = [] + self.potential_connections[key].append( + (grid_deltas, tile_types[1], pairs[1])) + + def all_possible_connections_from(self, wire_in_grid): + tile_type = self.coord_to_tile_type[( + wire_in_grid.grid_x, wire_in_grid.grid_y)] + + key = (tile_type, wire_in_grid.wire) - for pairs in conn['wire_pairs']: - key = (tile_types[0], pairs[0]) if key not in self.potential_connections: - self.potential_connections[key] = [] - self.potential_connections[key].append(( - grid_deltas, tile_types[1], pairs[1] - )) + return - def all_possible_connections_from(self, wire_in_grid): - tile_type = self.coord_to_tile_type[(wire_in_grid.grid_x, wire_in_grid.grid_y)] + for relative_coord, target_tile_type, target_wire in ( + self.potential_connections[key]): + rel_x, rel_y = relative_coord + target_coord = ( + wire_in_grid.grid_x + rel_x, 
wire_in_grid.grid_y + rel_y) - key = (tile_type, wire_in_grid.wire) + if target_coord in self.coord_to_tile_type: + if self.coord_to_tile_type[target_coord] == target_tile_type: + yield Connection( + wire_in_grid, + WireInGrid( + tile=self.coord_to_tile[target_coord], + grid_x=target_coord[0], + grid_y=target_coord[1], + wire=target_wire)) - if key not in self.potential_connections: - return - - for relative_coord, target_tile_type, target_wire in ( - self.potential_connections[key]): - rel_x, rel_y = relative_coord - target_coord = (wire_in_grid.grid_x+rel_x, wire_in_grid.grid_y+rel_y) - - if target_coord in self.coord_to_tile_type: - if self.coord_to_tile_type[target_coord] == target_tile_type: - yield Connection(wire_in_grid, WireInGrid( - tile = self.coord_to_tile[target_coord], - grid_x = target_coord[0], - grid_y = target_coord[1], - wire = target_wire)) - - def get_connections(self): - """ Yields Connection objects that represent all connections present in + def get_connections(self): + """ Yields Connection objects that represent all connections present in the grid based on tileconn """ - for tile, tile_info in self.grid.items(): - for wire in self.tile_wires[tile_info['type']]: - wire_in_grid = WireInGrid( - tile = tile, - grid_x = tile_info['grid_x'], - grid_y = tile_info['grid_y'], - wire = wire) - for potential_connection in self.all_possible_connections_from(wire_in_grid): - yield potential_connection + for tile, tile_info in self.grid.items(): + for wire in self.tile_wires[tile_info['type']]: + wire_in_grid = WireInGrid( + tile=tile, + grid_x=tile_info['grid_x'], + grid_y=tile_info['grid_y'], + wire=wire) + for potential_connection in self.all_possible_connections_from( + wire_in_grid): + yield potential_connection diff --git a/prjxray/db.py b/prjxray/db.py index 2f194c93..66e86ce4 100644 --- a/prjxray/db.py +++ b/prjxray/db.py @@ -4,93 +4,101 @@ from prjxray import grid from prjxray import tile from prjxray import connections + def 
get_available_databases(prjxray_root): - """ Return set of available directory to databases given the root directory + """ Return set of available directory to databases given the root directory of prjxray-db """ - db_types = set() - for d in os.listdir(prjxray_root): - if d.startswith("."): - continue + db_types = set() + for d in os.listdir(prjxray_root): + if d.startswith("."): + continue - dpath = os.path.join(prjxray_root, d) + dpath = os.path.join(prjxray_root, d) - if os.path.exists(os.path.join(dpath, "settings.sh")): - db_types.add(dpath) + if os.path.exists(os.path.join(dpath, "settings.sh")): + db_types.add(dpath) + + return db_types - return db_types class Database(object): - def __init__(self, db_root): - """ Create project x-ray Database at given db_root. + def __init__(self, db_root): + """ Create project x-ray Database at given db_root. db_root: Path to directory containing settings.sh, *.db, tilegrid.json and tileconn.json """ - self.db_root = db_root - self.tilegrid = None - self.tileconn = None - self.tile_types = None + self.db_root = db_root + self.tilegrid = None + self.tileconn = None + self.tile_types = None - self.tile_types = {} - for f in os.listdir(self.db_root): - if f.endswith('.json') and f.startswith('tile_type_'): - tile_type = f[len('tile_type_'):-len('.json')].lower() + self.tile_types = {} + for f in os.listdir(self.db_root): + if f.endswith('.json') and f.startswith('tile_type_'): + tile_type = f[len('tile_type_'):-len('.json')].lower() - segbits = os.path.join(self.db_root, 'segbits_{}.db'.format(tile_type)) - if not os.path.isfile(segbits): - segbits = None + segbits = os.path.join( + self.db_root, 'segbits_{}.db'.format(tile_type)) + if not os.path.isfile(segbits): + segbits = None - mask = os.path.join(self.db_root, 'mask_{}.db'.format(tile_type)) - if not os.path.isfile(mask): - mask = None + mask = os.path.join( + self.db_root, 'mask_{}.db'.format(tile_type)) + if not os.path.isfile(mask): + mask = None - tile_type_file = 
os.path.join(self.db_root, 'tile_type_{}.json'.format(tile_type.upper())) - if not os.path.isfile(tile_type_file): - tile_type_file = None + tile_type_file = os.path.join( + self.db_root, 'tile_type_{}.json'.format( + tile_type.upper())) + if not os.path.isfile(tile_type_file): + tile_type_file = None - self.tile_types[tile_type.upper()] = tile.TileDbs( - segbits = segbits, - mask = mask, - tile_type = tile_type_file, - ) + self.tile_types[tile_type.upper()] = tile.TileDbs( + segbits=segbits, + mask=mask, + tile_type=tile_type_file, + ) - def get_tile_types(self): - """ Return list of tile types """ - return self.tile_types.keys() + def get_tile_types(self): + """ Return list of tile types """ + return self.tile_types.keys() - def get_tile_type(self, tile_type): - """ Return Tile object for given tilename. """ - return tile.Tile(tile_type, self.tile_types[tile_type]) + def get_tile_type(self, tile_type): + """ Return Tile object for given tilename. """ + return tile.Tile(tile_type, self.tile_types[tile_type]) - def _read_tilegrid(self): - """ Read tilegrid database if not already read. """ - if not self.tilegrid: - with open(os.path.join(self.db_root, 'tilegrid.json')) as f: - self.tilegrid = json.load(f) + def _read_tilegrid(self): + """ Read tilegrid database if not already read. """ + if not self.tilegrid: + with open(os.path.join(self.db_root, 'tilegrid.json')) as f: + self.tilegrid = json.load(f) - def _read_tileconn(self): - """ Read tileconn database if not already read. """ - if not self.tileconn: - with open(os.path.join(self.db_root, 'tileconn.json')) as f: - self.tileconn = json.load(f) + def _read_tileconn(self): + """ Read tileconn database if not already read. """ + if not self.tileconn: + with open(os.path.join(self.db_root, 'tileconn.json')) as f: + self.tileconn = json.load(f) - def grid(self): - """ Return Grid object for database. 
""" - self._read_tilegrid() - return grid.Grid(self.tilegrid) + def grid(self): + """ Return Grid object for database. """ + self._read_tilegrid() + return grid.Grid(self.tilegrid) - def _read_tile_types(self): - for tile_type, db in self.tile_types.items(): - with open(db.tile_type) as f: - self.tile_types[tile_type] = json.load(f) + def _read_tile_types(self): + for tile_type, db in self.tile_types.items(): + with open(db.tile_type) as f: + self.tile_types[tile_type] = json.load(f) - def connections(self): - self._read_tilegrid() - self._read_tileconn() - self._read_tile_types() + def connections(self): + self._read_tilegrid() + self._read_tileconn() + self._read_tile_types() - tile_wires = dict((tile_type, db['wires']) - for tile_type, db in self.tile_types.items()) - return connections.Connections(self.tilegrid, self.tileconn, tile_wires) + tile_wires = dict( + (tile_type, db['wires']) + for tile_type, db in self.tile_types.items()) + return connections.Connections( + self.tilegrid, self.tileconn, tile_wires) diff --git a/prjxray/grid.py b/prjxray/grid.py index 36f47025..36826c5f 100644 --- a/prjxray/grid.py +++ b/prjxray/grid.py @@ -3,49 +3,51 @@ from collections import namedtuple GridLoc = namedtuple('GridLoc', 'grid_x grid_y') GridInfo = namedtuple('GridInfo', 'segment sites tile_type') + class Grid(object): - """ Object that represents grid for a given database. + """ Object that represents grid for a given database. Provides methods to inspect grid by name or location. Also provides mapping of segment offsets for particular grid locations and their tile types. 
""" - def __init__(self, tilegrid): - self.tilegrid = tilegrid - self.loc = {} - self.tileinfo = {} - for tile in self.tilegrid['tiles']: - tileinfo = self.tilegrid['tiles'][tile] - grid_loc = GridLoc(tileinfo['grid_x'], tileinfo['grid_y']) - self.loc[grid_loc] = tile - self.tileinfo[tile] = GridInfo( - segment = tileinfo['segment'] if 'segment' in tileinfo else None, - sites = tileinfo['sites'], - tile_type = tileinfo['type']) + def __init__(self, tilegrid): + self.tilegrid = tilegrid + self.loc = {} + self.tileinfo = {} - x, y = zip(*self.loc.keys()) - self._dims = (min(x), max(x), min(y), max(y)) + for tile in self.tilegrid['tiles']: + tileinfo = self.tilegrid['tiles'][tile] + grid_loc = GridLoc(tileinfo['grid_x'], tileinfo['grid_y']) + self.loc[grid_loc] = tile + self.tileinfo[tile] = GridInfo( + segment=tileinfo['segment'] if 'segment' in tileinfo else None, + sites=tileinfo['sites'], + tile_type=tileinfo['type']) - def tile_locations(self): - """ Return list of tile locations. """ - return self.loc.keys() + x, y = zip(*self.loc.keys()) + self._dims = (min(x), max(x), min(y), max(y)) - def dims(self): - """ Returns (x_min, x_max, y_min, y_max) for given Grid. """ - return self._dims + def tile_locations(self): + """ Return list of tile locations. """ + return self.loc.keys() - def is_populated(self, grid_loc): - return grid_loc in self.loc + def dims(self): + """ Returns (x_min, x_max, y_min, y_max) for given Grid. 
""" + return self._dims - def loc_of_tilename(self, tilename): - tileinfo = self.tilegrid['tiles'][tilename] - return GridLoc(tileinfo['grid_x'], tileinfo['grid_y']) + def is_populated(self, grid_loc): + return grid_loc in self.loc - def tilename_at_loc(self, grid_loc): - return self.loc[grid_loc] + def loc_of_tilename(self, tilename): + tileinfo = self.tilegrid['tiles'][tilename] + return GridLoc(tileinfo['grid_x'], tileinfo['grid_y']) - def gridinfo_at_loc(self, grid_loc): - return self.tileinfo[self.loc[grid_loc]] + def tilename_at_loc(self, grid_loc): + return self.loc[grid_loc] - def gridinfo_at_tilename(self, tilename): - return self.tileinfo[tilename] + def gridinfo_at_loc(self, grid_loc): + return self.tileinfo[self.loc[grid_loc]] + + def gridinfo_at_tilename(self, tilename): + return self.tileinfo[tilename] diff --git a/prjxray/lib.py b/prjxray/lib.py index d2a64d47..eb9ac034 100644 --- a/prjxray/lib.py +++ b/prjxray/lib.py @@ -4,29 +4,32 @@ import pickle import pyjson5 as json5 import progressbar + def read_root_csv(root_dir): - """ Reads root.csv from raw db directory. + """ Reads root.csv from raw db directory. This should only be used during database generation. 
""" - tiles = {} - nodes = [] + tiles = {} + nodes = [] - with open(os.path.join(root_dir, 'root.csv')) as f: - for d in csv.DictReader(f): - if d['filetype'] == 'tile': - if d['subtype'] not in tiles: - tiles[d['subtype']] = [] + with open(os.path.join(root_dir, 'root.csv')) as f: + for d in csv.DictReader(f): + if d['filetype'] == 'tile': + if d['subtype'] not in tiles: + tiles[d['subtype']] = [] - tiles[d['subtype']].append(os.path.join(root_dir, d['filename'])) - elif d['filetype'] == 'node': - nodes.append(os.path.join(root_dir, d['filename'])) + tiles[d['subtype']].append( + os.path.join(root_dir, d['filename'])) + elif d['filetype'] == 'node': + nodes.append(os.path.join(root_dir, d['filename'])) + + return tiles, nodes - return tiles, nodes def verify_nodes(raw_nodes, nodes, error_nodes): - """ Compares raw_nodes with generated_nodes and adds errors to error_nodes. + """ Compares raw_nodes with generated_nodes and adds errors to error_nodes. Args: raw_nodes - Iterable of (node name, iterable of wires in node). @@ -36,24 +39,26 @@ def verify_nodes(raw_nodes, nodes, error_nodes): that did not match. 
""" - wire_nodes = {} - for node in nodes: - node_set = set(node) - for wire in node: - wire_nodes[wire] = node_set + wire_nodes = {} + for node in nodes: + node_set = set(node) + for wire in node: + wire_nodes[wire] = node_set - for node, raw_node_wires in raw_nodes: - raw_node_set = set(raw_node_wires) + for node, raw_node_wires in raw_nodes: + raw_node_set = set(raw_node_wires) + + for wire in sorted(raw_node_set): + if wire not in wire_nodes: + if set((wire, )) != raw_node_set: + error_nodes.append((node, tuple(raw_node_set), (wire, ))) + elif wire_nodes[wire] != raw_node_set: + error_nodes.append( + (node, tuple(raw_node_set), tuple(wire_nodes[wire]))) - for wire in sorted(raw_node_set): - if wire not in wire_nodes: - if set((wire,)) != raw_node_set: - error_nodes.append((node, tuple(raw_node_set), (wire,))) - elif wire_nodes[wire] != raw_node_set: - error_nodes.append((node, tuple(raw_node_set), tuple(wire_nodes[wire]))) def check_errors(flat_error_nodes, ignored_wires): - """ Check if error_nodes has errors that are not covered in ignored_wires. + """ Check if error_nodes has errors that are not covered in ignored_wires. Args: flat_error_nodes - List of error_nodes generated from verify_nodes. @@ -61,82 +66,85 @@ def check_errors(flat_error_nodes, ignored_wires): """ - error_nodes = {} - for node, raw_node, generated_nodes in flat_error_nodes: - if node not in error_nodes: - error_nodes[node] = { - 'raw_node': set(raw_node), - 'generated_nodes': set(), - } + error_nodes = {} + for node, raw_node, generated_nodes in flat_error_nodes: + if node not in error_nodes: + error_nodes[node] = { + 'raw_node': set(raw_node), + 'generated_nodes': set(), + } - # Make sure all raw nodes are the same. - assert error_nodes[node]['raw_node'] == set(raw_node) + # Make sure all raw nodes are the same. 
+ assert error_nodes[node]['raw_node'] == set(raw_node) - error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes))) + error_nodes[node]['generated_nodes'].add( + tuple(sorted(generated_nodes))) - for node, error in error_nodes.items(): - combined_generated_nodes = set() - for generated_node in error['generated_nodes']: - combined_generated_nodes |= set(generated_node) + for node, error in error_nodes.items(): + combined_generated_nodes = set() + for generated_node in error['generated_nodes']: + combined_generated_nodes |= set(generated_node) - # Make sure there are not extra wires in nodes. - assert error['raw_node'] == combined_generated_nodes, (node, error) + # Make sure there are not extra wires in nodes. + assert error['raw_node'] == combined_generated_nodes, (node, error) - good_node = max(error['generated_nodes'], key=lambda x: len(x)) - bad_nodes = error['generated_nodes'] - set((good_node,)) + good_node = max(error['generated_nodes'], key=lambda x: len(x)) + bad_nodes = error['generated_nodes'] - set((good_node, )) - # Max sure only single wires are stranded - assert max(len(generated_node) for generated_node in bad_nodes) == 1 + # Max sure only single wires are stranded + assert max(len(generated_node) for generated_node in bad_nodes) == 1 - for generate_node in bad_nodes: - for wire in generate_node: - if wire not in ignored_wires: - return False + for generate_node in bad_nodes: + for wire in generate_node: + if wire not in ignored_wires: + return False + + return True - return True class NodeLookup(object): - def __init__(self): - self.nodes = {} + def __init__(self): + self.nodes = {} - def load_from_nodes(self, nodes): - self.nodes = nodes + def load_from_nodes(self, nodes): + self.nodes = nodes - def load_from_root_csv(self, nodes): - for node in progressbar.progressbar(nodes): - with open(node) as f: - node_wires = json5.load(f) - assert node_wires['node'] not in self.nodes - self.nodes[node_wires['node']] = node_wires['wires'] + def 
load_from_root_csv(self, nodes): + for node in progressbar.progressbar(nodes): + with open(node) as f: + node_wires = json5.load(f) + assert node_wires['node'] not in self.nodes + self.nodes[node_wires['node']] = node_wires['wires'] - def load_from_file(self, fname): - with open(fname, 'rb') as f: - self.nodes = pickle.load(f) + def load_from_file(self, fname): + with open(fname, 'rb') as f: + self.nodes = pickle.load(f) - def save_to_file(self, fname): - with open(fname, 'wb') as f: - pickle.dump(self.nodes, f) + def save_to_file(self, fname): + with open(fname, 'wb') as f: + pickle.dump(self.nodes, f) - def site_pin_node_to_wires(self, tile, node): - if node is None: - return + def site_pin_node_to_wires(self, tile, node): + if node is None: + return - node_wires = self.nodes[node] + node_wires = self.nodes[node] - for wire in node_wires: - if wire['wire'].startswith(tile + '/'): - yield wire['wire'][len(tile)+1:] + for wire in node_wires: + if wire['wire'].startswith(tile + '/'): + yield wire['wire'][len(tile) + 1:] + + def wires_for_tile(self, tile): + for node in self.nodes.values(): + for wire in node: + if wire['wire'].startswith(tile + '/'): + yield wire['wire'][len(tile) + 1:] - def wires_for_tile(self, tile): - for node in self.nodes.values(): - for wire in node: - if wire['wire'].startswith(tile + '/'): - yield wire['wire'][len(tile)+1:] def compare_prototype_site(proto_a, proto_b): - """ Compare two proto site type. + """ Compare two proto site type. Will assert if prototypes are not equivalent. 
""" - assert proto_a == proto_b, repr((proto_a, proto_b)) + assert proto_a == proto_b, repr((proto_a, proto_b)) diff --git a/prjxray/tile.py b/prjxray/tile.py index cbccc18a..ffa0069a 100644 --- a/prjxray/tile.py +++ b/prjxray/tile.py @@ -1,11 +1,9 @@ from collections import namedtuple import json - """ Database files available for a tile """ TileDbs = namedtuple('TileDbs', 'segbits mask tile_type') Pip = namedtuple('Pip', 'net_to net_from can_invert is_directional is_pseudo') - """ Site - Represents an instance of a site within a tile. name - Name of site within tile, instance specific. @@ -17,7 +15,6 @@ pins - Instaces of site pins within this site and tile. This is an tuple of """ Site = namedtuple('Site', 'name x y type site_pins') - """ SitePin - Tuple representing a site pin within a tile. Sites are generic based on type, however sites are instanced @@ -33,53 +30,55 @@ wire - Wire name within the tile. This name is site instance specific. """ SitePin = namedtuple('SitePin', 'name wire direction') + class Tile(object): - """ Provides abstration of a tile in the database. """ - def __init__(self, tilename, tile_dbs): - self.tilename = tilename - self.tilename_upper = self.tilename.upper() - self.tile_dbs = tile_dbs + """ Provides abstration of a tile in the database. 
""" - self.wires = None - self.sites = None - self.pips = None + def __init__(self, tilename, tile_dbs): + self.tilename = tilename + self.tilename_upper = self.tilename.upper() + self.tile_dbs = tile_dbs - def yield_sites(sites): - for site in sites: - yield Site( - name = None, - type = site['type'], - x = None, - y = None, - site_pins = site['site_pins'], - ) + self.wires = None + self.sites = None + self.pips = None - def yield_pips(pips): - for pip in pips: - yield Pip( - net_to = pip['dst_wire'], - net_from = pip['src_wire'], - can_invert = bool(int(pip['can_invert'])), - is_directional = bool(int(pip['is_directional'])), - is_pseudo = bool(int(pip['is_pseudo'])), - ) + def yield_sites(sites): + for site in sites: + yield Site( + name=None, + type=site['type'], + x=None, + y=None, + site_pins=site['site_pins'], + ) - with open(self.tile_dbs.tile_type) as f: - tile_type = json.load(f) - assert self.tilename_upper == tile_type['tile_type'] - self.wires = tile_type['wires'] - self.sites = tuple(yield_sites(tile_type['sites'])) - self.pips = tuple(yield_pips(tile_type['pips'])) + def yield_pips(pips): + for pip in pips: + yield Pip( + net_to=pip['dst_wire'], + net_from=pip['src_wire'], + can_invert=bool(int(pip['can_invert'])), + is_directional=bool(int(pip['is_directional'])), + is_pseudo=bool(int(pip['is_pseudo'])), + ) - def get_wires(self): - """Returns a set of wire names present in this tile.""" - return self.wires + with open(self.tile_dbs.tile_type) as f: + tile_type = json.load(f) + assert self.tilename_upper == tile_type['tile_type'] + self.wires = tile_type['wires'] + self.sites = tuple(yield_sites(tile_type['sites'])) + self.pips = tuple(yield_pips(tile_type['pips'])) - def get_sites(self): - """ Returns tuple of Site namedtuple's present in this tile. 
""" - return self.sites + def get_wires(self): + """Returns a set of wire names present in this tile.""" + return self.wires - def get_pips(self): - """ Returns tuple of Pip namedtuple's representing the PIPs in this tile. + def get_sites(self): + """ Returns tuple of Site namedtuple's present in this tile. """ + return self.sites + + def get_pips(self): + """ Returns tuple of Pip namedtuple's representing the PIPs in this tile. """ - return self.pips + return self.pips diff --git a/tools/quick_test.py b/tools/quick_test.py index 9596d50f..37343d59 100644 --- a/tools/quick_test.py +++ b/tools/quick_test.py @@ -2,29 +2,34 @@ from __future__ import print_function import prjxray.db import argparse + def quick_test(db_root): - db = prjxray.db.Database(db_root) - g = db.grid() + db = prjxray.db.Database(db_root) + g = db.grid() -# Verify that we have some tile information for every tile in grid. - tile_types_in_grid = set(g.gridinfo_at_loc(loc).tile_type for loc in g.tile_locations()) - tile_types_in_db = set(db.get_tile_types()) - assert len(tile_types_in_grid - tile_types_in_db) == 0 + # Verify that we have some tile information for every tile in grid. + tile_types_in_grid = set( + g.gridinfo_at_loc(loc).tile_type for loc in g.tile_locations()) + tile_types_in_db = set(db.get_tile_types()) + assert len(tile_types_in_grid - tile_types_in_db) == 0 + + # Verify that all tile types can be loaded. + for tile_type in db.get_tile_types(): + tile = db.get_tile_type(tile_type) + tile.get_wires() + tile.get_sites() + tile.get_pips() -# Verify that all tile types can be loaded. 
- for tile_type in db.get_tile_types(): - tile = db.get_tile_type(tile_type) - tile.get_wires() - tile.get_sites() - tile.get_pips() def main(): - parser = argparse.ArgumentParser(description="Runs a sanity check on a prjxray database.") - parser.add_argument('--db_root', required=True) + parser = argparse.ArgumentParser( + description="Runs a sanity check on a prjxray database.") + parser.add_argument('--db_root', required=True) - args = parser.parse_args() + args = parser.parse_args() + + quick_test(args.db_root) - quick_test(args.db_root) if __name__ == '__main__': - main() + main() diff --git a/tools/verify_tile_connections.py b/tools/verify_tile_connections.py index e9262be5..56a14681 100644 --- a/tools/verify_tile_connections.py +++ b/tools/verify_tile_connections.py @@ -9,111 +9,123 @@ import pyjson5 as json5 import json import sys + def full_wire_name(wire_in_grid): - return '{}/{}'.format(wire_in_grid.tile, wire_in_grid.wire) + return '{}/{}'.format(wire_in_grid.tile, wire_in_grid.wire) + def make_connection(wires, connection): - wire_a = full_wire_name(connection.wire_a) - wire_b = full_wire_name(connection.wire_b) + wire_a = full_wire_name(connection.wire_a) + wire_b = full_wire_name(connection.wire_b) - if wire_a not in wires: - wires[wire_a] = set((wire_a,)) + if wire_a not in wires: + wires[wire_a] = set((wire_a, )) - if wire_b not in wires: - wires[wire_b] = set((wire_b,)) + if wire_b not in wires: + wires[wire_b] = set((wire_b, )) - wire_a_set = wires[wire_a] - wire_b_set = wires[wire_b] + wire_a_set = wires[wire_a] + wire_b_set = wires[wire_b] - if wire_a_set is wire_b_set: - return + if wire_a_set is wire_b_set: + return - wire_a_set |= wire_b_set + wire_a_set |= wire_b_set + + for wire in wire_a_set: + wires[wire] = wire_a_set - for wire in wire_a_set: - wires[wire] = wire_a_set def make_connections(db_root): - db = prjxray.db.Database(db_root) - c = db.connections() + db = prjxray.db.Database(db_root) + c = db.connections() - wires = {} - for 
connection in c.get_connections(): - make_connection(wires, connection) + wires = {} + for connection in c.get_connections(): + make_connection(wires, connection) - nodes = {} + nodes = {} - for wire_node in wires.values(): - nodes[id(wire_node)] = wire_node + for wire_node in wires.values(): + nodes[id(wire_node)] = wire_node + + return nodes.values() - return nodes.values() def read_json5(fname): - with open(fname, 'r') as f: - return json5.load(f) + with open(fname, 'r') as f: + return json5.load(f) + def main(): - parser = argparse.ArgumentParser(description="Tests database against raw node list.") - parser.add_argument('--db_root', required=True) - parser.add_argument('--raw_node_root', required=True) - parser.add_argument('--error_nodes', default="error_nodes.json") - parser.add_argument('--ignored_wires') + parser = argparse.ArgumentParser( + description="Tests database against raw node list.") + parser.add_argument('--db_root', required=True) + parser.add_argument('--raw_node_root', required=True) + parser.add_argument('--error_nodes', default="error_nodes.json") + parser.add_argument('--ignored_wires') - args = parser.parse_args() + args = parser.parse_args() - processes = min(multiprocessing.cpu_count(), 10) + processes = min(multiprocessing.cpu_count(), 10) - print('{} Running {} processes'.format(datetime.datetime.now(), processes)) - pool = multiprocessing.Pool(processes=processes) - print('{} Reading raw data index'.format(datetime.datetime.now(), processes)) - _, nodes = prjxray.lib.read_root_csv(args.raw_node_root) - print('{} Reading raw_node_data'.format(datetime.datetime.now())) - raw_node_data = [] - with progressbar.ProgressBar(max_value=len(nodes)) as bar: - for idx, node in enumerate(pool.imap_unordered( - read_json5, - nodes, - chunksize = 20, - )): - bar.update(idx) - raw_node_data.append((node['node'], tuple(wire['wire'] for wire in node['wires']))) - bar.update(idx+1) + print('{} Running {} processes'.format(datetime.datetime.now(), 
processes)) + pool = multiprocessing.Pool(processes=processes) + print( + '{} Reading raw data index'.format(datetime.datetime.now(), processes)) + _, nodes = prjxray.lib.read_root_csv(args.raw_node_root) + print('{} Reading raw_node_data'.format(datetime.datetime.now())) + raw_node_data = [] + with progressbar.ProgressBar(max_value=len(nodes)) as bar: + for idx, node in enumerate(pool.imap_unordered( + read_json5, + nodes, + chunksize=20, + )): + bar.update(idx) + raw_node_data.append( + (node['node'], tuple(wire['wire'] for wire in node['wires']))) + bar.update(idx + 1) - print('{} Creating connections'.format(datetime.datetime.now())) - generated_nodes = make_connections(args.db_root) + print('{} Creating connections'.format(datetime.datetime.now())) + generated_nodes = make_connections(args.db_root) - print('{} Verifying connections'.format(datetime.datetime.now())) - error_nodes = [] - prjxray.lib.verify_nodes(raw_node_data, generated_nodes, error_nodes) + print('{} Verifying connections'.format(datetime.datetime.now())) + error_nodes = [] + prjxray.lib.verify_nodes(raw_node_data, generated_nodes, error_nodes) - if len(error_nodes) > 0: - if args.ignored_wires: - with open(args.ignored_wires, 'r') as f: - ignored_wires = [l.strip() for l in f.readlines()] + if len(error_nodes) > 0: + if args.ignored_wires: + with open(args.ignored_wires, 'r') as f: + ignored_wires = [l.strip() for l in f.readlines()] - print('{} Found {} errors, writing errors to {}'.format( - datetime.datetime.now(), - len(error_nodes), - args.error_nodes, - )) + print( + '{} Found {} errors, writing errors to {}'.format( + datetime.datetime.now(), + len(error_nodes), + args.error_nodes, + )) - with open(args.error_nodes, 'w') as f: - json.dump(error_nodes, f, indent=2) + with open(args.error_nodes, 'w') as f: + json.dump(error_nodes, f, indent=2) - if not args.ignored_wires: - sys.exit(1) + if not args.ignored_wires: + sys.exit(1) + + if not prjxray.lib.check_errors(error_nodes, 
ignored_wires): + print( + '{} Errors were not ignored via ignored_wires {}'.format( + datetime.datetime.now(), + args.ignored_wires, + )) + sys.exit(1) + else: + print( + '{} All errors were via ignored_wires {}'.format( + datetime.datetime.now(), + args.ignored_wires, + )) - if not prjxray.lib.check_errors(error_nodes, ignored_wires): - print('{} Errors were not ignored via ignored_wires {}'.format( - datetime.datetime.now(), - args.ignored_wires, - )) - sys.exit(1) - else: - print('{} All errors were via ignored_wires {}'.format( - datetime.datetime.now(), - args.ignored_wires, - )) if __name__ == '__main__': - main() + main() From 7adb81b81cab82c2b83997ccf8d7f034fe29c19b Mon Sep 17 00:00:00 2001 From: Keith Rothman <537074+litghost@users.noreply.github.com> Date: Thu, 27 Sep 2018 08:56:38 -0700 Subject: [PATCH 3/5] Removing index_in_site from site_type files. Signed-off-by: Keith Rothman <537074+litghost@users.noreply.github.com> --- fuzzers/074-dump_all/generate.tcl | 1 - fuzzers/074-dump_all/reduce_site_types.py | 4 ++++ fuzzers/074-dump_all/reduce_tile_types.py | 1 - 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/fuzzers/074-dump_all/generate.tcl b/fuzzers/074-dump_all/generate.tcl index 3aeaf90f..a961a1c5 100644 --- a/fuzzers/074-dump_all/generate.tcl +++ b/fuzzers/074-dump_all/generate.tcl @@ -41,7 +41,6 @@ foreach tile [get_tiles] { # SPEED_INDEX puts $fp "\t\t\t\{" puts $fp "\t\t\t\t\"site_pin\":\"$site_pin\"," - puts $fp "\t\t\t\t\"index_in_site\":\"[get_property INDEX_IN_SITE $site_pin]\"," puts $fp "\t\t\t\t\"direction\":\"[get_property DIRECTION $site_pin]\"," set site_pin_node [get_nodes -of_objects $site_pin] if {[llength $site_pin_node] == 0} { diff --git a/fuzzers/074-dump_all/reduce_site_types.py b/fuzzers/074-dump_all/reduce_site_types.py index 36f35a7b..8041f945 100644 --- a/fuzzers/074-dump_all/reduce_site_types.py +++ b/fuzzers/074-dump_all/reduce_site_types.py @@ -40,6 +40,10 @@ def main(): with 
open(os.path.join(args.output_dir, instance)) as f: instance_site_type = json.load(f) + for site_pin in instance_site_type['site_pins'].values(): + if 'index_in_site' in site_pin: + del site_pin['index_in_site'] + if proto_site_type is None: proto_site_type = instance_site_type else: diff --git a/fuzzers/074-dump_all/reduce_tile_types.py b/fuzzers/074-dump_all/reduce_tile_types.py index 32754fb5..25cb4948 100644 --- a/fuzzers/074-dump_all/reduce_tile_types.py +++ b/fuzzers/074-dump_all/reduce_tile_types.py @@ -131,7 +131,6 @@ def get_prototype_site(site): proto['site_pins'][name] = { 'direction': site_pin['direction'], - 'index_in_site': site_pin['index_in_site'], } for site_pip in site['site_pips']: From 5bce5a31b86c41c2f771b57e21c33393941ec11a Mon Sep 17 00:00:00 2001 From: Keith Rothman <537074+litghost@users.noreply.github.com> Date: Thu, 27 Sep 2018 08:59:27 -0700 Subject: [PATCH 4/5] Update travis to use pip3 not pip (python2). Signed-off-by: Keith Rothman <537074+litghost@users.noreply.github.com> --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 38a0fb70..4484ebfd 100644 --- a/.travis.yml +++ b/.travis.yml @@ -27,7 +27,7 @@ matrix: packages: - clang-format-3.9 install: - - pip install --user -r requirements.txt + - pip3 install --user -r requirements.txt script: - make format - test $(git status --porcelain | wc -l) -eq 0 || { git diff; false; } From 32e66e38ddbacaee67d59cc127384c4dedef6d01 Mon Sep 17 00:00:00 2001 From: Keith Rothman <537074+litghost@users.noreply.github.com> Date: Thu, 27 Sep 2018 09:07:56 -0700 Subject: [PATCH 5/5] Make ignored wires database specific and have travis be aware of python. 
Signed-off-by: Keith Rothman <537074+litghost@users.noreply.github.com> --- .travis.yml | 14 +++++++++++--- ...{ignored_wires.txt => artix7_ignored_wires.txt} | 0 fuzzers/074-dump_all/generate_after_dump.sh | 3 ++- fuzzers/074-dump_all/generate_grid.py | 6 ++---- 4 files changed, 15 insertions(+), 8 deletions(-) rename fuzzers/074-dump_all/{ignored_wires.txt => artix7_ignored_wires.txt} (100%) diff --git a/.travis.yml b/.travis.yml index 4484ebfd..aaa2cb96 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,10 +1,9 @@ -language: cpp - matrix: include: # Job 1) Test C++ w/ GCC - os: linux dist: trusty + language: cpp addons: apt: sources: @@ -22,12 +21,21 @@ matrix: # Job 2) Lint checks on Python and C++ - os: linux dist: trusty + language: python + python: 3.6 addons: apt: + sources: + - ubuntu-toolchain-r-test packages: - clang-format-3.9 + - g++-6 install: - - pip3 install --user -r requirements.txt + - export CC=gcc-6 + - export CXX=g++-6 + - pip install -r requirements.txt script: + - export CC=gcc-6 + - export CXX=g++-6 - make format - test $(git status --porcelain | wc -l) -eq 0 || { git diff; false; } diff --git a/fuzzers/074-dump_all/ignored_wires.txt b/fuzzers/074-dump_all/artix7_ignored_wires.txt similarity index 100% rename from fuzzers/074-dump_all/ignored_wires.txt rename to fuzzers/074-dump_all/artix7_ignored_wires.txt diff --git a/fuzzers/074-dump_all/generate_after_dump.sh b/fuzzers/074-dump_all/generate_after_dump.sh index a3226f52..75d38f60 100755 --- a/fuzzers/074-dump_all/generate_after_dump.sh +++ b/fuzzers/074-dump_all/generate_after_dump.sh @@ -10,4 +10,5 @@ python3 create_node_tree.py \ --ordered_wires_root_dir ../072-ordered_wires/specimen_001/ \ --output_dir output python3 reduce_site_types.py --output_dir output -python3 generate_grid.py --root_dir specimen_001/ --output_dir output +python3 generate_grid.py --root_dir specimen_001/ --output_dir output \ + --ignored_wires ${XRAY_DATABASE}_ignored_wires.txt diff --git 
a/fuzzers/074-dump_all/generate_grid.py b/fuzzers/074-dump_all/generate_grid.py index c1fd2468..c841be53 100644 --- a/fuzzers/074-dump_all/generate_grid.py +++ b/fuzzers/074-dump_all/generate_grid.py @@ -10,7 +10,6 @@ import os.path import json import datetime import pickle -import inspect import sys @@ -561,6 +560,7 @@ def main(): parser.add_argument('--root_dir', required=True) parser.add_argument('--output_dir', required=True) parser.add_argument('--verify_only', action='store_true') + parser.add_argument('--ignored_wires') args = parser.parse_args() @@ -648,9 +648,7 @@ def main(): json.dump(error_nodes, f, indent=2) ignored_wires = [] - path_to_file = os.path.dirname( - os.path.abspath(inspect.getfile(inspect.currentframe()))) - ignored_wires_file = os.path.join(path_to_file, 'ignored_wires.txt') + ignored_wires_file = args.ignored_wires if os.path.exists(ignored_wires_file): with open(ignored_wires_file) as f: ignored_wires = set(l.strip() for l in f)