Run make format.

Signed-off-by: Keith Rothman <537074+litghost@users.noreply.github.com>
This commit is contained in:
Keith Rothman 2018-09-27 08:53:39 -07:00
parent 85e14f81a1
commit c4a62fb315
13 changed files with 1427 additions and 1319 deletions

View File

@ -1,41 +1,40 @@
import json
with open('output/error_nodes.json') as f:
flat_error_nodes = json.load(f)
flat_error_nodes = json.load(f)
error_nodes = {}
for node, raw_node, generated_nodes in flat_error_nodes:
if node not in error_nodes:
error_nodes[node] = {
'raw_node': set(raw_node),
'generated_nodes': set(),
}
if node not in error_nodes:
error_nodes[node] = {
'raw_node': set(raw_node),
'generated_nodes': set(),
}
assert error_nodes[node]['raw_node'] == set(raw_node)
error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes)))
assert error_nodes[node]['raw_node'] == set(raw_node)
error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes)))
for node, error in error_nodes.items():
combined_generated_nodes = set()
for generated_node in error['generated_nodes']:
combined_generated_nodes |= set(generated_node)
combined_generated_nodes = set()
for generated_node in error['generated_nodes']:
combined_generated_nodes |= set(generated_node)
assert error['raw_node'] == combined_generated_nodes, (node, error)
assert error['raw_node'] == combined_generated_nodes, (node, error)
good_node = max(error['generated_nodes'], key=lambda x: len(x))
bad_nodes = error['generated_nodes'] - set((good_node,))
good_node = max(error['generated_nodes'], key=lambda x: len(x))
bad_nodes = error['generated_nodes'] - set((good_node, ))
if max(len(generated_node) for generated_node in bad_nodes) > 1:
assert False, node
else:
not_pcie = False
for generated_node in bad_nodes:
for wire in generated_node:
if not wire.startswith('PCIE'):
not_pcie = True
if not_pcie:
#print(node, good_node, map(tuple, bad_nodes))
print(repr((node, tuple(map(tuple, bad_nodes)))))
pass
if max(len(generated_node) for generated_node in bad_nodes) > 1:
assert False, node
else:
#print(repr((node, map(tuple, bad_nodes))))
pass
not_pcie = False
for generated_node in bad_nodes:
for wire in generated_node:
if not wire.startswith('PCIE'):
not_pcie = True
if not_pcie:
#print(node, good_node, map(tuple, bad_nodes))
print(repr((node, tuple(map(tuple, bad_nodes)))))
pass
else:
#print(repr((node, map(tuple, bad_nodes))))
pass

View File

@ -7,255 +7,272 @@ import prjxray.lib
import pickle
import collections
def build_node_index(fname):
node_index = {}
with open(fname, 'rb') as f:
f.seek(0, 2)
bytes = f.tell()
f.seek(0, 0)
with progressbar.ProgressBar(max_value=bytes) as bar:
end_of_line = 0
for l in f:
parts = l.decode('utf8').split(' ')
pip, node = parts[0:2]
node_index = {}
with open(fname, 'rb') as f:
f.seek(0, 2)
bytes = f.tell()
f.seek(0, 0)
with progressbar.ProgressBar(max_value=bytes) as bar:
end_of_line = 0
for l in f:
parts = l.decode('utf8').split(' ')
pip, node = parts[0:2]
if node not in node_index:
node_index[node] = []
if node not in node_index:
node_index[node] = []
node_index[node].append(end_of_line)
end_of_line = f.tell()
bar.update(end_of_line)
node_index[node].append(end_of_line)
end_of_line = f.tell()
bar.update(end_of_line)
return node_index
return node_index
def read_node(expected_node, wire_file, node_index):
with open(wire_file, 'rb') as f:
for index in node_index:
f.seek(index, 0)
with open(wire_file, 'rb') as f:
for index in node_index:
f.seek(index, 0)
parts = f.readline().decode('utf8').strip().split(' ')
parts = f.readline().decode('utf8').strip().split(' ')
pip, node = parts[0:2]
wires = parts[2:]
pip, node = parts[0:2]
wires = parts[2:]
assert node == expected_node, repr((node, expected_node, index))
assert node == expected_node, repr((node, expected_node, index))
yield wires
yield wires
def generate_edges(graph, root, graph_nodes):
""" Starting from root, generate an edge in dir and insert into graph.
""" Starting from root, generate an edge in dir and insert into graph.
If the tree forks, simply insert a joins to indicate the split.
"""
edge = [root]
prev_root = None
edge = [root]
prev_root = None
while True:
outbound_edges = graph_nodes[root]
outbound_edges -= set((prev_root,))
if len(outbound_edges) > 1:
graph['edges'].append(edge)
if root not in graph['joins']:
graph['joins'][root] = set()
graph['joins'][root] |= outbound_edges
while True:
outbound_edges = graph_nodes[root]
outbound_edges -= set((prev_root, ))
if len(outbound_edges) > 1:
graph['edges'].append(edge)
if root not in graph['joins']:
graph['joins'][root] = set()
graph['joins'][root] |= outbound_edges
for element in graph_nodes[root]:
if element not in graph['joins']:
graph['joins'][element] = set()
graph['joins'][element].add(root)
for element in graph_nodes[root]:
if element not in graph['joins']:
graph['joins'][element] = set()
graph['joins'][element].add(root)
break
else:
if len(outbound_edges) == 0:
graph['edges'].append(edge)
break
break
else:
if len(outbound_edges) == 0:
graph['edges'].append(edge)
break
next_root = tuple(outbound_edges)[0]
edge.append(next_root)
prev_root, root = root, next_root
next_root = tuple(outbound_edges)[0]
edge.append(next_root)
prev_root, root = root, next_root
def create_ordered_wires_for_node(node, wires_in_node, downhill, uphill):
if len(wires_in_node) <= 2:
return {'edges': [wires_in_node], 'joins': {}}
if len(wires_in_node) <= 2:
return {'edges': [wires_in_node], 'joins': {}}
downhill = set(tuple(l) for l in downhill)
uphill = set(tuple(l) for l in uphill)
downhill = set(tuple(l) for l in downhill)
uphill = set(tuple(l) for l in uphill)
roots = set()
all_wires = set()
roots = set()
all_wires = set()
for wire in downhill:
if len(wire) > 0:
roots |= set((wire[0], wire[-1]))
all_wires |= set(wire)
for wire in downhill:
if len(wire) > 0:
roots |= set((wire[0], wire[-1]))
all_wires |= set(wire)
for wire in uphill:
if len(wire) > 0:
roots |= set((wire[0], wire[-1]))
all_wires |= set(wire)
for wire in uphill:
if len(wire) > 0:
roots |= set((wire[0], wire[-1]))
all_wires |= set(wire)
assert len(wires_in_node) >= len(all_wires)
assert len(wires_in_node) >= len(all_wires)
if len(all_wires) <= 2:
return {'edges': tuple(all_wires), 'joins': {}}
if len(all_wires) <= 2:
return {'edges': tuple(all_wires), 'joins': {}}
graph_nodes = dict((wire, set()) for wire in all_wires)
graph_nodes = dict((wire, set()) for wire in all_wires)
for wire in all_wires:
for down in downhill:
try:
idx = down.index(wire)
if idx+1 < len(down):
graph_nodes[wire].add(down[idx+1])
if idx-1 >= 0:
graph_nodes[wire].add(down[idx-1])
except ValueError:
continue
for wire in all_wires:
for down in downhill:
try:
idx = down.index(wire)
if idx + 1 < len(down):
graph_nodes[wire].add(down[idx + 1])
if idx - 1 >= 0:
graph_nodes[wire].add(down[idx - 1])
except ValueError:
continue
for up in uphill:
try:
idx = up.index(wire)
if idx+1 < len(up):
graph_nodes[wire].add(up[idx+1])
if idx-1 >= 0:
graph_nodes[wire].add(up[idx-1])
except ValueError:
continue
for up in uphill:
try:
idx = up.index(wire)
if idx + 1 < len(up):
graph_nodes[wire].add(up[idx + 1])
if idx - 1 >= 0:
graph_nodes[wire].add(up[idx - 1])
except ValueError:
continue
graph = {'edges': [], 'joins': {}}
graph = {'edges': [], 'joins': {}}
while len(roots) > 0:
root = roots.pop()
while len(roots) > 0:
root = roots.pop()
if len(graph_nodes[root]) > 0:
generate_edges(graph, root, graph_nodes)
if len(graph_nodes[root]) > 0:
generate_edges(graph, root, graph_nodes)
# Dedup identical edges.
final_edges = set()
# Dedup identical edges.
final_edges = set()
for edge in graph['edges']:
edge1 = tuple(edge)
edge2 = tuple(edge[::-1])
for edge in graph['edges']:
edge1 = tuple(edge)
edge2 = tuple(edge[::-1])
if edge1 > edge2:
final_edges.add((edge2, edge1))
else:
final_edges.add((edge1, edge2))
edges = [edge[0] for edge in final_edges]
element_index = {}
for edge in edges:
for idx, element in enumerate(edge):
if element not in element_index:
element_index[element] = []
element_index[element].append((idx, edge))
new_edges = []
for edge in edges:
starts = element_index[edge[0]]
ends = element_index[edge[-1]]
found_any = False
for start in starts:
start_idx, other_edge = start
if other_edge is edge:
continue
for end in ends:
if other_edge is not end[1]:
continue
found_any = True
end_idx, _ = end
# check if the interior elements are the same.
if start_idx > end_idx:
step = -1
if edge1 > edge2:
final_edges.add((edge2, edge1))
else:
step = 1
final_edges.add((edge1, edge2))
other_edge_slice = slice(start_idx, end_idx+step if end_idx+step >= 0 else None, step)
if edge != other_edge[other_edge_slice]:
new_edges.append(edge)
edges = [edge[0] for edge in final_edges]
if not found_any:
new_edges.append(edge)
element_index = {}
for edge in edges:
for idx, element in enumerate(edge):
if element not in element_index:
element_index[element] = []
element_index[element].append((idx, edge))
output = {
'edges': new_edges,
'joins': dict((key, tuple(value))
for key, value in graph['joins'].items()),
'wires': wires_in_node,
}
new_edges = []
for edge in edges:
starts = element_index[edge[0]]
ends = element_index[edge[-1]]
all_wires_in_output = set()
for edge in output['edges']:
all_wires_in_output |= set(edge)
found_any = False
for start in starts:
start_idx, other_edge = start
if other_edge is edge:
continue
for element in output['joins']:
all_wires_in_output.add(element)
for end in ends:
if other_edge is not end[1]:
continue
found_any = True
end_idx, _ = end
# check if the interior elements are the same.
if start_idx > end_idx:
step = -1
else:
step = 1
other_edge_slice = slice(
start_idx, end_idx + step if end_idx + step >= 0 else None,
step)
if edge != other_edge[other_edge_slice]:
new_edges.append(edge)
if not found_any:
new_edges.append(edge)
output = {
'edges':
new_edges,
'joins':
dict((key, tuple(value)) for key, value in graph['joins'].items()),
'wires':
wires_in_node,
}
all_wires_in_output = set()
for edge in output['edges']:
all_wires_in_output |= set(edge)
for element in output['joins']:
all_wires_in_output.add(element)
return output
return output
def main():
parser = argparse.ArgumentParser(description="")
parser.add_argument('--dump_all_root_dir', required=True)
parser.add_argument('--ordered_wires_root_dir', required=True)
parser.add_argument('--output_dir', required=True)
parser = argparse.ArgumentParser(description="")
parser.add_argument('--dump_all_root_dir', required=True)
parser.add_argument('--ordered_wires_root_dir', required=True)
parser.add_argument('--output_dir', required=True)
args = parser.parse_args()
args = parser.parse_args()
downhill_wires = os.path.join(args.ordered_wires_root_dir, 'downhill_wires.txt')
uphill_wires = os.path.join(args.ordered_wires_root_dir, 'uphill_wires.txt')
downhill_wires = os.path.join(
args.ordered_wires_root_dir, 'downhill_wires.txt')
uphill_wires = os.path.join(
args.ordered_wires_root_dir, 'uphill_wires.txt')
assert os.path.exists(downhill_wires)
assert os.path.exists(uphill_wires)
assert os.path.exists(downhill_wires)
assert os.path.exists(uphill_wires)
print('{} Reading root.csv'.format(datetime.datetime.now()))
tiles, nodes = prjxray.lib.read_root_csv(args.dump_all_root_dir)
print('{} Reading root.csv'.format(datetime.datetime.now()))
tiles, nodes = prjxray.lib.read_root_csv(args.dump_all_root_dir)
print('{} Loading node<->wire mapping'.format(datetime.datetime.now()))
node_lookup = prjxray.lib.NodeLookup()
node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle')
if os.path.exists(node_lookup_file):
node_lookup.load_from_file(node_lookup_file)
else:
node_lookup.load_from_root_csv(nodes)
node_lookup.save_to_file(node_lookup_file)
print('{} Loading node<->wire mapping'.format(datetime.datetime.now()))
node_lookup = prjxray.lib.NodeLookup()
node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle')
if os.path.exists(node_lookup_file):
node_lookup.load_from_file(node_lookup_file)
else:
node_lookup.load_from_root_csv(nodes)
node_lookup.save_to_file(node_lookup_file)
wire_index_file = os.path.join(args.output_dir, 'wire_index.pickle')
if os.path.exists(wire_index_file):
print('{} Reading wire<->node index'.format(datetime.datetime.now()))
with open(wire_index_file, 'rb') as f:
wire_index = pickle.load(f)
wire_index_file = os.path.join(args.output_dir, 'wire_index.pickle')
if os.path.exists(wire_index_file):
print('{} Reading wire<->node index'.format(datetime.datetime.now()))
with open(wire_index_file, 'rb') as f:
wire_index = pickle.load(f)
downhill_wire_node_index = wire_index['downhill']
uphill_wire_node_index = wire_index['uphill']
else:
print('{} Creating wire<->node index'.format(datetime.datetime.now()))
downhill_wire_node_index = build_node_index(downhill_wires)
uphill_wire_node_index = build_node_index(uphill_wires)
downhill_wire_node_index = wire_index['downhill']
uphill_wire_node_index = wire_index['uphill']
else:
print('{} Creating wire<->node index'.format(datetime.datetime.now()))
downhill_wire_node_index = build_node_index(downhill_wires)
uphill_wire_node_index = build_node_index(uphill_wires)
with open(wire_index_file, 'wb') as f:
pickle.dump({
'downhill': downhill_wire_node_index,
'uphill': uphill_wire_node_index,
}, f)
with open(wire_index_file, 'wb') as f:
pickle.dump(
{
'downhill': downhill_wire_node_index,
'uphill': uphill_wire_node_index,
}, f)
print('{} Creating node tree'.format(datetime.datetime.now()))
nodes = collections.OrderedDict()
for node in progressbar.progressbar(sorted(node_lookup.nodes)):
nodes[node] = create_ordered_wires_for_node(
node,
tuple(wire['wire'] for wire in node_lookup.nodes[node]),
tuple(read_node(node, downhill_wires, downhill_wire_node_index[node] if node in downhill_wire_node_index else [])),
tuple(read_node(node, uphill_wires, uphill_wire_node_index[node] if node in uphill_wire_node_index else [])))
print('{} Creating node tree'.format(datetime.datetime.now()))
nodes = collections.OrderedDict()
for node in progressbar.progressbar(sorted(node_lookup.nodes)):
nodes[node] = create_ordered_wires_for_node(
node, tuple(wire['wire'] for wire in node_lookup.nodes[node]),
tuple(
read_node(
node, downhill_wires, downhill_wire_node_index[node]
if node in downhill_wire_node_index else [])),
tuple(
read_node(
node, uphill_wires, uphill_wire_node_index[node]
if node in uphill_wire_node_index else [])))
print('{} Writing node tree'.format(datetime.datetime.now()))
with open(os.path.join(args.output_dir, 'node_tree.json'), 'w') as f:
json.dump(nodes, f, indent=2)
print('{} Writing node tree'.format(datetime.datetime.now()))
with open(os.path.join(args.output_dir, 'node_tree.json'), 'w') as f:
json.dump(nodes, f, indent=2)
if __name__ == '__main__':
main()
main()

File diff suppressed because it is too large Load Diff

View File

@ -1,33 +1,32 @@
import json
with open('output/error_nodes.json') as f:
flat_error_nodes = json.load(f)
flat_error_nodes = json.load(f)
error_nodes = {}
for node, raw_node, generated_nodes in flat_error_nodes:
if node not in error_nodes:
error_nodes[node] = {
'raw_node': set(raw_node),
'generated_nodes': set(),
}
if node not in error_nodes:
error_nodes[node] = {
'raw_node': set(raw_node),
'generated_nodes': set(),
}
assert error_nodes[node]['raw_node'] == set(raw_node)
error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes)))
assert error_nodes[node]['raw_node'] == set(raw_node)
error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes)))
for node, error in error_nodes.items():
combined_generated_nodes = set()
for generated_node in error['generated_nodes']:
combined_generated_nodes |= set(generated_node)
combined_generated_nodes = set()
for generated_node in error['generated_nodes']:
combined_generated_nodes |= set(generated_node)
assert error['raw_node'] == combined_generated_nodes, (node, error)
assert error['raw_node'] == combined_generated_nodes, (node, error)
good_node = max(error['generated_nodes'], key=lambda x: len(x))
bad_nodes = error['generated_nodes'] - set((good_node,))
good_node = max(error['generated_nodes'], key=lambda x: len(x))
bad_nodes = error['generated_nodes'] - set((good_node, ))
if max(len(generated_node) for generated_node in bad_nodes) > 1:
assert False, node
else:
for generated_node in bad_nodes:
for wire in generated_node:
print(wire)
if max(len(generated_node) for generated_node in bad_nodes) > 1:
assert False, node
else:
for generated_node in bad_nodes:
for wire in generated_node:
print(wire)

View File

@ -13,43 +13,46 @@ import os.path
import re
import json
def main():
parser = argparse.ArgumentParser(description="Reduces per tile site types to generic site types.")
parser.add_argument('--output_dir', required=True)
parser = argparse.ArgumentParser(
description="Reduces per tile site types to generic site types.")
parser.add_argument('--output_dir', required=True)
args = parser.parse_args()
args = parser.parse_args()
SITE_TYPE = re.compile('^tile_type_(.+)_site_type_(.+)\.json$')
site_types = {}
for path in os.listdir(args.output_dir):
match = SITE_TYPE.fullmatch(path)
if match is None:
continue
SITE_TYPE = re.compile('^tile_type_(.+)_site_type_(.+)\.json$')
site_types = {}
for path in os.listdir(args.output_dir):
match = SITE_TYPE.fullmatch(path)
if match is None:
continue
site_type = match.group(2)
if site_type not in site_types:
site_types[site_type] = []
site_type = match.group(2)
if site_type not in site_types:
site_types[site_type] = []
site_types[site_type].append(path)
site_types[site_type].append(path)
for site_type in site_types:
proto_site_type = None
for instance in site_types[site_type]:
with open(os.path.join(args.output_dir, instance)) as f:
instance_site_type = json.load(f)
for site_type in site_types:
proto_site_type = None
for instance in site_types[site_type]:
with open(os.path.join(args.output_dir, instance)) as f:
instance_site_type = json.load(f)
if proto_site_type is None:
proto_site_type = instance_site_type
else:
prjxray.lib.compare_prototype_site(
proto_site_type,
instance_site_type,
)
if proto_site_type is None:
proto_site_type = instance_site_type
else:
prjxray.lib.compare_prototype_site(
proto_site_type,
instance_site_type,
)
with open(os.path.join(args.output_dir,
'site_type_{}.json'.format(site_type)),
'w') as f:
json.dump(proto_site_type, f, indent=2)
with open(os.path.join(args.output_dir,
'site_type_{}.json'.format(site_type)), 'w') as f:
json.dump(proto_site_type, f, indent=2)
if __name__ == '__main__':
main()
main()

View File

@ -19,24 +19,30 @@ import os
import functools
import re
def check_and_strip_prefix(name, prefix):
assert name.startswith(prefix), repr((name, prefix))
return name[len(prefix):]
assert name.startswith(prefix), repr((name, prefix))
return name[len(prefix):]
def flatten_site_pins(tile, site, site_pins, site_pin_node_to_wires):
def inner():
for site_pin in site_pins:
wires = tuple(site_pin_node_to_wires(tile, site_pin['node']))
def inner():
for site_pin in site_pins:
wires = tuple(site_pin_node_to_wires(tile, site_pin['node']))
if len(wires) == 0:
yield (check_and_strip_prefix(site_pin['site_pin'], site+'/'), None)
continue
if len(wires) == 0:
yield (
check_and_strip_prefix(site_pin['site_pin'], site + '/'),
None)
continue
assert len(wires) == 1, repr(wires)
assert len(wires) == 1, repr(wires)
yield (check_and_strip_prefix(site_pin['site_pin'], site+'/'), wires[0])
yield (
check_and_strip_prefix(site_pin['site_pin'], site + '/'),
wires[0])
return dict(inner())
return dict(inner())
# All site names appear to follow the pattern <type>_X<abs coord>Y<abs coord>.
@ -45,6 +51,7 @@ def flatten_site_pins(tile, site, site_pins, site_pin_node_to_wires):
# (e.g. start at X0Y0) and discard the prefix from the name.
SITE_COORDINATE_PATTERN = re.compile('^(.+)_X([0-9]+)Y([0-9]+)$')
def find_origin_coordinate(sites):
""" Find the coordinates of each site within the tile, and then subtract the
smallest coordinate to re-origin them all to be relative to the tile.
@ -68,256 +75,291 @@ def find_origin_coordinate(sites):
return min_x_coord, min_y_coord
def get_sites(tile, site_pin_node_to_wires):
min_x_coord, min_y_coord = find_origin_coordinate(tile['sites'])
min_x_coord, min_y_coord = find_origin_coordinate(tile['sites'])
for site in tile['sites']:
orig_site_name = site['site']
coordinate = SITE_COORDINATE_PATTERN.match(orig_site_name)
for site in tile['sites']:
orig_site_name = site['site']
coordinate = SITE_COORDINATE_PATTERN.match(orig_site_name)
x_coord = int(coordinate.group(2))
y_coord = int(coordinate.group(3))
x_coord = int(coordinate.group(2))
y_coord = int(coordinate.group(3))
yield (
{
'name': 'X{}Y{}'.format(x_coord - min_x_coord, y_coord - min_y_coord),
'prefix': coordinate.group(1),
'x_coord': x_coord - min_x_coord,
'y_coord': y_coord - min_y_coord,
'type': site['type'],
'site_pins': dict(flatten_site_pins(
tile['tile'],
site['site'], site['site_pins'], site_pin_node_to_wires)),
}
)
yield (
{
'name':
'X{}Y{}'.format(x_coord - min_x_coord, y_coord - min_y_coord),
'prefix':
coordinate.group(1),
'x_coord':
x_coord - min_x_coord,
'y_coord':
y_coord - min_y_coord,
'type':
site['type'],
'site_pins':
dict(
flatten_site_pins(
tile['tile'], site['site'], site['site_pins'],
site_pin_node_to_wires)),
})
def compare_sites_and_update(tile, sites, new_sites):
for site_a, site_b in zip(sites, new_sites):
assert site_a['type'] == site_b['type']
assert site_a['site_pins'].keys() == site_b['site_pins'].keys()
for site_a, site_b in zip(sites, new_sites):
assert site_a['type'] == site_b['type']
assert site_a['site_pins'].keys() == site_b['site_pins'].keys()
for site_pin in site_a['site_pins']:
if site_a['site_pins'][site_pin] is not None and site_b['site_pins'][site_pin] is not None:
assert site_a['site_pins'][site_pin] == site_b['site_pins'][site_pin]
elif site_a['site_pins'][site_pin] is None and site_b['site_pins'][site_pin] is not None:
site_a['site_pins'][site_pin] = site_b['site_pins'][site_pin]
for site_pin in site_a['site_pins']:
if site_a['site_pins'][site_pin] is not None and site_b[
'site_pins'][site_pin] is not None:
assert site_a['site_pins'][site_pin] == site_b['site_pins'][
site_pin]
elif site_a['site_pins'][site_pin] is None and site_b['site_pins'][
site_pin] is not None:
site_a['site_pins'][site_pin] = site_b['site_pins'][site_pin]
def get_prototype_site(site):
proto = {}
proto['type'] = site['type']
proto['site_pins'] = {}
proto['site_pips'] = {}
for site_pin in site['site_pins']:
name = check_and_strip_prefix(site_pin['site_pin'], site['site'] + '/')
proto = {}
proto['type'] = site['type']
proto['site_pins'] = {}
proto['site_pips'] = {}
for site_pin in site['site_pins']:
name = check_and_strip_prefix(site_pin['site_pin'], site['site'] + '/')
proto['site_pins'][name] = {
'direction': site_pin['direction'],
'index_in_site': site_pin['index_in_site'],
}
proto['site_pins'][name] = {
'direction': site_pin['direction'],
'index_in_site': site_pin['index_in_site'],
}
for site_pip in site['site_pips']:
name = check_and_strip_prefix(site_pip['site_pip'], site['site'] + '/')
for site_pip in site['site_pips']:
name = check_and_strip_prefix(site_pip['site_pip'], site['site'] + '/')
proto['site_pips'][name] = {
'to_pin': site_pip['to_pin'],
'from_pin': site_pip['from_pin'],
}
proto['site_pips'][name] = {
'to_pin': site_pip['to_pin'],
'from_pin': site_pip['from_pin'],
}
return proto
return proto
def get_pips(tile, pips):
proto_pips = {}
proto_pips = {}
for pip in pips:
name = check_and_strip_prefix(pip['pip'], tile + '/')
for pip in pips:
name = check_and_strip_prefix(pip['pip'], tile + '/')
proto_pips[name] = {
'src_wire': check_and_strip_prefix(pip['src_wire'], tile + '/')
if pip['src_wire'] is not None else None,
'dst_wire': check_and_strip_prefix(pip['dst_wire'], tile + '/')
if pip['dst_wire'] is not None else None,
'is_pseudo': pip['is_pseudo'],
'is_directional': pip['is_directional'],
'can_invert': pip['can_invert'],
}
proto_pips[name] = {
'src_wire':
check_and_strip_prefix(pip['src_wire'], tile + '/')
if pip['src_wire'] is not None else None,
'dst_wire':
check_and_strip_prefix(pip['dst_wire'], tile + '/')
if pip['dst_wire'] is not None else None,
'is_pseudo':
pip['is_pseudo'],
'is_directional':
pip['is_directional'],
'can_invert':
pip['can_invert'],
}
return proto_pips
return proto_pips
def compare_and_update_pips(pips, new_pips):
# Pip names are always the same, but sometimes the src_wire or dst_wire
# may be missing.
# Pip names are always the same, but sometimes the src_wire or dst_wire
# may be missing.
assert pips.keys() == new_pips.keys(), repr((pips.keys(), new_pips.keys()))
for name in pips:
if pips[name]['src_wire'] is not None and new_pips[name]['src_wire'] is not None:
assert pips[name]['src_wire'] == new_pips[name]['src_wire'], repr((
pips[name]['src_wire'],
new_pips[name]['src_wire'],
))
elif pips[name]['src_wire'] is None and new_pips[name]['src_wire'] is not None:
pips[name]['src_wire'] = new_pips[name]['src_wire']
assert pips.keys() == new_pips.keys(), repr((pips.keys(), new_pips.keys()))
for name in pips:
if pips[name]['src_wire'] is not None and new_pips[name][
'src_wire'] is not None:
assert pips[name]['src_wire'] == new_pips[name]['src_wire'], repr(
(
pips[name]['src_wire'],
new_pips[name]['src_wire'],
))
elif pips[name]['src_wire'] is None and new_pips[name][
'src_wire'] is not None:
pips[name]['src_wire'] = new_pips[name]['src_wire']
if pips[name]['dst_wire'] is not None and new_pips[name]['dst_wire'] is not None:
assert pips[name]['dst_wire'] == new_pips[name]['dst_wire'], repr((
pips[name]['dst_wire'],
new_pips[name]['dst_wire'],
))
elif pips[name]['dst_wire'] is None and new_pips[name]['dst_wire'] is not None:
pips[name]['dst_wire'] = new_pips[name]['dst_wire']
if pips[name]['dst_wire'] is not None and new_pips[name][
'dst_wire'] is not None:
assert pips[name]['dst_wire'] == new_pips[name]['dst_wire'], repr(
(
pips[name]['dst_wire'],
new_pips[name]['dst_wire'],
))
elif pips[name]['dst_wire'] is None and new_pips[name][
'dst_wire'] is not None:
pips[name]['dst_wire'] = new_pips[name]['dst_wire']
for k in ['is_pseudo', 'is_directional', 'can_invert']:
assert pips[name][k] == new_pips[name][k], (
k, pips[name][k], new_pips[name][k])
for k in ['is_pseudo', 'is_directional', 'can_invert']:
assert pips[name][k] == new_pips[name][k], (k, pips[name][k], new_pips[name][k])
def check_wires(wires, sites, pips):
""" Verify that the wires generates from nodes are a superset of wires in
""" Verify that the wires generates from nodes are a superset of wires in
sites and pips """
if sites is not None:
for site in sites:
for wire_to_site_pin in site['site_pins'].values():
if wire_to_site_pin is not None:
assert wire_to_site_pin in wires, repr((wire_to_site_pin, wires))
if sites is not None:
for site in sites:
for wire_to_site_pin in site['site_pins'].values():
if wire_to_site_pin is not None:
assert wire_to_site_pin in wires, repr(
(wire_to_site_pin, wires))
if pips is not None:
for pip in pips.values():
if pip['src_wire'] is not None:
assert pip['src_wire'] in wires, repr((pip['src_wire'], wires))
if pip['dst_wire'] is not None:
assert pip['dst_wire'] in wires, repr((pip['dst_wire'], wires))
if pips is not None:
for pip in pips.values():
if pip['src_wire'] is not None:
assert pip['src_wire'] in wires, repr((pip['src_wire'], wires))
if pip['dst_wire'] is not None:
assert pip['dst_wire'] in wires, repr((pip['dst_wire'], wires))
def read_json5(fname, nodes):
node_lookup = prjxray.lib.NodeLookup()
node_lookup.load_from_nodes(nodes)
node_lookup = prjxray.lib.NodeLookup()
node_lookup.load_from_nodes(nodes)
#print('{} Reading {} (in pid {})'.format(datetime.datetime.now(), fname, os.getpid()))
with open(fname) as f:
tile = json5.load(f)
#print('{} Reading {} (in pid {})'.format(datetime.datetime.now(), fname, os.getpid()))
with open(fname) as f:
tile = json5.load(f)
#print('{} Done reading {}'.format(datetime.datetime.now(), fname))
def get_site_types():
for site in tile['sites']:
yield get_prototype_site(site)
#print('{} Done reading {}'.format(datetime.datetime.now(), fname))
def get_site_types():
for site in tile['sites']:
yield get_prototype_site(site)
site_types = tuple(get_site_types())
sites = tuple(get_sites(tile, node_lookup.site_pin_node_to_wires))
pips = get_pips(tile['tile'], tile['pips'])
def inner():
for wire in tile['wires']:
assert wire['wire'].startswith(tile['tile'] + '/')
yield wire['wire'][len(tile['tile'])+1:]
site_types = tuple(get_site_types())
sites = tuple(get_sites(tile, node_lookup.site_pin_node_to_wires))
pips = get_pips(tile['tile'], tile['pips'])
wires = set(inner())
wires_from_nodes = set(node_lookup.wires_for_tile(tile['tile']))
assert len(wires_from_nodes - wires) == 0, repr((wires, wires_from_nodes))
def inner():
for wire in tile['wires']:
assert wire['wire'].startswith(tile['tile'] + '/')
yield wire['wire'][len(tile['tile']) + 1:]
wires = set(inner())
wires_from_nodes = set(node_lookup.wires_for_tile(tile['tile']))
assert len(wires_from_nodes - wires) == 0, repr((wires, wires_from_nodes))
return fname, tile, site_types, sites, pips, wires
return fname, tile, site_types, sites, pips, wires
def reduce_tile(pool, site_types, tile_type, tile_instances, node_lookup):
sites = None
pips = None
wires = set()
sites = None
pips = None
wires = set()
with progressbar.ProgressBar(max_value=len(tile_instances)) as bar:
chunksize = 20
if len(tile_instances) < chunksize*2:
iter = map(lambda file: read_json5(file, node_lookup.nodes), tile_instances)
else:
print('{} Using pool.imap_unordered'.format(datetime.datetime.now()))
iter = pool.imap_unordered(
functools.partial(read_json5, nodes=node_lookup.nodes),
tile_instances,
chunksize=chunksize,
)
for idx, (fname, tile, new_site_types, new_sites, new_pips, new_wires) in enumerate(iter):
bar.update(idx)
assert tile['type'] == tile_type, repr((tile['tile'], tile_type))
for site_type in new_site_types:
if site_type['type'] in site_types:
prjxray.lib.compare_prototype_site(site_type, site_types[site_type['type']])
with progressbar.ProgressBar(max_value=len(tile_instances)) as bar:
chunksize = 20
if len(tile_instances) < chunksize * 2:
iter = map(
lambda file: read_json5(file, node_lookup.nodes),
tile_instances)
else:
site_types[site_type['type']] = site_type
print(
'{} Using pool.imap_unordered'.format(datetime.datetime.now()))
iter = pool.imap_unordered(
functools.partial(read_json5, nodes=node_lookup.nodes),
tile_instances,
chunksize=chunksize,
)
# Sites are expected to always be the same
if sites is None:
sites = new_sites
else:
compare_sites_and_update(tile['tile'], sites, new_sites)
for idx, (fname, tile, new_site_types, new_sites, new_pips,
new_wires) in enumerate(iter):
bar.update(idx)
if pips is None:
pips = new_pips
else:
compare_and_update_pips(pips, new_pips)
assert tile['type'] == tile_type, repr((tile['tile'], tile_type))
wires |= new_wires
for site_type in new_site_types:
if site_type['type'] in site_types:
prjxray.lib.compare_prototype_site(
site_type, site_types[site_type['type']])
else:
site_types[site_type['type']] = site_type
bar.update(idx+1)
# Sites are expected to always be the same
if sites is None:
sites = new_sites
else:
compare_sites_and_update(tile['tile'], sites, new_sites)
check_wires(wires, sites, pips)
if pips is None:
pips = new_pips
else:
compare_and_update_pips(pips, new_pips)
return {
'tile_type': tile_type,
'sites': sites,
'pips': pips,
'wires': tuple(wires),
}
wires |= new_wires
bar.update(idx + 1)
check_wires(wires, sites, pips)
return {
'tile_type': tile_type,
'sites': sites,
'pips': pips,
'wires': tuple(wires),
}
def main():
parser = argparse.ArgumentParser(description="Reduces raw database dump into prototype tiles, grid, and connections.")
parser.add_argument('--root_dir', required=True)
parser.add_argument('--output_dir', required=True)
parser.add_argument('--ignore_cache', action='store_true')
parser = argparse.ArgumentParser(
description=
"Reduces raw database dump into prototype tiles, grid, and connections."
)
parser.add_argument('--root_dir', required=True)
parser.add_argument('--output_dir', required=True)
parser.add_argument('--ignore_cache', action='store_true')
args = parser.parse_args()
args = parser.parse_args()
print('{} Reading root.csv'.format(datetime.datetime.now()))
tiles, nodes = prjxray.lib.read_root_csv(args.root_dir)
print('{} Reading root.csv'.format(datetime.datetime.now()))
tiles, nodes = prjxray.lib.read_root_csv(args.root_dir)
print('{} Loading node<->wire mapping'.format(datetime.datetime.now()))
node_lookup = prjxray.lib.NodeLookup()
node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle')
if os.path.exists(node_lookup_file) and not args.ignore_cache:
node_lookup.load_from_file(node_lookup_file)
else:
node_lookup.load_from_root_csv(nodes)
node_lookup.save_to_file(node_lookup_file)
print('{} Loading node<->wire mapping'.format(datetime.datetime.now()))
node_lookup = prjxray.lib.NodeLookup()
node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle')
if os.path.exists(node_lookup_file) and not args.ignore_cache:
node_lookup.load_from_file(node_lookup_file)
else:
node_lookup.load_from_root_csv(nodes)
node_lookup.save_to_file(node_lookup_file)
site_types = {}
processes = min(multiprocessing.cpu_count(), 10)
print('Running {} processes'.format(processes))
pool = multiprocessing.Pool(processes=processes)
for tile_type in sorted(tiles.keys()):
#for tile_type in ['CLBLL_L', 'CLBLL_R', 'CLBLM_L', 'CLBLM_R', 'INT_L', 'INT_L']:
tile_type_file = os.path.join(args.output_dir, 'tile_type_{}.json'.format(tile_type))
site_types = {}
if os.path.exists(tile_type_file):
print('{} Skip reduced tile for {}'.format(datetime.datetime.now(), tile_type))
continue
print('{} Generating reduced tile for {}'.format(datetime.datetime.now(), tile_type))
reduced_tile = reduce_tile(
pool,
site_types,
tile_type, tiles[tile_type],
node_lookup)
for site_type in site_types:
with open(os.path.join(
args.output_dir,
'tile_type_{}_site_type_{}.json'.format(
tile_type,
site_types[site_type]['type']
)), 'w') as f:
json.dump(site_types[site_type], f, indent=2)
with open(tile_type_file, 'w') as f:
json.dump(reduced_tile, f, indent=2)
processes = min(multiprocessing.cpu_count(), 10)
print('Running {} processes'.format(processes))
pool = multiprocessing.Pool(processes=processes)
for tile_type in sorted(tiles.keys()):
#for tile_type in ['CLBLL_L', 'CLBLL_R', 'CLBLM_L', 'CLBLM_R', 'INT_L', 'INT_L']:
tile_type_file = os.path.join(
args.output_dir, 'tile_type_{}.json'.format(tile_type))
site_types = {}
if os.path.exists(tile_type_file):
print(
'{} Skip reduced tile for {}'.format(
datetime.datetime.now(), tile_type))
continue
print(
'{} Generating reduced tile for {}'.format(
datetime.datetime.now(), tile_type))
reduced_tile = reduce_tile(
pool, site_types, tile_type, tiles[tile_type], node_lookup)
for site_type in site_types:
with open(os.path.join(
args.output_dir, 'tile_type_{}_site_type_{}.json'.format(
tile_type, site_types[site_type]['type'])), 'w') as f:
json.dump(site_types[site_type], f, indent=2)
with open(tile_type_file, 'w') as f:
json.dump(reduced_tile, f, indent=2)
if __name__ == '__main__':
main()
main()

View File

@ -3,64 +3,72 @@ from collections import namedtuple
WireInGrid = namedtuple('WireInGrid', 'tile grid_x grid_y wire')
Connection = namedtuple('Connection', 'wire_a wire_b')
class Connections(object):
def __init__(self, tilegrid, tileconn, tile_wires):
self.grid = tilegrid['tiles']
self.tile_wires = tile_wires
self.coord_to_tile = {}
self.coord_to_tile_type = {}
def __init__(self, tilegrid, tileconn, tile_wires):
self.grid = tilegrid['tiles']
self.tile_wires = tile_wires
self.coord_to_tile = {}
self.coord_to_tile_type = {}
for tile, tile_info in self.grid.items():
self.coord_to_tile[(tile_info['grid_x'], tile_info['grid_y'])] = tile
self.coord_to_tile_type[(tile_info['grid_x'], tile_info['grid_y'])] = tile_info['type']
for tile, tile_info in self.grid.items():
self.coord_to_tile[(tile_info['grid_x'],
tile_info['grid_y'])] = tile
self.coord_to_tile_type[(tile_info['grid_x'],
tile_info['grid_y'])] = tile_info['type']
# Make sure we have tile type info for every tile in the grid.
assert tile_info['type'] in self.tile_wires, (tile_info['type'], self.tile_wires.keys())
# Make sure we have tile type info for every tile in the grid.
assert tile_info['type'] in self.tile_wires, (
tile_info['type'], self.tile_wires.keys())
self.potential_connections = {}
self.potential_connections = {}
for conn in tileconn:
grid_deltas = conn['grid_deltas']
tile_types = conn['tile_types']
for conn in tileconn:
grid_deltas = conn['grid_deltas']
tile_types = conn['tile_types']
for pairs in conn['wire_pairs']:
key = (tile_types[0], pairs[0])
if key not in self.potential_connections:
self.potential_connections[key] = []
self.potential_connections[key].append(
(grid_deltas, tile_types[1], pairs[1]))
def all_possible_connections_from(self, wire_in_grid):
tile_type = self.coord_to_tile_type[(
wire_in_grid.grid_x, wire_in_grid.grid_y)]
key = (tile_type, wire_in_grid.wire)
for pairs in conn['wire_pairs']:
key = (tile_types[0], pairs[0])
if key not in self.potential_connections:
self.potential_connections[key] = []
self.potential_connections[key].append((
grid_deltas, tile_types[1], pairs[1]
))
return
def all_possible_connections_from(self, wire_in_grid):
tile_type = self.coord_to_tile_type[(wire_in_grid.grid_x, wire_in_grid.grid_y)]
for relative_coord, target_tile_type, target_wire in (
self.potential_connections[key]):
rel_x, rel_y = relative_coord
target_coord = (
wire_in_grid.grid_x + rel_x, wire_in_grid.grid_y + rel_y)
key = (tile_type, wire_in_grid.wire)
if target_coord in self.coord_to_tile_type:
if self.coord_to_tile_type[target_coord] == target_tile_type:
yield Connection(
wire_in_grid,
WireInGrid(
tile=self.coord_to_tile[target_coord],
grid_x=target_coord[0],
grid_y=target_coord[1],
wire=target_wire))
if key not in self.potential_connections:
return
for relative_coord, target_tile_type, target_wire in (
self.potential_connections[key]):
rel_x, rel_y = relative_coord
target_coord = (wire_in_grid.grid_x+rel_x, wire_in_grid.grid_y+rel_y)
if target_coord in self.coord_to_tile_type:
if self.coord_to_tile_type[target_coord] == target_tile_type:
yield Connection(wire_in_grid, WireInGrid(
tile = self.coord_to_tile[target_coord],
grid_x = target_coord[0],
grid_y = target_coord[1],
wire = target_wire))
def get_connections(self):
""" Yields Connection objects that represent all connections present in
def get_connections(self):
""" Yields Connection objects that represent all connections present in
the grid based on tileconn """
for tile, tile_info in self.grid.items():
for wire in self.tile_wires[tile_info['type']]:
wire_in_grid = WireInGrid(
tile = tile,
grid_x = tile_info['grid_x'],
grid_y = tile_info['grid_y'],
wire = wire)
for potential_connection in self.all_possible_connections_from(wire_in_grid):
yield potential_connection
for tile, tile_info in self.grid.items():
for wire in self.tile_wires[tile_info['type']]:
wire_in_grid = WireInGrid(
tile=tile,
grid_x=tile_info['grid_x'],
grid_y=tile_info['grid_y'],
wire=wire)
for potential_connection in self.all_possible_connections_from(
wire_in_grid):
yield potential_connection

View File

@ -4,93 +4,101 @@ from prjxray import grid
from prjxray import tile
from prjxray import connections
def get_available_databases(prjxray_root):
""" Return set of available directory to databases given the root directory
""" Return set of available directory to databases given the root directory
of prjxray-db
"""
db_types = set()
for d in os.listdir(prjxray_root):
if d.startswith("."):
continue
db_types = set()
for d in os.listdir(prjxray_root):
if d.startswith("."):
continue
dpath = os.path.join(prjxray_root, d)
dpath = os.path.join(prjxray_root, d)
if os.path.exists(os.path.join(dpath, "settings.sh")):
db_types.add(dpath)
if os.path.exists(os.path.join(dpath, "settings.sh")):
db_types.add(dpath)
return db_types
return db_types
class Database(object):
def __init__(self, db_root):
""" Create project x-ray Database at given db_root.
def __init__(self, db_root):
""" Create project x-ray Database at given db_root.
db_root: Path to directory containing settings.sh, *.db, tilegrid.json and
tileconn.json
"""
self.db_root = db_root
self.tilegrid = None
self.tileconn = None
self.tile_types = None
self.db_root = db_root
self.tilegrid = None
self.tileconn = None
self.tile_types = None
self.tile_types = {}
for f in os.listdir(self.db_root):
if f.endswith('.json') and f.startswith('tile_type_'):
tile_type = f[len('tile_type_'):-len('.json')].lower()
self.tile_types = {}
for f in os.listdir(self.db_root):
if f.endswith('.json') and f.startswith('tile_type_'):
tile_type = f[len('tile_type_'):-len('.json')].lower()
segbits = os.path.join(self.db_root, 'segbits_{}.db'.format(tile_type))
if not os.path.isfile(segbits):
segbits = None
segbits = os.path.join(
self.db_root, 'segbits_{}.db'.format(tile_type))
if not os.path.isfile(segbits):
segbits = None
mask = os.path.join(self.db_root, 'mask_{}.db'.format(tile_type))
if not os.path.isfile(mask):
mask = None
mask = os.path.join(
self.db_root, 'mask_{}.db'.format(tile_type))
if not os.path.isfile(mask):
mask = None
tile_type_file = os.path.join(self.db_root, 'tile_type_{}.json'.format(tile_type.upper()))
if not os.path.isfile(tile_type_file):
tile_type_file = None
tile_type_file = os.path.join(
self.db_root, 'tile_type_{}.json'.format(
tile_type.upper()))
if not os.path.isfile(tile_type_file):
tile_type_file = None
self.tile_types[tile_type.upper()] = tile.TileDbs(
segbits = segbits,
mask = mask,
tile_type = tile_type_file,
)
self.tile_types[tile_type.upper()] = tile.TileDbs(
segbits=segbits,
mask=mask,
tile_type=tile_type_file,
)
def get_tile_types(self):
""" Return list of tile types """
return self.tile_types.keys()
def get_tile_types(self):
""" Return list of tile types """
return self.tile_types.keys()
def get_tile_type(self, tile_type):
""" Return Tile object for given tilename. """
return tile.Tile(tile_type, self.tile_types[tile_type])
def get_tile_type(self, tile_type):
""" Return Tile object for given tilename. """
return tile.Tile(tile_type, self.tile_types[tile_type])
def _read_tilegrid(self):
""" Read tilegrid database if not already read. """
if not self.tilegrid:
with open(os.path.join(self.db_root, 'tilegrid.json')) as f:
self.tilegrid = json.load(f)
def _read_tilegrid(self):
""" Read tilegrid database if not already read. """
if not self.tilegrid:
with open(os.path.join(self.db_root, 'tilegrid.json')) as f:
self.tilegrid = json.load(f)
def _read_tileconn(self):
""" Read tileconn database if not already read. """
if not self.tileconn:
with open(os.path.join(self.db_root, 'tileconn.json')) as f:
self.tileconn = json.load(f)
def _read_tileconn(self):
""" Read tileconn database if not already read. """
if not self.tileconn:
with open(os.path.join(self.db_root, 'tileconn.json')) as f:
self.tileconn = json.load(f)
def grid(self):
""" Return Grid object for database. """
self._read_tilegrid()
return grid.Grid(self.tilegrid)
def grid(self):
""" Return Grid object for database. """
self._read_tilegrid()
return grid.Grid(self.tilegrid)
def _read_tile_types(self):
for tile_type, db in self.tile_types.items():
with open(db.tile_type) as f:
self.tile_types[tile_type] = json.load(f)
def _read_tile_types(self):
for tile_type, db in self.tile_types.items():
with open(db.tile_type) as f:
self.tile_types[tile_type] = json.load(f)
def connections(self):
self._read_tilegrid()
self._read_tileconn()
self._read_tile_types()
def connections(self):
self._read_tilegrid()
self._read_tileconn()
self._read_tile_types()
tile_wires = dict((tile_type, db['wires'])
for tile_type, db in self.tile_types.items())
return connections.Connections(self.tilegrid, self.tileconn, tile_wires)
tile_wires = dict(
(tile_type, db['wires'])
for tile_type, db in self.tile_types.items())
return connections.Connections(
self.tilegrid, self.tileconn, tile_wires)

View File

@ -3,49 +3,51 @@ from collections import namedtuple
GridLoc = namedtuple('GridLoc', 'grid_x grid_y')
GridInfo = namedtuple('GridInfo', 'segment sites tile_type')
class Grid(object):
""" Object that represents grid for a given database.
""" Object that represents grid for a given database.
Provides methods to inspect grid by name or location. Also provides mapping
of segment offsets for particular grid locations and their tile types.
"""
def __init__(self, tilegrid):
self.tilegrid = tilegrid
self.loc = {}
self.tileinfo = {}
for tile in self.tilegrid['tiles']:
tileinfo = self.tilegrid['tiles'][tile]
grid_loc = GridLoc(tileinfo['grid_x'], tileinfo['grid_y'])
self.loc[grid_loc] = tile
self.tileinfo[tile] = GridInfo(
segment = tileinfo['segment'] if 'segment' in tileinfo else None,
sites = tileinfo['sites'],
tile_type = tileinfo['type'])
def __init__(self, tilegrid):
self.tilegrid = tilegrid
self.loc = {}
self.tileinfo = {}
x, y = zip(*self.loc.keys())
self._dims = (min(x), max(x), min(y), max(y))
for tile in self.tilegrid['tiles']:
tileinfo = self.tilegrid['tiles'][tile]
grid_loc = GridLoc(tileinfo['grid_x'], tileinfo['grid_y'])
self.loc[grid_loc] = tile
self.tileinfo[tile] = GridInfo(
segment=tileinfo['segment'] if 'segment' in tileinfo else None,
sites=tileinfo['sites'],
tile_type=tileinfo['type'])
def tile_locations(self):
""" Return list of tile locations. """
return self.loc.keys()
x, y = zip(*self.loc.keys())
self._dims = (min(x), max(x), min(y), max(y))
def dims(self):
""" Returns (x_min, x_max, y_min, y_max) for given Grid. """
return self._dims
def tile_locations(self):
""" Return list of tile locations. """
return self.loc.keys()
def is_populated(self, grid_loc):
return grid_loc in self.loc
def dims(self):
""" Returns (x_min, x_max, y_min, y_max) for given Grid. """
return self._dims
def loc_of_tilename(self, tilename):
tileinfo = self.tilegrid['tiles'][tilename]
return GridLoc(tileinfo['grid_x'], tileinfo['grid_y'])
def is_populated(self, grid_loc):
return grid_loc in self.loc
def tilename_at_loc(self, grid_loc):
return self.loc[grid_loc]
def loc_of_tilename(self, tilename):
tileinfo = self.tilegrid['tiles'][tilename]
return GridLoc(tileinfo['grid_x'], tileinfo['grid_y'])
def gridinfo_at_loc(self, grid_loc):
return self.tileinfo[self.loc[grid_loc]]
def tilename_at_loc(self, grid_loc):
return self.loc[grid_loc]
def gridinfo_at_tilename(self, tilename):
return self.tileinfo[tilename]
def gridinfo_at_loc(self, grid_loc):
return self.tileinfo[self.loc[grid_loc]]
def gridinfo_at_tilename(self, tilename):
return self.tileinfo[tilename]

View File

@ -4,29 +4,32 @@ import pickle
import pyjson5 as json5
import progressbar
def read_root_csv(root_dir):
""" Reads root.csv from raw db directory.
""" Reads root.csv from raw db directory.
This should only be used during database generation.
"""
tiles = {}
nodes = []
tiles = {}
nodes = []
with open(os.path.join(root_dir, 'root.csv')) as f:
for d in csv.DictReader(f):
if d['filetype'] == 'tile':
if d['subtype'] not in tiles:
tiles[d['subtype']] = []
with open(os.path.join(root_dir, 'root.csv')) as f:
for d in csv.DictReader(f):
if d['filetype'] == 'tile':
if d['subtype'] not in tiles:
tiles[d['subtype']] = []
tiles[d['subtype']].append(os.path.join(root_dir, d['filename']))
elif d['filetype'] == 'node':
nodes.append(os.path.join(root_dir, d['filename']))
tiles[d['subtype']].append(
os.path.join(root_dir, d['filename']))
elif d['filetype'] == 'node':
nodes.append(os.path.join(root_dir, d['filename']))
return tiles, nodes
return tiles, nodes
def verify_nodes(raw_nodes, nodes, error_nodes):
""" Compares raw_nodes with generated_nodes and adds errors to error_nodes.
""" Compares raw_nodes with generated_nodes and adds errors to error_nodes.
Args:
raw_nodes - Iterable of (node name, iterable of wires in node).
@ -36,24 +39,26 @@ def verify_nodes(raw_nodes, nodes, error_nodes):
that did not match.
"""
wire_nodes = {}
for node in nodes:
node_set = set(node)
for wire in node:
wire_nodes[wire] = node_set
wire_nodes = {}
for node in nodes:
node_set = set(node)
for wire in node:
wire_nodes[wire] = node_set
for node, raw_node_wires in raw_nodes:
raw_node_set = set(raw_node_wires)
for node, raw_node_wires in raw_nodes:
raw_node_set = set(raw_node_wires)
for wire in sorted(raw_node_set):
if wire not in wire_nodes:
if set((wire, )) != raw_node_set:
error_nodes.append((node, tuple(raw_node_set), (wire, )))
elif wire_nodes[wire] != raw_node_set:
error_nodes.append(
(node, tuple(raw_node_set), tuple(wire_nodes[wire])))
for wire in sorted(raw_node_set):
if wire not in wire_nodes:
if set((wire,)) != raw_node_set:
error_nodes.append((node, tuple(raw_node_set), (wire,)))
elif wire_nodes[wire] != raw_node_set:
error_nodes.append((node, tuple(raw_node_set), tuple(wire_nodes[wire])))
def check_errors(flat_error_nodes, ignored_wires):
""" Check if error_nodes has errors that are not covered in ignored_wires.
""" Check if error_nodes has errors that are not covered in ignored_wires.
Args:
flat_error_nodes - List of error_nodes generated from verify_nodes.
@ -61,82 +66,85 @@ def check_errors(flat_error_nodes, ignored_wires):
"""
error_nodes = {}
for node, raw_node, generated_nodes in flat_error_nodes:
if node not in error_nodes:
error_nodes[node] = {
'raw_node': set(raw_node),
'generated_nodes': set(),
}
error_nodes = {}
for node, raw_node, generated_nodes in flat_error_nodes:
if node not in error_nodes:
error_nodes[node] = {
'raw_node': set(raw_node),
'generated_nodes': set(),
}
# Make sure all raw nodes are the same.
assert error_nodes[node]['raw_node'] == set(raw_node)
# Make sure all raw nodes are the same.
assert error_nodes[node]['raw_node'] == set(raw_node)
error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes)))
error_nodes[node]['generated_nodes'].add(
tuple(sorted(generated_nodes)))
for node, error in error_nodes.items():
combined_generated_nodes = set()
for generated_node in error['generated_nodes']:
combined_generated_nodes |= set(generated_node)
for node, error in error_nodes.items():
combined_generated_nodes = set()
for generated_node in error['generated_nodes']:
combined_generated_nodes |= set(generated_node)
# Make sure there are not extra wires in nodes.
assert error['raw_node'] == combined_generated_nodes, (node, error)
# Make sure there are not extra wires in nodes.
assert error['raw_node'] == combined_generated_nodes, (node, error)
good_node = max(error['generated_nodes'], key=lambda x: len(x))
bad_nodes = error['generated_nodes'] - set((good_node,))
good_node = max(error['generated_nodes'], key=lambda x: len(x))
bad_nodes = error['generated_nodes'] - set((good_node, ))
# Max sure only single wires are stranded
assert max(len(generated_node) for generated_node in bad_nodes) == 1
# Max sure only single wires are stranded
assert max(len(generated_node) for generated_node in bad_nodes) == 1
for generate_node in bad_nodes:
for wire in generate_node:
if wire not in ignored_wires:
return False
for generate_node in bad_nodes:
for wire in generate_node:
if wire not in ignored_wires:
return False
return True
return True
class NodeLookup(object):
def __init__(self):
self.nodes = {}
def __init__(self):
self.nodes = {}
def load_from_nodes(self, nodes):
self.nodes = nodes
def load_from_nodes(self, nodes):
self.nodes = nodes
def load_from_root_csv(self, nodes):
for node in progressbar.progressbar(nodes):
with open(node) as f:
node_wires = json5.load(f)
assert node_wires['node'] not in self.nodes
self.nodes[node_wires['node']] = node_wires['wires']
def load_from_root_csv(self, nodes):
for node in progressbar.progressbar(nodes):
with open(node) as f:
node_wires = json5.load(f)
assert node_wires['node'] not in self.nodes
self.nodes[node_wires['node']] = node_wires['wires']
def load_from_file(self, fname):
with open(fname, 'rb') as f:
self.nodes = pickle.load(f)
def load_from_file(self, fname):
with open(fname, 'rb') as f:
self.nodes = pickle.load(f)
def save_to_file(self, fname):
with open(fname, 'wb') as f:
pickle.dump(self.nodes, f)
def save_to_file(self, fname):
with open(fname, 'wb') as f:
pickle.dump(self.nodes, f)
def site_pin_node_to_wires(self, tile, node):
if node is None:
return
def site_pin_node_to_wires(self, tile, node):
if node is None:
return
node_wires = self.nodes[node]
node_wires = self.nodes[node]
for wire in node_wires:
if wire['wire'].startswith(tile + '/'):
yield wire['wire'][len(tile)+1:]
for wire in node_wires:
if wire['wire'].startswith(tile + '/'):
yield wire['wire'][len(tile) + 1:]
def wires_for_tile(self, tile):
for node in self.nodes.values():
for wire in node:
if wire['wire'].startswith(tile + '/'):
yield wire['wire'][len(tile) + 1:]
def wires_for_tile(self, tile):
for node in self.nodes.values():
for wire in node:
if wire['wire'].startswith(tile + '/'):
yield wire['wire'][len(tile)+1:]
def compare_prototype_site(proto_a, proto_b):
""" Compare two proto site type.
""" Compare two proto site type.
Will assert if prototypes are not equivalent.
"""
assert proto_a == proto_b, repr((proto_a, proto_b))
assert proto_a == proto_b, repr((proto_a, proto_b))

View File

@ -1,11 +1,9 @@
from collections import namedtuple
import json
""" Database files available for a tile """
TileDbs = namedtuple('TileDbs', 'segbits mask tile_type')
Pip = namedtuple('Pip', 'net_to net_from can_invert is_directional is_pseudo')
""" Site - Represents an instance of a site within a tile.
name - Name of site within tile, instance specific.
@ -17,7 +15,6 @@ pins - Instaces of site pins within this site and tile. This is an tuple of
"""
Site = namedtuple('Site', 'name x y type site_pins')
""" SitePin - Tuple representing a site pin within a tile.
Sites are generic based on type, however sites are instanced
@ -33,53 +30,55 @@ wire - Wire name within the tile. This name is site instance specific.
"""
SitePin = namedtuple('SitePin', 'name wire direction')
class Tile(object):
""" Provides abstration of a tile in the database. """
def __init__(self, tilename, tile_dbs):
self.tilename = tilename
self.tilename_upper = self.tilename.upper()
self.tile_dbs = tile_dbs
""" Provides abstration of a tile in the database. """
self.wires = None
self.sites = None
self.pips = None
def __init__(self, tilename, tile_dbs):
self.tilename = tilename
self.tilename_upper = self.tilename.upper()
self.tile_dbs = tile_dbs
def yield_sites(sites):
for site in sites:
yield Site(
name = None,
type = site['type'],
x = None,
y = None,
site_pins = site['site_pins'],
)
self.wires = None
self.sites = None
self.pips = None
def yield_pips(pips):
for pip in pips:
yield Pip(
net_to = pip['dst_wire'],
net_from = pip['src_wire'],
can_invert = bool(int(pip['can_invert'])),
is_directional = bool(int(pip['is_directional'])),
is_pseudo = bool(int(pip['is_pseudo'])),
)
def yield_sites(sites):
for site in sites:
yield Site(
name=None,
type=site['type'],
x=None,
y=None,
site_pins=site['site_pins'],
)
with open(self.tile_dbs.tile_type) as f:
tile_type = json.load(f)
assert self.tilename_upper == tile_type['tile_type']
self.wires = tile_type['wires']
self.sites = tuple(yield_sites(tile_type['sites']))
self.pips = tuple(yield_pips(tile_type['pips']))
def yield_pips(pips):
for pip in pips:
yield Pip(
net_to=pip['dst_wire'],
net_from=pip['src_wire'],
can_invert=bool(int(pip['can_invert'])),
is_directional=bool(int(pip['is_directional'])),
is_pseudo=bool(int(pip['is_pseudo'])),
)
def get_wires(self):
"""Returns a set of wire names present in this tile."""
return self.wires
with open(self.tile_dbs.tile_type) as f:
tile_type = json.load(f)
assert self.tilename_upper == tile_type['tile_type']
self.wires = tile_type['wires']
self.sites = tuple(yield_sites(tile_type['sites']))
self.pips = tuple(yield_pips(tile_type['pips']))
def get_sites(self):
""" Returns tuple of Site namedtuple's present in this tile. """
return self.sites
def get_wires(self):
"""Returns a set of wire names present in this tile."""
return self.wires
def get_pips(self):
""" Returns tuple of Pip namedtuple's representing the PIPs in this tile.
def get_sites(self):
""" Returns tuple of Site namedtuple's present in this tile. """
return self.sites
def get_pips(self):
""" Returns tuple of Pip namedtuple's representing the PIPs in this tile.
"""
return self.pips
return self.pips

View File

@ -2,29 +2,34 @@ from __future__ import print_function
import prjxray.db
import argparse
def quick_test(db_root):
db = prjxray.db.Database(db_root)
g = db.grid()
db = prjxray.db.Database(db_root)
g = db.grid()
# Verify that we have some tile information for every tile in grid.
tile_types_in_grid = set(g.gridinfo_at_loc(loc).tile_type for loc in g.tile_locations())
tile_types_in_db = set(db.get_tile_types())
assert len(tile_types_in_grid - tile_types_in_db) == 0
# Verify that we have some tile information for every tile in grid.
tile_types_in_grid = set(
g.gridinfo_at_loc(loc).tile_type for loc in g.tile_locations())
tile_types_in_db = set(db.get_tile_types())
assert len(tile_types_in_grid - tile_types_in_db) == 0
# Verify that all tile types can be loaded.
for tile_type in db.get_tile_types():
tile = db.get_tile_type(tile_type)
tile.get_wires()
tile.get_sites()
tile.get_pips()
# Verify that all tile types can be loaded.
for tile_type in db.get_tile_types():
tile = db.get_tile_type(tile_type)
tile.get_wires()
tile.get_sites()
tile.get_pips()
def main():
parser = argparse.ArgumentParser(description="Runs a sanity check on a prjxray database.")
parser.add_argument('--db_root', required=True)
parser = argparse.ArgumentParser(
description="Runs a sanity check on a prjxray database.")
parser.add_argument('--db_root', required=True)
args = parser.parse_args()
args = parser.parse_args()
quick_test(args.db_root)
quick_test(args.db_root)
if __name__ == '__main__':
main()
main()

View File

@ -9,111 +9,123 @@ import pyjson5 as json5
import json
import sys
def full_wire_name(wire_in_grid):
return '{}/{}'.format(wire_in_grid.tile, wire_in_grid.wire)
return '{}/{}'.format(wire_in_grid.tile, wire_in_grid.wire)
def make_connection(wires, connection):
wire_a = full_wire_name(connection.wire_a)
wire_b = full_wire_name(connection.wire_b)
wire_a = full_wire_name(connection.wire_a)
wire_b = full_wire_name(connection.wire_b)
if wire_a not in wires:
wires[wire_a] = set((wire_a,))
if wire_a not in wires:
wires[wire_a] = set((wire_a, ))
if wire_b not in wires:
wires[wire_b] = set((wire_b,))
if wire_b not in wires:
wires[wire_b] = set((wire_b, ))
wire_a_set = wires[wire_a]
wire_b_set = wires[wire_b]
wire_a_set = wires[wire_a]
wire_b_set = wires[wire_b]
if wire_a_set is wire_b_set:
return
if wire_a_set is wire_b_set:
return
wire_a_set |= wire_b_set
wire_a_set |= wire_b_set
for wire in wire_a_set:
wires[wire] = wire_a_set
for wire in wire_a_set:
wires[wire] = wire_a_set
def make_connections(db_root):
db = prjxray.db.Database(db_root)
c = db.connections()
db = prjxray.db.Database(db_root)
c = db.connections()
wires = {}
for connection in c.get_connections():
make_connection(wires, connection)
wires = {}
for connection in c.get_connections():
make_connection(wires, connection)
nodes = {}
nodes = {}
for wire_node in wires.values():
nodes[id(wire_node)] = wire_node
for wire_node in wires.values():
nodes[id(wire_node)] = wire_node
return nodes.values()
return nodes.values()
def read_json5(fname):
with open(fname, 'r') as f:
return json5.load(f)
with open(fname, 'r') as f:
return json5.load(f)
def main():
parser = argparse.ArgumentParser(description="Tests database against raw node list.")
parser.add_argument('--db_root', required=True)
parser.add_argument('--raw_node_root', required=True)
parser.add_argument('--error_nodes', default="error_nodes.json")
parser.add_argument('--ignored_wires')
parser = argparse.ArgumentParser(
description="Tests database against raw node list.")
parser.add_argument('--db_root', required=True)
parser.add_argument('--raw_node_root', required=True)
parser.add_argument('--error_nodes', default="error_nodes.json")
parser.add_argument('--ignored_wires')
args = parser.parse_args()
args = parser.parse_args()
processes = min(multiprocessing.cpu_count(), 10)
processes = min(multiprocessing.cpu_count(), 10)
print('{} Running {} processes'.format(datetime.datetime.now(), processes))
pool = multiprocessing.Pool(processes=processes)
print('{} Reading raw data index'.format(datetime.datetime.now(), processes))
_, nodes = prjxray.lib.read_root_csv(args.raw_node_root)
print('{} Reading raw_node_data'.format(datetime.datetime.now()))
raw_node_data = []
with progressbar.ProgressBar(max_value=len(nodes)) as bar:
for idx, node in enumerate(pool.imap_unordered(
read_json5,
nodes,
chunksize = 20,
)):
bar.update(idx)
raw_node_data.append((node['node'], tuple(wire['wire'] for wire in node['wires'])))
bar.update(idx+1)
print('{} Running {} processes'.format(datetime.datetime.now(), processes))
pool = multiprocessing.Pool(processes=processes)
print(
'{} Reading raw data index'.format(datetime.datetime.now(), processes))
_, nodes = prjxray.lib.read_root_csv(args.raw_node_root)
print('{} Reading raw_node_data'.format(datetime.datetime.now()))
raw_node_data = []
with progressbar.ProgressBar(max_value=len(nodes)) as bar:
for idx, node in enumerate(pool.imap_unordered(
read_json5,
nodes,
chunksize=20,
)):
bar.update(idx)
raw_node_data.append(
(node['node'], tuple(wire['wire'] for wire in node['wires'])))
bar.update(idx + 1)
print('{} Creating connections'.format(datetime.datetime.now()))
generated_nodes = make_connections(args.db_root)
print('{} Creating connections'.format(datetime.datetime.now()))
generated_nodes = make_connections(args.db_root)
print('{} Verifying connections'.format(datetime.datetime.now()))
error_nodes = []
prjxray.lib.verify_nodes(raw_node_data, generated_nodes, error_nodes)
print('{} Verifying connections'.format(datetime.datetime.now()))
error_nodes = []
prjxray.lib.verify_nodes(raw_node_data, generated_nodes, error_nodes)
if len(error_nodes) > 0:
if args.ignored_wires:
with open(args.ignored_wires, 'r') as f:
ignored_wires = [l.strip() for l in f.readlines()]
if len(error_nodes) > 0:
if args.ignored_wires:
with open(args.ignored_wires, 'r') as f:
ignored_wires = [l.strip() for l in f.readlines()]
print('{} Found {} errors, writing errors to {}'.format(
datetime.datetime.now(),
len(error_nodes),
args.error_nodes,
))
print(
'{} Found {} errors, writing errors to {}'.format(
datetime.datetime.now(),
len(error_nodes),
args.error_nodes,
))
with open(args.error_nodes, 'w') as f:
json.dump(error_nodes, f, indent=2)
with open(args.error_nodes, 'w') as f:
json.dump(error_nodes, f, indent=2)
if not args.ignored_wires:
sys.exit(1)
if not args.ignored_wires:
sys.exit(1)
if not prjxray.lib.check_errors(error_nodes, ignored_wires):
print(
'{} Errors were not ignored via ignored_wires {}'.format(
datetime.datetime.now(),
args.ignored_wires,
))
sys.exit(1)
else:
print(
'{} All errors were via ignored_wires {}'.format(
datetime.datetime.now(),
args.ignored_wires,
))
if not prjxray.lib.check_errors(error_nodes, ignored_wires):
print('{} Errors were not ignored via ignored_wires {}'.format(
datetime.datetime.now(),
args.ignored_wires,
))
sys.exit(1)
else:
print('{} All errors were via ignored_wires {}'.format(
datetime.datetime.now(),
args.ignored_wires,
))
if __name__ == '__main__':
main()
main()