mirror of https://github.com/openXC7/prjxray.git
Merge pull request #549 from litghost/accelerate_post_processing
Refactor 074 post-processing
commit f61ccd1f2a

@@ -9,9 +9,9 @@ set tile, an error will be generated.
 import argparse
 import prjxray.lib
+import prjxray.node_lookup
 import datetime
 import os.path
 import json
 import pyjson5 as json5
 import progressbar
 import multiprocessing

@@ -190,15 +190,12 @@ def get_sites(tile, site_pin_node_to_wires):
             })


-def read_json5(fname, nodes):
-    node_lookup = prjxray.lib.NodeLookup()
-    node_lookup.load_from_nodes(nodes)
+def read_json5(fname, database_file):
+    node_lookup = prjxray.node_lookup.NodeLookup(database_file)

     #print('{} Reading {} (in pid {})'.format(datetime.datetime.now(), fname, os.getpid()))
     with open(fname) as f:
         tile = json5.load(f)

     #print('{} Done reading {}'.format(datetime.datetime.now(), fname))
     def get_site_types():
         for site in tile['sites']:
             yield get_prototype_site(site)
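
The new signature carries only a path: each read_json5 call opens its own SQLite connection through prjxray.node_lookup.NodeLookup instead of receiving a pre-loaded node map. A minimal sketch of the new calling convention (the file paths below are hypothetical placeholders; the return tuple matches the function's return statement):

# Hedged sketch, not part of the commit; paths are hypothetical.
fname, tile, site_types, sites, pips, wires = read_json5(
    'build/specimen_001/tile_CLBLL_L_X2Y3.json5',  # one tile dump (JSON5)
    'build/output/nodes.db')                       # SQLite node<->wire database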

@@ -219,22 +216,21 @@ def read_json5(fname, nodes):
     return fname, tile, site_types, sites, pips, wires


-def reduce_tile(pool, site_types, tile_type, tile_instances, node_lookup):
+def reduce_tile(pool, site_types, tile_type, tile_instances, database_file):
     sites = None
     pips = None
     wires = set()

     with progressbar.ProgressBar(max_value=len(tile_instances)) as bar:
-        chunksize = 20
+        chunksize = 1
         if len(tile_instances) < chunksize * 2:
             iter = map(
-                lambda file: read_json5(file, node_lookup.nodes),
-                tile_instances)
+                lambda file: read_json5(file, database_file), tile_instances)
         else:
             print(
                 '{} Using pool.imap_unordered'.format(datetime.datetime.now()))
             iter = pool.imap_unordered(
-                functools.partial(read_json5, nodes=node_lookup.nodes),
+                functools.partial(read_json5, database_file=database_file),
                 tile_instances,
                 chunksize=chunksize,
             )
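
The dispatch itself is the standard functools.partial + Pool.imap_unordered pattern; only the bound argument changes from an in-memory node map to a database path. As a standalone illustration of that pattern (placeholder worker and inputs, not the fuzzer's code):

# Standalone illustration of the partial/imap_unordered pattern; the worker
# and inputs below are placeholders, not this fuzzer's real code.
import functools
import multiprocessing


def work(fname, database_file):
    # Stand-in for read_json5: every call receives the same database_file.
    return fname, database_file


if __name__ == '__main__':
    files = ['a.json5', 'b.json5', 'c.json5']
    with multiprocessing.Pool(processes=2) as pool:
        # functools.partial pins database_file; imap_unordered yields results
        # as workers finish rather than in submission order.
        for result in pool.imap_unordered(
                functools.partial(work, database_file='nodes.db'),
                files,
                chunksize=1):
            print(result)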

@@ -292,17 +288,16 @@ def main():
     tiles, nodes = prjxray.lib.read_root_csv(args.root_dir)

     print('{} Loading node<->wire mapping'.format(datetime.datetime.now()))
-    node_lookup = prjxray.lib.NodeLookup()
-    node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle')
-    if os.path.exists(node_lookup_file) and not args.ignore_cache:
-        node_lookup.load_from_file(node_lookup_file)
+    database_file = os.path.join(args.output_dir, 'nodes.db')
+    if os.path.exists(database_file) and not args.ignore_cache:
+        node_lookup = prjxray.node_lookup.NodeLookup(database_file)
     else:
-        node_lookup.load_from_root_csv(nodes)
-        node_lookup.save_to_file(node_lookup_file)
+        node_lookup = prjxray.node_lookup.NodeLookup(database_file)
+        node_lookup.build_database(nodes=nodes, tiles=tiles)

     site_types = {}

-    processes = min(multiprocessing.cpu_count(), 10)
+    processes = multiprocessing.cpu_count()
     print('Running {} processes'.format(processes))
     pool = multiprocessing.Pool(processes=processes)
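
main() now keys its cache on the nodes.db file: build once with build_database, then reopen it on later runs unless the cache is ignored. The same build-once/reuse pattern in isolation (a sketch; fill_database is a hypothetical stand-in, not the commit's exact logic or error handling):

# Hedged sketch of the build-once / reuse-later SQLite cache pattern.
import os
import sqlite3


def fill_database(conn):
    # Hypothetical stand-in for NodeLookup.build_database.
    conn.execute("CREATE TABLE node(pkey INTEGER PRIMARY KEY, name TEXT);")
    conn.commit()


def open_node_cache(database_file, ignore_cache=False):
    if ignore_cache and os.path.exists(database_file):
        os.remove(database_file)  # force a rebuild on this run
    needs_build = not os.path.exists(database_file)
    conn = sqlite3.connect(database_file)  # creates the file if missing
    if needs_build:
        fill_database(conn)
    return conn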

@@ -320,7 +315,7 @@ def main():
             '{} Generating reduced tile for {}'.format(
                 datetime.datetime.now(), tile_type))
         reduced_tile = reduce_tile(
-            pool, site_types, tile_type, tiles[tile_type], node_lookup)
+            pool, site_types, tile_type, tiles[tile_type], database_file)
         for site_type in site_types:
             with open(os.path.join(
                     args.output_dir, 'tile_type_{}_site_type_{}.json'.format(

New file: prjxray/node_lookup.py
@@ -0,0 +1,112 @@
+import sqlite3
+import progressbar
+import pyjson5 as json5
+import os.path
+
+
+def create_tables(conn):
+    c = conn.cursor()
+
+    c.execute(
+        """CREATE TABLE tile(
+          pkey INTEGER PRIMARY KEY,
+          name TEXT
+        );""")
+    c.execute(
+        """CREATE TABLE node(
+          pkey INTEGER PRIMARY KEY,
+          name TEXT
+        );""")
+    c.execute(
+        """CREATE TABLE wire(
+          pkey INTEGER PRIMARY KEY,
+          name TEXT,
+          node_pkey INTEGER,
+          tile_pkey INTEGER,
+          FOREIGN KEY(node_pkey) REFERENCES node(pkey),
+          FOREIGN KEY(tile_pkey) REFERENCES tile(pkey)
+        );""")
+
+    conn.commit()
+
+
+class NodeLookup(object):
+    def __init__(self, database):
+        self.conn = sqlite3.connect(database)
+
+    def build_database(self, nodes, tiles):
+        create_tables(self.conn)
+
+        c = self.conn.cursor()
+        tile_names = []
+        for tile_type in tiles:
+            for tile in tiles[tile_type]:
+                tile_names.append(tile)
+
+        tile_pkeys = {}
+        for tile_file in progressbar.progressbar(tile_names):
+            # build/specimen_001/tile_DSP_L_X34Y145.json5
+            root, _ = os.path.splitext(os.path.basename(tile_file))
+            tile = root[5:]
+            c.execute("INSERT INTO tile(name) VALUES (?);", (tile, ))
+            tile_pkeys[tile] = c.lastrowid
+
+        nodes_processed = set()
+        for node in progressbar.progressbar(nodes):
+            with open(node) as f:
+                node_wires = json5.load(f)
+                assert node_wires['node'] not in nodes_processed
+                nodes_processed.add(node_wires['node'])
+
+                c.execute(
+                    "INSERT INTO node(name) VALUES (?);",
+                    (node_wires['node'], ))
+                node_pkey = c.lastrowid
+
+                for wire in node_wires['wires']:
+                    tile = wire['wire'].split('/')[0]
+
+                    tile_pkey = tile_pkeys[tile]
+                    c.execute(
+                        """
+INSERT INTO wire(name, tile_pkey, node_pkey) VALUES (?, ?, ?);""",
+                        (wire['wire'], tile_pkey, node_pkey))
+
+        self.conn.commit()
+
+        c = self.conn.cursor()
+        c.execute("CREATE INDEX tile_names ON tile(name);")
+        c.execute("CREATE INDEX node_names ON node(name);")
+        c.execute("CREATE INDEX wire_node_tile ON wire(node_pkey, tile_pkey);")
+        c.execute("CREATE INDEX wire_tile ON wire(tile_pkey);")
+        self.conn.commit()
+
+    def site_pin_node_to_wires(self, tile, node):
+        if node is None:
+            return
+
+        c = self.conn.cursor()
+        c.execute(
+            """
+WITH
+  the_tile(tile_pkey) AS (SELECT pkey AS tile_pkey FROM tile WHERE name = ?),
+  the_node(node_pkey) AS (SELECT pkey AS node_pkey FROM node WHERE name = ?)
+SELECT wire.name FROM wire
+INNER JOIN the_tile ON the_tile.tile_pkey = wire.tile_pkey
+INNER JOIN the_node ON the_node.node_pkey = wire.node_pkey;
+            """, (tile, node))
+
+        for row in c:
+            yield row[0][len(tile) + 1:]
+
+    def wires_for_tile(self, tile):
+        c = self.conn.cursor()
+        c.execute(
+            """
+WITH
+  the_tile(tile_pkey) AS (SELECT pkey AS tile_pkey FROM tile WHERE name = ?)
+SELECT wire.name FROM wire
+INNER JOIN the_tile ON the_tile.tile_pkey = wire.tile_pkey;
+            """, (tile, ))
+        for row in c:
+            yield row[0][len(tile) + 1:]
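
Putting the new module together, a hedged usage sketch (the paths, tile name, and node name below are hypothetical placeholders, not values from this commit):

# Hedged usage sketch; all paths and names are hypothetical placeholders.
from prjxray.node_lookup import NodeLookup

tiles = {'DSP_L': ['build/specimen_001/tile_DSP_L_X34Y145.json5']}
nodes = ['build/specimen_001/node_SOME_NODE.json5']

lookup = NodeLookup('nodes.db')
lookup.build_database(nodes=nodes, tiles=tiles)  # one-time build plus indexes

# wires_for_tile yields wire names with the leading 'TILE/' prefix stripped
# by the row[0][len(tile) + 1:] slice above.
for wire in lookup.wires_for_tile('DSP_L_X34Y145'):
    print(wire)

# site_pin_node_to_wires restricts the same join to wires of a single node.
for wire in lookup.site_pin_node_to_wires('DSP_L_X34Y145', 'SOME_NODE'):
    print(wire)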