mirror of https://github.com/openXC7/prjxray.git
scripts: use open safe file class
Signed-off-by: Alessandro Comodi <acomodi@antmicro.com>
This commit is contained in:

parent 6e026bf30e
commit 1bd8142625
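In outline, the commit moves the `OpenSafeFile` locking context manager to the top of `prjxray/util.py`, repairs its context-manager protocol, and converts file accesses throughout the scripts from bare `open()` to it. The typical call-site change follows this schematic before/after sketch (the path name here is a placeholder, not from the commit):

    # before
    with open(path) as f:
        data = f.read()

    # after
    from prjxray.util import OpenSafeFile

    with OpenSafeFile(path) as f:
        data = f.read()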
@@ -8,6 +8,9 @@
 # https://opensource.org/licenses/ISC
 #
 # SPDX-License-Identifier: ISC
+
+from prjxray.util import OpenSafeFile
+

 class Bitfilter(object):
     def __init__(
             self, frames_to_include=None, frames_to_exclude=[],
@@ -10,7 +10,7 @@
 # SPDX-License-Identifier: ISC
 import json
 import os
-from prjxray import util
+from prjxray.util import block_type_s2i

 # Break frames into WORD_SIZE bit words.
 WORD_SIZE_BITS = 32

@@ -119,7 +119,7 @@ def addr_bits2word(block_type, top_bottom, cfg_row, cfg_col, minor_addr):
     """Convert a deconstructed address to a 32 bit word"""
     # https://www.xilinx.com/support/documentation/user_guides/ug470_7Series_Config.pdf
     ret = 0
-    ret |= util.block_type_s2i[block_type] << 23
+    ret |= block_type_s2i[block_type] << 23
     ret |= {"top": 0, "bottom": 1}[top_bottom] << 22
     ret |= cfg_row << 17
     ret |= cfg_col << 7
@@ -14,6 +14,8 @@ import pickle
 import re
 from collections import namedtuple

+from prjxray.util import OpenSafeFile
+

 def read_root_csv(root_dir):
     """ Reads root.csv from raw db directory.

@@ -24,7 +26,7 @@ def read_root_csv(root_dir):
     tiles = {}
     nodes = []

-    with open(os.path.join(root_dir, 'root.csv')) as f:
+    with OpenSafeFile(os.path.join(root_dir, 'root.csv')) as f:
         for d in csv.DictReader(f):
             if d['filetype'] == 'tile':
                 if d['subtype'] not in tiles:
@@ -123,17 +125,17 @@ class NodeLookup(object):
         import pyjson5 as json5
         import progressbar
         for node in progressbar.progressbar(nodes):
-            with open(node) as f:
+            with OpenSafeFile(node) as f:
                 node_wires = json5.load(f)
                 assert node_wires['node'] not in self.nodes
                 self.nodes[node_wires['node']] = node_wires['wires']

     def load_from_file(self, fname):
-        with open(fname, 'rb') as f:
+        with OpenSafeFile(fname, 'rb') as f:
             self.nodes = pickle.load(f)

     def save_to_file(self, fname):
-        with open(fname, 'wb') as f:
+        with OpenSafeFile(fname, 'wb') as f:
             pickle.dump(self.nodes, f)

     def site_pin_node_to_wires(self, tile, node):
@@ -54,6 +54,8 @@ import json
 import numpy as np
 import numpy.linalg as linalg

+from prjxray.util import OpenSafeFile
+
 # =============================================================================

@@ -83,7 +85,7 @@ def load_data(file_name, tagfilter=lambda tag: True, address_map=None):
     segdata = None
     all_segdata = []

-    with open(file_name, "r") as fp:
+    with OpenSafeFile(file_name, "r") as fp:
         for line in fp.readlines():
             line = line.strip()

@@ -174,7 +176,7 @@ def write_segbits(file_name, all_tags, all_bits, W):

         lines.append(all_tags[r] + " " + " ".join(bits) + "\n")

-    with open(file_name, "w") as fp:
+    with OpenSafeFile(file_name, "w") as fp:
         for line in lines:
             fp.write(line)
@@ -702,7 +704,7 @@ def build_address_map(tilegrid_file):
     address_map = {}

     # Load tilegrid
-    with open(tilegrid_file, "r") as fp:
+    with OpenSafeFile(tilegrid_file, "r") as fp:
         tilegrid = json.load(fp)

     # Loop over tiles

@@ -982,7 +984,7 @@ def main():

     # Dump to CSV
     if args.x is not None:
-        with open(args.x, "w") as fp:
+        with OpenSafeFile(args.x, "w") as fp:
             dump_solution_to_csv(fp, tags_to_solve, bits_to_solve, X)

     # Dump results
@@ -13,6 +13,7 @@ import progressbar
 import pyjson5 as json5
 import os.path

+from prjxray.util import OpenSafeFile

 def create_tables(conn):
     c = conn.cursor()

@@ -63,7 +64,7 @@ class NodeLookup(object):

         nodes_processed = set()
         for node in progressbar.progressbar(nodes):
-            with open(node) as f:
+            with OpenSafeFile(node) as f:
                 node_wires = json5.load(f)
                 assert node_wires['node'] not in nodes_processed
                 nodes_processed.add(node_wires['node'])
@@ -24,7 +24,7 @@ tilegrid.json provides tile addresses
 '''

 import os, json, re
-from prjxray import util
+from prjxray.util import OpenSafeFile, get_db_root, get_fabric

 BLOCK_TYPES = set(('CLB_IO_CLK', 'BLOCK_RAM', 'CFG_CLB'))

@@ -85,12 +85,12 @@ class Segmaker:
     def __init__(self, bitsfile, verbose=None, db_root=None, fabric=None):
         self.db_root = db_root
         if self.db_root is None:
-            self.db_root = util.get_db_root()
+            self.db_root = get_db_root()
         assert self.db_root, "No db root specified."

         self.fabric = fabric
         if self.fabric is None:
-            self.fabric = util.get_fabric()
+            self.fabric = get_fabric()
         assert self.fabric, "No fabric specified."

         self.verbose = verbose if verbose is not None else os.getenv(
@@ -129,7 +129,7 @@ class Segmaker:

     def load_grid(self):
         '''Load self.grid holding tile addresses'''
-        with open(os.path.join(self.db_root, self.fabric, "tilegrid.json"),
+        with OpenSafeFile(os.path.join(self.db_root, self.fabric, "tilegrid.json"),
                   "r") as f:
             self.grid = json.load(f)
         assert "segments" not in self.grid, "Old format tilegrid.json"

@@ -152,7 +152,7 @@ class Segmaker:
         '''
         self.bits = dict()
         print("Loading bits from %s." % bitsfile)
-        with open(bitsfile, "r") as f:
+        with OpenSafeFile(bitsfile, "r") as f:
             for line in f:
                 # ex: bit_00020500_000_17
                 line = line.split("_")

@@ -446,7 +446,7 @@ class Segmaker:
             segments = self.segments_by_type[segtype]
             if segments:
                 print("Writing %s." % filename)
-                with open(filename, "w") as f:
+                with OpenSafeFile(filename, "w") as f:
                     for segname, segdata in sorted(segments.items()):
                         # seg 00020300_010
                         print("seg %s" % segname, file=f)
@@ -13,6 +13,7 @@ from collections import namedtuple
 import json
 from prjxray import lib
 from prjxray.timing import fast_slow_tuple_to_corners, RcElement
+from prjxray.util import OpenSafeFile

 TileDbs = namedtuple(
     'TileDbs', 'segbits block_ram_segbits ppips mask tile_type')

@@ -313,7 +314,7 @@ class Tile(object):
                     backward_timing=get_pip_timing(pip.get('dst_to_src')),
                 )

-            with open(self.tile_dbs.tile_type) as f:
+            with OpenSafeFile(self.tile_dbs.tile_type) as f:
                 tile_type = json.load(f)
                 assert self.tilename_upper == tile_type['tile_type']
                 self.wires = get_wires(tile_type['wires'])
@@ -10,8 +10,8 @@
 # SPDX-License-Identifier: ISC
 from collections import namedtuple
 from prjxray import bitstream
-from prjxray import util
 from prjxray.grid_types import BlockType
+from prjxray.util import OpenSafeFile
 import enum

@@ -84,22 +84,16 @@ class TileSegbits(object):
         self.feature_addresses = {}

         if tile_db.ppips is not None:
-            with open(tile_db.ppips) as f:
-                util.lock_file(f, 10)
+            with OpenSafeFile(tile_db.ppips) as f:
                 self.ppips = read_ppips(f)
-                util.unlock_file(f)

         if tile_db.segbits is not None:
-            with open(tile_db.segbits) as f:
-                util.lock_file(f, 10)
+            with OpenSafeFile(tile_db.segbits) as f:
                 self.segbits[BlockType.CLB_IO_CLK] = read_segbits(f)
-                util.unlock_file(f)

         if tile_db.block_ram_segbits is not None:
-            with open(tile_db.block_ram_segbits) as f:
-                util.lock_file(f, 10)
+            with OpenSafeFile(tile_db.block_ram_segbits) as f:
                 self.segbits[BlockType.BLOCK_RAM] = read_segbits(f)
-                util.unlock_file(f)

         for block_type in self.segbits:
             for feature in self.segbits[block_type]:
@@ -21,6 +21,7 @@ TileSegbitsAlias performs severals functions to achieve the alias:
 from prjxray import bitstream
 from prjxray.grid_types import Bits
 from prjxray.tile_segbits import read_ppips
+from prjxray.util import OpenSafeFile


 class TileSegbitsAlias(object):

@@ -67,7 +68,7 @@ class TileSegbitsAlias(object):
         self.ppips = {}

         if tile_db.ppips is not None:
-            with open(tile_db.ppips) as f:
+            with OpenSafeFile(tile_db.ppips) as f:
                 self.ppips = read_ppips(f)
         self.tile_segbits = db.get_tile_segbits(self.alias_tile_type)
prjxray/util.py (106 changed lines)
@@ -18,6 +18,49 @@ import yaml
 from .roi import Roi


+def timeout_handler(signum, frame):
+    raise Exception("ERROR TIMEOUT: could not lock file")
+
+
+class OpenSafeFile:
+    """
+    Opens a file in a thread-safe mode, allowing for safe read and writes
+    to a file that can potentially be modified by multiple processes at
+    the same time.
+    """
+
+    def __init__(self, name, mode="r", timeout=10):
+        self.name = name
+        self.mode = mode
+        self.timeout = timeout
+
+        self.fd = None
+
+    def __enter__(self):
+        self.fd = open(self.name, self.mode)
+        self.lock_file()
+        return self.fd
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.unlock_file()
+        self.fd.close()
+
+    def lock_file(self):
+        assert self.fd is not None
+        try:
+            signal.signal(signal.SIGALRM, timeout_handler)
+            signal.alarm(self.timeout)
+            fcntl.flock(self.fd.fileno(), fcntl.LOCK_EX)
+            signal.alarm(0)
+        except Exception as e:
+            print(f"{e}: {self.name}")
+            exit(1)
+
+    def unlock_file(self):
+        assert self.fd is not None
+        fcntl.flock(self.fd.fileno(), fcntl.LOCK_UN)
+
+
 def get_db_root():
     # Used during tilegrid db bootstrap
     ret = os.getenv("XRAY_DATABASE_ROOT", None)
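The class added above is the core of the commit: every call site converted below wraps the file in `OpenSafeFile`, which takes an exclusive `fcntl.flock` lock in `__enter__` and releases it in `__exit__`, aborting via a `SIGALRM` timeout if the lock cannot be acquired. A minimal usage sketch (the path is a hypothetical placeholder, not from the commit):

    from prjxray.util import OpenSafeFile

    # The exclusive lock is held for the duration of the "with" block;
    # another process opening the same path through OpenSafeFile blocks
    # in flock() until release, or exits after `timeout` seconds.
    with OpenSafeFile("/tmp/segbits_example.db", "r", timeout=10) as f:
        for line in f:
            print(line.rstrip())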
@@ -44,7 +87,7 @@ def get_part_information(db_root, part):
     filename = os.path.join(db_root, "mapping", "parts.yaml")
     assert os.path.isfile(filename), \
         "Mapping file {} does not exists".format(filename)
-    with open(filename, 'r') as stream:
+    with OpenSafeFile(filename, 'r') as stream:
         part_mapping = yaml.load(stream, Loader=yaml.FullLoader)
     part = part_mapping.get(part, None)
     assert part, "Part {} not found in {}".format(part, part_mapping)

@@ -53,7 +96,7 @@ def get_part_information(db_root, part):

 def set_part_information(db_root, information):
     filename = os.path.join(db_root, "mapping", "parts.yaml")
-    with open(filename, 'w+') as stream:
+    with OpenSafeFile(filename, 'w+') as stream:
         yaml.dump(information, stream)
     assert os.path.isfile(filename), \
         "Mapping file {} does not exists".format(filename)

@@ -63,7 +106,7 @@ def get_part_resources(file_path, part):
     filename = os.path.join(file_path, "resources.yaml")
     assert os.path.isfile(filename), \
         "Mapping file {} does not exists".format(filename)
-    with open(filename, 'r') as stream:
+    with OpenSafeFile(filename, 'r') as stream:
         res_mapping = yaml.load(stream, Loader=yaml.FullLoader)
     res = res_mapping.get(part, None)
     assert res, "Part {} not found in {}".format(part, part_mapping)

@@ -72,7 +115,7 @@ def get_part_resources(file_path, part):

 def set_part_resources(file_path, information):
     filename = os.path.join(file_path, "resources.yaml")
-    with open(filename, 'w+') as stream:
+    with OpenSafeFile(filename, 'w+') as stream:
         yaml.dump(information, stream)
     assert os.path.isfile(filename), \
         "Mapping file {} does not exists".format(filename)

@@ -83,7 +126,7 @@ def get_fabric_for_part(db_root, part):
     assert os.path.isfile(filename), \
         "Mapping file {} does not exists".format(filename)
     part = get_part_information(db_root, part)
-    with open(filename, 'r') as stream:
+    with OpenSafeFile(filename, 'r') as stream:
         device_mapping = yaml.load(stream, Loader=yaml.FullLoader)
     device = device_mapping.get(part['device'], None)
     assert device, "Device {} not found in {}".format(

@@ -95,7 +138,7 @@ def get_devices(db_root):
     filename = os.path.join(db_root, "mapping", "devices.yaml")
     assert os.path.isfile(filename), \
         "Mapping file {} does not exists".format(filename)
-    with open(filename, 'r') as stream:
+    with OpenSafeFile(filename, 'r') as stream:
         device_mapping = yaml.load(stream, Loader=yaml.FullLoader)
     return device_mapping

@@ -104,7 +147,7 @@ def get_parts(db_root):
     filename = os.path.join(db_root, "mapping", "parts.yaml")
     assert os.path.isfile(filename), \
         "Mapping file {} does not exists".format(filename)
-    with open(filename, 'r') as stream:
+    with OpenSafeFile(filename, 'r') as stream:
         part_mapping = yaml.load(stream, Loader=yaml.FullLoader)
     return part_mapping
@@ -255,11 +298,9 @@ def parse_db_line(line):


 def parse_db_lines(fn):
-    with open(fn, "r") as f:
-        lock_file(f, 10)
+    with OpenSafeFile(fn, "r") as f:
         for line in f:
             yield line, parse_db_line(line)
-        unlock_file(f)


 def write_db_lines(fn, entries, track_origin=False):

@@ -272,11 +313,9 @@ def write_db_lines(fn, entries, track_origin=False):
         new_line = " ".join([tag] + sorted(bits))
         new_lines.append(new_line)

-    with open(fn, "w") as f:
-        lock_file(f, 10)
+    with OpenSafeFile(fn, "w") as f:
         for line in sorted(new_lines):
             print(line, file=f)
-        unlock_file(f)


 def parse_tagbit(x):
@@ -409,44 +448,3 @@ def add_bool_arg(parser, yes_arg, default=False, **kwargs):
     parser.add_argument(
         '--no-' + dashed, dest=dest, action='store_false', **kwargs)
-
-
-def timeout_handler(signum, frame):
-    raise Exception("ERROR: could not lock file!")
-
-
-class OpenSafeFile:
-    """
-    Opens a file in a thread-safe mode, allowing for safe read and writes
-    to a file that can potentially be modified by multiple processes at
-    the same time.
-    """
-
-    def __init__(self, name, mode, timeout=10):
-        self.name = name
-        self.mode = mode
-        self.timeout = timeout
-
-        self.fd = None
-
-    def __enter__(self):
-        self.fd = open(self.name, self.mode)
-        self.lock_file()
-        return self.fd
-
-    def __exit__(self):
-        self.unlock_file()
-        self.fd.close()
-
-    def lock_file(self):
-        assert self.fd is not None
-        try:
-            signal.signal(signal.SIGALRM, timeout_handler)
-            signal.alarm(timeout)
-            fcntl.flock(self.fd.fileno(), fcntl.LOCK_EX)
-            signal.alarm(0)
-        except Exception as e:
-            print(e)
-            exit(1)
-
-    def unlock_file(self):
-        assert self.fd is not None
-        fcntl.flock(self.fd.fileno(), fcntl.LOCK_UN)
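Comparing the deleted copy above with the one added at the top of the file shows the relocation is also a repair: the old `__exit__(self)` lacked the three exception arguments Python passes to every context manager, `signal.alarm(timeout)` referenced an undefined name where `self.timeout` was intended, and `mode` had no default. A small self-contained sketch (not part of the commit) of why the old exit signature could not work:

    class Broken:
        def __enter__(self):
            return self

        def __exit__(self):  # old signature: missing exc_type, exc_value, traceback
            pass


    class Fixed:
        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, traceback):  # protocol-correct
            return False  # do not suppress exceptions


    try:
        with Broken():
            pass
    except TypeError as e:
        # Python always calls __exit__ with three arguments, so leaving
        # any "with Broken():" block raises TypeError.
        print("broken:", e)

    with Fixed():
        print("fixed: exits cleanly")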
@@ -16,9 +16,10 @@ import contextlib
 import os
 import fasm
 import fasm.output
-from prjxray.db import Database
 from prjxray import fasm_disassembler
 from prjxray import bitstream
+from prjxray.db import Database
+from prjxray.util import OpenSafeFile
 import subprocess
 import tempfile

@@ -41,7 +42,7 @@ def bits_to_fasm(db_root, part, bits_file, verbose, canonical):
     grid = db.grid()
     disassembler = fasm_disassembler.FasmDisassembler(db)

-    with open(bits_file) as f:
+    with OpenSafeFile(bits_file) as f:
         bitdata = bitstream.load_bitdata(f)

     model = fasm.output.merge_and_sort(
@@ -21,6 +21,7 @@ from prjxray.timing import Outpin, Inpin, Wire, Buffer, \
     PassTransistor, IntristicDelay, RcElement, PvtCorner
 from prjxray.math_models import ExcelMathModel
 from prjxray.db import Database
+from prjxray.util import OpenSafeFile
 from prjxray import util

 FAST = PvtCorner.FAST

@@ -481,7 +482,7 @@ def add_net(wb, net, timing_lookup):
 def build_wire_filter(wire_filter):
     wires_to_include = set()

-    with open(wire_filter) as f:
+    with OpenSafeFile(wire_filter) as f:
         for l in f:
             wire = l.strip()
             if not wire:

@@ -514,7 +515,7 @@ def main():

     args = parser.parse_args()

-    with open(args.timing_json) as f:
+    with OpenSafeFile(args.timing_json) as f:
         timing = json.load(f)

     db = Database(args.db_root, args.part)
@@ -218,7 +218,7 @@ def read_segbits(fn_in):
     lines = []
     llast = None

-    with open(fn_in, "r") as f:
+    with util.OpenSafeFile(fn_in, "r") as f:
         for line in f:
             # Hack: skip duplicate lines
             # This happens while merging a new multibit entry

@@ -327,7 +327,7 @@ def update_mask(db_root, mask_db, src_dbs, offset=0):
     mask_db_file = "%s/mask_%s.db" % (db_root, mask_db)

     if os.path.exists(mask_db_file):
-        with open(mask_db_file, "r") as f:
+        with util.OpenSafeFile(mask_db_file, "r") as f:
             for line in f:
                 line = line.split()
                 assert len(line) == 2

@@ -340,7 +340,7 @@ def update_mask(db_root, mask_db, src_dbs, offset=0):
         if not os.path.exists(seg_db_file):
             continue

-        with open(seg_db_file, "r") as f:
+        with util.OpenSafeFile(seg_db_file, "r") as f:
             for line in f:
                 line = line.split()
                 for bit in line[1:]:

@@ -353,7 +353,7 @@ def update_mask(db_root, mask_db, src_dbs, offset=0):
                     bits.add(bit)

     if len(bits) > 0:
-        with open(mask_db_file, "w") as f:
+        with util.OpenSafeFile(mask_db_file, "w") as f:
             for bit in sorted(bits):
                 print("bit %s" % bit, file=f)

@@ -361,7 +361,8 @@ def update_mask(db_root, mask_db, src_dbs, offset=0):
 def load_zero_db(fn):
     # Remove comments and convert to list of lines
     ret = []
-    for l in open(fn, "r"):
+    with util.OpenSafeFile(fn, "r") as f:
+        for l in f:
             pos = l.find("#")
             if pos >= 0:
                 l = l[0:pos]

@@ -535,11 +536,9 @@ def update_seg_fns(
             )
             changes += new_changes

-    with open(fn_out, "w") as f:
-        util.lock_file(f, 10)
+    with util.OpenSafeFile(fn_out, "w") as f:
         for line in sorted(lines):
             print(line, file=f)
-        util.unlock_file(f)

     if changes is not None:
         seg_files += 1

@@ -654,7 +653,7 @@ def load_tag_groups(file_name):
     tag_groups = []

     # Load tag group specifications
-    with open(file_name, "r") as fp:
+    with util.OpenSafeFile(file_name, "r") as fp:
         for line in fp:
             line = line.strip()
@@ -23,6 +23,7 @@ from collections import defaultdict
 from prjxray import fasm_assembler, util
 from prjxray.db import Database
 from prjxray.roi import Roi
+from prjxray.util import OpenSafeFile

 import sys

@@ -133,11 +134,11 @@ def run(
     bank_to_tile = defaultdict(lambda: set())

     if part is not None:
-        with open(os.path.join(db_root, part, "package_pins.csv"), "r") as fp:
+        with OpenSafeFile(os.path.join(db_root, part, "package_pins.csv"), "r") as fp:
             reader = csv.DictReader(fp)
             package_pins = [l for l in reader]

-        with open(os.path.join(db_root, part, "part.json"), "r") as fp:
+        with OpenSafeFile(os.path.join(db_root, part, "part.json"), "r") as fp:
             part_data = json.load(fp)

         for bank, loc in part_data["iobanks"].items():

@@ -167,7 +168,7 @@ def run(

     extra_features = []
     if roi:
-        with open(roi) as f:
+        with OpenSafeFile(roi) as f:
             roi_j = json.load(f)
             x1 = roi_j['info']['GRID_X_MIN']
             x2 = roi_j['info']['GRID_X_MAX']
@@ -24,6 +24,8 @@ import argparse
 import os
 import re

+from prjxray.util import OpenSafeFile
+
 # =============================================================================

@@ -33,7 +35,7 @@ def read_pips_from_tile(tile_file):
     PIP name strings. Names are formatted as <dst_wire>.<src_wire>
     """

-    with open(tile_file, "r") as fp:
+    with OpenSafeFile(tile_file, "r") as fp:
         root = json.load(fp)
         pips = root["pips"]

@@ -53,7 +55,7 @@ def read_ppips(ppips_file):
     """
     ppips = {}

-    with open(ppips_file, "r") as fp:
+    with OpenSafeFile(ppips_file, "r") as fp:
         for line in fp.readlines():
             line = line.split()
             if len(line) == 2:

@@ -70,7 +72,7 @@ def read_segbits(segbits_file):
     """
     segbits = []

-    with open(segbits_file, "r") as fp:
+    with OpenSafeFile(segbits_file, "r") as fp:
         for line in fp.readlines():
             line = line.split()
             if len(line) > 1:
@@ -21,6 +21,8 @@ import argparse
 import re
 import itertools

+from prjxray.util import OpenSafeFile
+
 # =============================================================================

@@ -34,7 +36,7 @@ def load_tag_groups(file_name):
     tag_groups = []

     # Load tag group specifications
-    with open(file_name, "r") as fp:
+    with OpenSafeFile(file_name, "r") as fp:
         for line in fp:
             line = line.strip()

@@ -89,7 +91,7 @@ def load_segbits(file_name):

     segbits = {}

-    with open(file_name, "r") as fp:
+    with OpenSafeFile(file_name, "r") as fp:
         for line in fp:
             line = line.strip()
             fields = line.split()

@@ -114,7 +116,7 @@ def save_segbits(file_name, segbits):
     Save segbits to a .db or .rdb file
     """

-    with open(file_name, "w") as fp:
+    with OpenSafeFile(file_name, "w") as fp:
         for tag, bits in segbits.items():

             if isinstance(bits, str):
@@ -10,7 +10,7 @@
 # SPDX-License-Identifier: ISC

 import sys, os, re
-from prjxray import util
+from prjxray.util import OpenSafeFile, parse_db_lines, write_db_lines


 def index_masks(fn_in, groups_in):

@@ -21,7 +21,7 @@ def index_masks(fn_in, groups_in):
         groups[group] = set()

     # Index bits
-    for line, (tag, bits, mode) in util.parse_db_lines(fn_in):
+    for line, (tag, bits, mode) in parse_db_lines(fn_in):
         assert not mode, "Unresolved tag: %s" % (line, )
         prefix = tag[0:tag.rfind(".")]
         group = groups.get(prefix, None)

@@ -42,7 +42,7 @@ def index_masks(fn_in, groups_in):
 def apply_masks(fn_in, groups):
     """Add 0 entries ("!") to .db entries based on groups definition"""
     new_db = {}
-    for line, (tag, bits, mode) in util.parse_db_lines(fn_in):
+    for line, (tag, bits, mode) in parse_db_lines(fn_in):
         assert not mode, "Unresolved tag: %s" % (line, )
         prefix = tag[0:tag.rfind(".")]
         group = groups.get(prefix, None)

@@ -58,7 +58,8 @@ def apply_masks(fn_in, groups):

 def load_groups(fn):
     ret = []
-    for l in open(fn, "r"):
+    with OpenSafeFile(fn, "r") as f:
+        for l in f:
             ret.append(l.strip())
     return ret

@@ -67,7 +68,7 @@ def run(fn_in, fn_out, groups_fn, verbose=False):
     groups_in = load_groups(groups_fn)
     groups = index_masks(fn_in, groups_in)
     new_db = apply_masks(fn_in, groups)
-    util.write_db_lines(fn_out, new_db)
+    write_db_lines(fn_out, new_db)


 def main():
@@ -15,6 +15,7 @@ import os
 import parse as format_parser
 import subprocess
 import sys

 """Module for generating the Info.md file found in the database directory."""

 info_md_header = """
@@ -23,6 +23,8 @@ import re

 from collections import defaultdict

+from prjxray.util import OpenSafeFile
+

 def main():

@@ -49,7 +51,7 @@ def main():
     args = parser.parse_args()

     # Load pin dump
-    with open(args.csv, "r") as fp:
+    with OpenSafeFile(args.csv, "r") as fp:
         pin_dump = list(csv.DictReader(fp))

     # Group pins into ports

@@ -103,7 +105,7 @@ def main():
             port["width"] += 1

     # Write pin ports to a JSON file
-    with open(args.json, "w") as fp:
+    with OpenSafeFile(args.json, "w") as fp:
         json.dump(ports, fp, indent=1, sort_keys=True)
@@ -12,6 +12,7 @@
 import json
 import argparse

+from prjxray.util import OpenSafeFile

 def get_elems_count(timings, slice, site, bel_type):
     combinational = 0

@@ -113,7 +114,7 @@ def produce_sdf(timings, outdir):
 """
 )"""

-    with open(outdir + '/' + slice + '.sdf', "w") as fp:
+    with OpenSafeFile(outdir + '/' + slice + '.sdf', "w") as fp:
         fp.write(sdf)

@@ -125,7 +126,7 @@ def main():

     args = parser.parse_args()

-    with open(args.json, 'r') as fp:
+    with OpenSafeFile(args.json, 'r') as fp:
         timings = json.load(fp)

     produce_sdf(timings, args.sdf)
@@ -24,6 +24,8 @@ import sys
 import argparse
 import re

+from prjxray.util import OpenSafeFile
+
 # =============================================================================

@@ -32,7 +34,7 @@ def load_just_bits(file_name):
     Read bits from a .db or .rdb file. Ignores tags and bit values.
     """

-    with open(file_name, "r") as fp:
+    with OpenSafeFile(file_name, "r") as fp:
         lines = fp.readlines()

     bits = set()
@@ -10,11 +10,12 @@
 # SPDX-License-Identifier: ISC

 import sys, re
-from prjxray import util
+from prjxray.util import OpenSafeFile, db_root_arg, parse_db_line


 def run(fnin, fnout=None, strict=False, verbose=False):
-    lines = open(fnin, 'r').read().split('\n')
+    with OpenSafeFile(fnin) as f:
+        lines = f.read().split('\n')
     tags = dict()
     bitss = dict()
     for line in lines:

@@ -24,7 +25,7 @@ def run(fnin, fnout=None, strict=False, verbose=False):
         # TODO: figure out what to do with masks
         if line.startswith("bit "):
             continue
-        tag, bits, mode, _ = util.parse_db_line(line)
+        tag, bits, mode, _ = parse_db_line(line)
         if strict:
             if mode != "always":
                 assert not mode, "strict: got ill defined line: %s" % (line, )

@@ -39,7 +40,7 @@ def run(fnin, fnout=None, strict=False, verbose=False):
         bitss[bits] = tag

     if fnout:
-        with open(fnout, "w") as fout:
+        with OpenSafeFile(fnout, "w") as fout:
             for line in sorted(lines):
                 line = line.strip()
                 if line == '':

@@ -53,7 +54,7 @@ def main():
     parser = argparse.ArgumentParser(
         description="Parse a db file, checking for consistency")

-    util.db_root_arg(parser)
+    db_root_arg(parser)
     parser.add_argument('--verbose', action='store_true', help='')
     parser.add_argument(
         '--strict',
@@ -11,7 +11,9 @@

 import argparse
 import json

 from sdf_timing import sdfparse
+from prjxray.util import OpenSafeFile
+

 def merge(timings_list, site):

@@ -59,15 +61,16 @@ def main():
     timings_list = list()

     for sdf in args.sdfs:
-        with open(sdf, 'r') as fp:
+        with OpenSafeFile(sdf, 'r') as fp:
             timing = sdfparse.parse(fp.read())
         timings_list.append(timing)

     merged_sdf = merge(timings_list, args.site)
-    open(args.out, 'w').write(sdfparse.emit(merged_sdf, timescale='1ns'))
+    with OpenSafeFile(args.out, 'w') as fp:
+        fp.write(sdfparse.emit(merged_sdf, timescale='1ns'))

     if args.json is not None:
-        with open(args.json, 'w') as fp:
+        with OpenSafeFile(args.json, 'w') as fp:
             json.dump(merged_sdf, fp, indent=4, sort_keys=True)
@@ -19,7 +19,7 @@ import sys, os, json, re
 import copy
 from prjxray import bitstream
 from prjxray import db as prjxraydb
-from prjxray import util
+from prjxray.util import OpenSafeFile, parse_tagbit, db_root_arg, part_arg


 class NoDB(Exception):

@@ -39,7 +39,7 @@ def process_db(db, tile_type, process, verbose):
     verbose and print("process_db(%s): %s" % (tile_type, fns))
     for fn in fns:
         if fn:
-            with open(fn, "r") as f:
+            with OpenSafeFile(fn, "r") as f:
                 for line in f:
                     process(line)

@@ -61,7 +61,7 @@ def get_database(db, tile_type, bit_only=False, verbose=False):
                 return
             tagbits = []
         else:
-            tagbits = [util.parse_tagbit(x) for x in parts[1:]]
+            tagbits = [parse_tagbit(x) for x in parts[1:]]

         tags.append(list([name] + tagbits))

@@ -430,7 +430,7 @@ def tile_segnames(tiles):

 def load_tiles(db_root, part):
     # TODO: Migrate to new tilegrid format via library.
-    with open("%s/%s/tilegrid.json" % (db_root, part), "r") as f:
+    with OpenSafeFile("%s/%s/tilegrid.json" % (db_root, part), "r") as f:
         tiles = json.load(f)
     return tiles

@@ -449,7 +449,8 @@ def run(
     db = prjxraydb.Database(db_root, part)
     tiles = load_tiles(db_root, part)
     segments = mk_segments(tiles)
-    bitdata = bitstream.load_bitdata2(open(bits_file, "r"))
+    with OpenSafeFile(bits_file) as f:
+        bitdata = bitstream.load_bitdata2(f)

     if flag_unknown_bits:
         print_unknown_bits(tiles, bitdata)

@@ -486,8 +487,8 @@ def main():
     parser = argparse.ArgumentParser(
         description="Decode bits within a tile's address space")

-    util.db_root_arg(parser)
-    util.part_arg(parser)
+    db_root_arg(parser)
+    part_arg(parser)
     parser.add_argument('--verbose', action='store_true', help='')
     parser.add_argument(
         '-z',
@@ -21,6 +21,8 @@ import sys

 import itertools

+from prjxray.util import OpenSafeFile
+
 # =============================================================================

@@ -69,7 +71,7 @@ def load_and_sort_segbits(file_name, tagmap=lambda tag: tag):

     # Load segbits
     segbits = {}
-    with open(file_name, "r") as fp:
+    with OpenSafeFile(file_name, "r") as fp:
         lines = fp.readlines()

     # Parse lines
@@ -12,6 +12,7 @@
 import sys, os, json
 import pickle

+from prjxray.util import OpenSafeFile

 class MergeFind:
     def __init__(self):

@@ -34,11 +35,11 @@ class MergeFind:
 def db_gen():
     print("Reading database..")

-    with open("%s/%s/tilegrid.json" % (os.getenv("XRAY_DATABASE_DIR"),
+    with OpenSafeFile("%s/%s/tilegrid.json" % (os.getenv("XRAY_DATABASE_DIR"),
                                        os.getenv("XRAY_DATABASE")), "r") as f:
         tilegrid = json.load(f)

-    with open("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
+    with OpenSafeFile("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
                                        os.getenv("XRAY_DATABASE")), "r") as f:
         tileconn = json.load(f)

@@ -81,7 +82,7 @@ def db_gen():
     reverse_node_node = dict()

     for tile_type in ["int_l", "int_r"]:
-        with open("%s/%s/segbits_%s.db" %
+        with OpenSafeFile("%s/%s/segbits_%s.db" %
                   (os.getenv("XRAY_DATABASE_DIR"), os.getenv("XRAY_DATABASE"),
                    tile_type), "r") as f:
             for line in f:
@@ -65,8 +65,8 @@ import sys
 import json
 import utils.xjson as xjson
 import utils.cmp as cmp
-from prjxray.util import lock_file, unlock_file

+from prjxray.util import OpenSafeFile

 def split_all(s, chars):
     """Split on multiple character values.

@@ -323,7 +323,8 @@ def sort_db(pathname):
     else:
         return False

-    lines = open(pathname).readlines()
+    with OpenSafeFile(pathname) as f:
+        lines = f.readlines()

     tosort = []
     for l in lines:

@@ -334,22 +335,10 @@ def sort_db(pathname):

     tosort.sort(key=cmp.cmp_key)

-    # Make sure the sort is stable
-    #for i in range(0, 4):
-    #    copy = tosort.copy()
-    #    random.shuffle(copy)
-    #    copy.sort(key=cmp.cmp_key)
-    #    assert len(copy) == len(tosort)
-    #    for i in range(0, len(copy)):
-    #        assert copy[i] == tosort[i], "\n%r\n != \n%r\n" % (
-    #            copy[i], tosort[i])
-
     with open(pathname, 'w') as f:
-        lock_file(f, 10)
         for _, l in tosort:
             f.write(l)
             f.write('\n')
-        unlock_file(f)

     return True

@@ -387,13 +376,15 @@ def sort_csv(pathname):

 def sort_json(filename):
     """Sort a XXX.json file."""

     try:
-        d = json.load(open(filename))
+        with OpenSafeFile(filename) as f:
+            d = json.load(f)
     except json.JSONDecodeError as e:
         print(e)
         return False

-    with open(filename, 'w') as f:
+    with OpenSafeFile(filename, 'w') as f:
         xjson.pprint(f, d)

     return True

@@ -401,17 +392,15 @@ def sort_json(filename):

 def sort_db_text(n):
     rows = []
-    with open(n) as f:
+    with OpenSafeFile(n) as f:
         for l in f:
             rows.append(([extract_num(s) for s in l.split()], l))

     rows.sort(key=lambda i: i[0])

-    with open(n, 'w') as f:
-        lock_file(f, 10)
+    with OpenSafeFile(n, 'w') as f:
         for l in rows:
             f.write(l[-1])
-        unlock_file(f)

     return True
@@ -25,6 +25,8 @@ a valid bitstream.
 import argparse
 from io import StringIO

+from prjxray.util import OpenSafeFile
+
 conf_regs = {
     0: "CRC",
     1: "FAR_MAJ",

@@ -124,7 +126,7 @@ class Bitstream:
         self.curr_fdri_write_len = 0
         self.curr_crc_check = 0
         self.fdri_in_progress = False
-        with open(file_name, "rb") as f:
+        with OpenSafeFile(file_name, "rb") as f:
             self.bytes = f.read()
         pos, self.header = self.get_header()
         self.body = [

@@ -395,7 +397,7 @@ class Bitstream:
             else:
                 frame_stream.write(
                     "#{:3}:{:6},".format(i % 65, hex(self.frame_data[i])))
-        with open(file_name, "w") as f:
+        with OpenSafeFile(file_name, "w") as f:
             print(frame_stream.getvalue(), file=f)

     def write_frames(self, file_name):

@@ -409,7 +411,7 @@ class Bitstream:
                 frame_stream.write("\n")
             elif i < len(self.frame_data) - 1:
                 frame_stream.write(",")
-        with open(file_name, "w") as f:
+        with OpenSafeFile(file_name, "w") as f:
             print(frame_stream.getvalue(), file=f)
|
|
@ -19,6 +19,7 @@ import tempfile
|
||||||
|
|
||||||
import prjxray
|
import prjxray
|
||||||
import utils.fasm2frames as fasm2frames
|
import utils.fasm2frames as fasm2frames
|
||||||
|
from prjxray.util import OpenSafeFile
|
||||||
|
|
||||||
from textx.exceptions import TextXSyntaxError
|
from textx.exceptions import TextXSyntaxError
|
||||||
|
|
||||||
|
|
@ -69,7 +70,7 @@ class TestStringMethods(unittest.TestCase):
|
||||||
return os.path.join(os.path.dirname(__file__), 'test_data', fname)
|
return os.path.join(os.path.dirname(__file__), 'test_data', fname)
|
||||||
|
|
||||||
def get_test_data(self, fname):
|
def get_test_data(self, fname):
|
||||||
with open(self.filename_test_data(fname)) as f:
|
with OpenSafeFile(self.filename_test_data(fname)) as f:
|
||||||
return f.read()
|
return f.read()
|
||||||
|
|
||||||
def fasm2frames(self, fin_data, **kw):
|
def fasm2frames(self, fin_data, **kw):
|
||||||
|
|
|
||||||
|
|
@@ -14,13 +14,14 @@

 import os, sys, json

+from prjxray.util import OpenSafeFile

 def main():
-    with open("%s/%s/tilegrid.json" % (os.getenv("XRAY_DATABASE_DIR"),
+    with OpenSafeFile("%s/%s/tilegrid.json" % (os.getenv("XRAY_DATABASE_DIR"),
                                        os.getenv("XRAY_DATABASE")), "r") as f:
         tilegrid = json.load(f)

-    with open("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
+    with OpenSafeFile("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
                                        os.getenv("XRAY_DATABASE")), "r") as f:
         tileconn = json.load(f)

@@ -11,13 +11,14 @@

 import os, sys, json

+from prjxray.util import OpenSafeFile

 def main(argv):
     if len(argv) != 3:
         print("Usage example: python3 %s HCLK_R HCLK_SW6E3" % sys.argv[0])
         sys.exit(1)

-    with open("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
+    with OpenSafeFile("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
                                        os.getenv("XRAY_DATABASE")), "r") as f:
         tileconn = json.load(f)
|
@ -15,7 +15,7 @@ import os
|
||||||
import re
|
import re
|
||||||
import tempfile
|
import tempfile
|
||||||
import json
|
import json
|
||||||
from prjxray import util
|
from prjxray.util import OpenSafeFile, db_root_arg, get_parts, set_part_resources
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
@ -31,7 +31,7 @@ def main():
|
||||||
'family',
|
'family',
|
||||||
help="Name of the device family.",
|
help="Name of the device family.",
|
||||||
choices=['artix7', 'kintex7', 'zynq7', 'spartan7'])
|
choices=['artix7', 'kintex7', 'zynq7', 'spartan7'])
|
||||||
util.db_root_arg(parser)
|
db_root_arg(parser)
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
env = os.environ.copy()
|
env = os.environ.copy()
|
||||||
|
|
@ -40,7 +40,7 @@ def main():
|
||||||
os.getenv('XRAY_DIR'), 'settings', args.family)
|
os.getenv('XRAY_DIR'), 'settings', args.family)
|
||||||
information = {}
|
information = {}
|
||||||
|
|
||||||
parts = util.get_parts(args.db_root)
|
parts = get_parts(args.db_root)
|
||||||
processed_parts = dict()
|
processed_parts = dict()
|
||||||
for part in parts.keys():
|
for part in parts.keys():
|
||||||
# Skip parts which differ only in the speedgrade, as they have the same pins
|
# Skip parts which differ only in the speedgrade, as they have the same pins
|
||||||
|
|
@ -64,7 +64,7 @@ def main():
|
||||||
cwd=cwd,
|
cwd=cwd,
|
||||||
stdout=subprocess.PIPE)
|
stdout=subprocess.PIPE)
|
||||||
|
|
||||||
with open(tmp_file, "r") as fp:
|
with OpenSafeFile(tmp_file, "r") as fp:
|
||||||
pins_json = json.load(fp)
|
pins_json = json.load(fp)
|
||||||
|
|
||||||
os.remove(tmp_file)
|
os.remove(tmp_file)
|
||||||
|
|
@ -81,7 +81,7 @@ def main():
|
||||||
processed_parts[common_part] = {'pins': pins}
|
processed_parts[common_part] = {'pins': pins}
|
||||||
|
|
||||||
# Overwrites the <family>/resources.yaml file completly with new data
|
# Overwrites the <family>/resources.yaml file completly with new data
|
||||||
util.set_part_resources(resource_path, information)
|
set_part_resources(resource_path, information)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
|
|
||||||
|
|
@@ -18,7 +18,7 @@ import multiprocessing
 import pyjson5 as json5
 import json
 import sys
-from prjxray import util
+from prjxray.util import OpenSafeFile, db_root_arg, part_arg


 def full_wire_name(wire_in_grid):

@@ -64,7 +64,7 @@ def make_connections(db_root, part):


 def read_json5(fname):
-    with open(fname, 'r') as f:
+    with OpenSafeFile(fname, 'r') as f:
         return json5.load(f)

@@ -72,8 +72,8 @@ def main():
     parser = argparse.ArgumentParser(
         description="Tests database against raw node list.")

-    util.db_root_arg(parser)
-    util.part_arg(parser)
+    db_root_arg(parser)
+    part_arg(parser)
     parser.add_argument('--raw_node_root', required=True)
     parser.add_argument('--error_nodes', default="error_nodes.json")
     parser.add_argument('--ignored_wires')

@@ -109,7 +109,7 @@ def main():

     if len(error_nodes) > 0:
         if args.ignored_wires:
-            with open(args.ignored_wires, 'r') as f:
+            with OpenSafeFile(args.ignored_wires, 'r') as f:
                 ignored_wires = [l.strip() for l in f.readlines()]

         print(

@@ -119,7 +119,7 @@ def main():
                 args.error_nodes,
             ))

-        with open(args.error_nodes, 'w') as f:
+        with OpenSafeFile(args.error_nodes, 'w') as f:
             json.dump(error_nodes, f, indent=2)

         if not args.ignored_wires:
@@ -11,6 +11,7 @@
 import sys
 import json
 from prjxray.xjson import pprint
+from prjxray.util import OpenSafeFile

 if __name__ == "__main__":
     if len(sys.argv) == 1:

@@ -18,5 +19,6 @@ if __name__ == "__main__":
         doctest.testmod()
     else:
         assert len(sys.argv) == 2
-        d = json.load(open(sys.argv[1]))
+        with OpenSafeFile(sys.argv[1]) as f:
+            d = json.load(f)
         pprint(sys.stdout, d)