Mirror of https://gitlab.com/wgp/dougal/software.git, last synced 2025-12-06 12:47:08 +00:00.
We do this so that we can look for the "saillineOffset" parameter, which we expect to be present in source preplot imports and allows us to correlate source and sail lines. The change to bin/sps.py is necessary to let the JSON serialisation take place.
52 lines
1.1 KiB
Python
52 lines
1.1 KiB
Python
#!/usr/bin/python3
|
|
|
|
"""
|
|
SPS importing functions.
|
|
|
|
And by SPS, we mean more or less any line-delimited, fixed-width record format.
|
|
"""
|
|
|
|
import builtins
|
|
from parse_fwr import parse_fwr
|
|
|
|
def int (v):
    """Convert *v* to an integer, tolerating float-formatted input.

    Deliberately shadows the builtin ``int`` so that SPS fields such as
    ``"12.00"`` convert cleanly; the real builtin is still reachable via
    the ``builtins`` module.
    """
    as_float = float(v)
    return builtins.int(as_float)
|
|
|
|
def parse_line (string, spec):
    """Parse one fixed-width record from an SPS file.

    *spec* supplies three parallel lists: field "names", column "widths"
    and "normalisers" (one callable per field).  Returns a dict mapping
    each field name to its normalised value.
    """
    raw_fields = parse_fwr(string, spec["widths"])
    values = [ normalise(raw) for normalise, raw in zip(spec["normalisers"], raw_fields) ]
    return dict(zip(spec["names"], values))
|
|
|
|
def from_file(path, spec = None):
    """Read all records from the SPS file at *path*.

    Parameters
    ----------
    path : path to a line-delimited, fixed-width record file.
    spec : optional dict with "names", "widths" and "types" keys.  When
        omitted, a built-in default spec is used.  When given, a
        "normalisers" entry is derived from the "types" tokens and removed
        again before returning, so the caller's spec stays JSON-serialisable.

    Returns a list of dicts, one per parsed line.  Parsing stops at a
    line consisting of "EOF".
    """
    if spec is None:
        spec = {
            "names": [ "line_name", "point_number", "easting", "northing" ],
            "widths": [ -1, 10, 10, -25, 10, 10 ],
            "normalisers": [ int, int, float, float ]
        }
    else:
        # Map whitelisted type tokens to callables without eval(); "int"
        # resolves to this module's float-tolerant int(), exactly as
        # eval("int") did in module scope.
        token_map = { "int": int, "float": float, "str": str, "bool": bool }
        spec["normalisers"] = [ token_map[t] for t in spec["types"] if t in token_map ]

    records = []
    with open(path) as fd:
        for line in fd:
            # Lines read from the file keep their trailing newline, so the
            # original exact comparison (line == "EOF") only matched an
            # unterminated final line; compare the stripped text instead.
            if line.strip() == "EOF":
                break

            record = parse_line(line, spec)
            if record is not None:
                records.append(record)

    # Drop the callables so the spec can be JSON-serialised by callers.
    del spec["normalisers"]
    return records
|