Files
dougal-software/bin/import_final_p190.py
2020-08-08 23:59:13 +02:00

73 lines
1.8 KiB
Python
Executable File

#!/usr/bin/python3
"""
Import final p190.
For each survey in configuration.surveys(), check for new
or modified final P1/90 files and (re-)import them into the
database.
"""
import os
from glob import glob
import re
import configuration
import p190
from datastore import Datastore
if __name__ == '__main__':
    print("Reading configuration")
    surveys = configuration.surveys()
    print("Connecting to database")
    db = Datastore()
    db.connect()
    print("Reading surveys")
    for survey in surveys:
        print(f'Survey: {survey["id"]} ({survey["schema"]})')
        # Each survey lives in its own DB schema; switch before any queries.
        db.set_survey(survey["schema"])
        final_p190 = survey["final"]["p190"]
        pattern = final_p190["pattern"]
        # Compile once per survey — reused for every file found below.
        rx = re.compile(pattern["regex"])
        for fileprefix in final_p190["paths"]:
            print(f"Path prefix: {fileprefix}")
            for globspec in final_p190["globs"]:
                fullglob = os.path.join(fileprefix, globspec)
                for filepath in glob(fullglob):
                    print(f"Found {filepath}")
                    if not db.file_in_db(filepath):
                        print("Importing")
                        match = rx.match(os.path.basename(filepath))
                        if match is None:
                            # A file can satisfy the glob but not the naming
                            # regex; previously this crashed with an
                            # AttributeError on match.groups(). Skip it and
                            # keep importing the rest.
                            print(f"Skipping {filepath}: name does not match "
                                  f"pattern {pattern['regex']!r}")
                            continue
                        # Map named capture slots (e.g. sequence, vessel) onto
                        # the regex groups to describe this file to the DB.
                        file_info = dict(zip(pattern["captures"], match.groups()))
                        p190_data = p190.from_file(filepath, with_objrefs=True)
                        p190_data_timestamped = p190.apply_tstamps(
                            p190_data,
                            final_p190["timestamp_format"],
                            fix_bad_seconds=True
                        )
                        p190_data_normalised = p190.normalise(p190_data_timestamped)
                        print("Saving")
                        p190_records = p190.p190_type("S", p190_data_normalised)
                        # NOTE(review): file_data is built from the H09 offset
                        # headers but is never passed to save_final_p190 (or
                        # anything else) — dead code or unfinished feature.
                        # Kept as-is pending confirmation of intent.
                        file_data = dict()
                        file_data["offsets_p190"] = [h["description"].strip().split(" to ")+h["data"].split() for h in p190_data_normalised if h["record_type"] == "H" and h["header_type"] == "09"]
                        db.save_final_p190(p190_records, file_info, filepath, survey["epsg"])
                    else:
                        print("Already in DB")
    print("Done")