Mirror of https://gitlab.com/wgp/dougal/software.git
@@ -256,6 +256,62 @@ class Datastore:
 
         self.maybe_commit()
 
+
+    def save_preplot_line_info(self, lines, filepath, filedata = None):
+        """
+        Save preplot line information
+
+        Arguments:
+
+        lines (iterable): should be a collection of lines returned from
+        one of the line info reading functions (see preplots.py).
+
+        filepath (string): the full path to the preplot file from where the lines
+        have been read. It will be added to the survey's `file` table so that
+        it can be monitored for changes.
+        """
+
+        with self.conn.cursor() as cursor:
+            cursor.execute("BEGIN;")
+
+            hash = self.add_file(filepath, cursor)
+            count=0
+            for line in lines:
+                count += 1
+                print(f"\u001b[2KSaving line {count} / {len(lines)}", end="\r", flush=True)
+
+                sail_line = line["sail_line"]
+                incr = line.get("incr", True)
+                ntba = line.get("ntba", False)
+                remarks = line.get("remarks", None)
+                meta = json.dumps(line.get("meta", {}))
+                source_lines = line.get("source_line", [])
+
+                for source_line in source_lines:
+                    qry = """
+                        INSERT INTO preplot_saillines AS ps
+                          (sailline, line, sailline_class, line_class, incr, ntba, remarks, meta, hash)
+                        VALUES
+                          (%s, %s, 'V', 'S', %s, %s, %s, %s, %s)
+                        ON CONFLICT (sailline, sailline_class, line, line_class, incr) DO UPDATE
+                        SET
+                          incr = EXCLUDED.incr,
+                          ntba = EXCLUDED.ntba,
+                          remarks = COALESCE(EXCLUDED.remarks, ps.remarks),
+                          meta = ps.meta || EXCLUDED.meta,
+                          hash = EXCLUDED.hash;
+                    """
+
+                    # NOTE Consider using cursor.executemany() instead. Then again,
+                    # we're only expecting a few hundred lines at most.
+                    cursor.execute(qry, (sail_line, source_line, incr, ntba, remarks, meta, hash))
+
+            if filedata is not None:
+                self.save_file_data(filepath, json.dumps(filedata), cursor)
+
+            self.maybe_commit()
+
+
     def save_raw_p190(self, records, fileinfo, filepath, epsg = 0, filedata = None, ntbp = False):
         """
         Save raw P1 data.
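A note on the expected input: each element of lines is a dict keyed as read in the loop above (sail_line, source_line, incr, ntba, remarks, meta), and one upsert is issued per (sail_line, source_line) pair. On conflict the existing row is updated in place: meta is merged with the JSONB || operator and remarks is only replaced when a non-NULL value is supplied. A minimal sketch of a call, with the key names taken from the method and every value invented for illustration:

    # Hypothetical input; only the key names come from save_preplot_line_info().
    line_info = [
        {
            "sail_line": 1001,
            "source_line": [5001, 5002],   # one upsert per source line
            "incr": True,
            "ntba": False,
            "remarks": "reshoot of tail section",
            "meta": {"azimuth": 90.0},
        },
    ]

    # ds is assumed to be a Datastore already set to the survey schema.
    ds.save_preplot_line_info(line_info, "/data/preplots/lines.csv", filedata={"rows": 1})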
@@ -15,6 +15,12 @@ import configuration
 import preplots
 from datastore import Datastore
 
+def preplots_sorter (preplot):
+    rank = {
+        "x-sl+csv": 10
+    }
+    return rank.get(preplot.get("type"), 0)
+
 if __name__ == '__main__':
 
     print("Connecting to database")
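preplots_sorter only needs to push ancillary line-info files (type "x-sl+csv") behind everything else: any other type, or no type at all, ranks 0, and since sorted() is stable those files keep their original relative order. A small illustration with made-up entries:

    # Hypothetical preplot entries; only the "type" key matters to the sorter.
    preplot_files = [
        {"path": "lines.csv", "type": "x-sl+csv"},
        {"path": "receivers.p190", "type": "p190"},
        {"path": "sources.p190"},            # no type at all, ranks 0
    ]

    print([f["path"] for f in sorted(preplot_files, key=preplots_sorter)])
    # ['receivers.p190', 'sources.p190', 'lines.csv'], so the line info imports last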
@@ -28,7 +34,10 @@ if __name__ == '__main__':
     for survey in surveys:
         print(f'Survey: {survey["id"]} ({survey["schema"]})')
         db.set_survey(survey["schema"])
-        for file in survey["preplots"]:
+
+        # We sort the preplots so that ancillary line info always comes last,
+        # after the actual line + point data has been imported
+        for file in sorted(survey["preplots"], key=preplots_sorter):
             realpath = configuration.translate_path(file["path"])
 
             print(f"Preplot: {file['path']}")
@@ -48,7 +57,10 @@ if __name__ == '__main__':
 
             if type(preplot) is list:
                 print("Saving to DB")
-                db.save_preplots(preplot, file["path"], file["class"], survey["epsg"], file)
+                if file.get("type") == "x-sl+csv":
+                    db.save_preplot_line_info(preplot, file["path"], file)
+                else:
+                    db.save_preplots(preplot, file["path"], file["class"], survey["epsg"], file)
             elif type(preplot) is str:
                 print(preplot)
             else:
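Taken together, the two hunks above assume each survey entry carries a preplots list whose items may optionally declare a type. A hypothetical survey record of the shape the loop reads; the key names match the lookups in the code, the values are placeholders:

    # Hypothetical survey configuration; all values are placeholders.
    survey = {
        "id": "SV-001",
        "schema": "survey_sv001",
        "epsg": 23031,
        "preplots": [
            {"path": "/preplots/source.sps", "class": "S"},
            {"path": "/preplots/receiver.sps", "class": "R"},
            # Sorted last and routed to save_preplot_line_info() by the type check
            {"path": "/preplots/lines.csv", "class": "S", "type": "x-sl+csv"},
        ],
    }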