2020-08-08 23:59:13 +02:00
|
|
|
|
import os
|
|
|
|
|
|
import json
|
|
|
|
|
|
import psycopg2
|
|
|
|
|
|
import configuration
|
|
|
|
|
|
import preplots
|
2020-08-10 22:55:08 +02:00
|
|
|
|
import p111
|
2022-02-07 15:51:38 +01:00
|
|
|
|
from hashlib import md5 # Because it's good enough
|
2020-08-08 23:59:13 +02:00
|
|
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
Interface to the PostgreSQL database.
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
|
|
def file_hash(file):
    """
    Calculate a file hash based on its name, size, modification and creation times.

    The hash is used to uniquely identify files in the database and detect if they
    have changed. The name component is a truncated MD5 digest (16 hex chars) of
    the path string; MD5 is fine here since this is not a security context.
    """
    digest = md5(file.encode()).hexdigest()[:16]
    st = os.stat(file)
    return ":".join(str(v) for v in (st.st_size, st.st_mtime, st.st_ctime, digest))
|
2020-08-08 23:59:13 +02:00
|
|
|
|
|
|
|
|
|
|
class Datastore:
|
|
|
|
|
|
"""
|
|
|
|
|
|
The database interface.
|
|
|
|
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
|
|
db = Datastore()
|
|
|
|
|
|
"""
|
|
|
|
|
|
conn = None
|
|
|
|
|
|
autocommit = True
|
|
|
|
|
|
|
|
|
|
|
|
def __init__(self, **opts):
    """
    Remember connection options and load the database settings.

    opts are kept for later use; the actual connection is established by
    connect() / set_survey(), not here.
    """
    self.options = opts
    self.settings = configuration.read()["db"]
|
|
|
|
|
|
|
|
|
|
|
|
def connect(self, **opts):
    """
    Connect to the database.

    opts: Dictionary of options to pass to psycopg2.connect in addition to the
    connection string found in the system configuration (see configuration.py).

    Does not normally need to be invoked directly, as it is called by the
    set_survey() method.
    """
    # Dispose of any previous connection before opening a fresh one.
    if self.conn is not None:
        self.conn.close()

    conn_string = configuration.read()["db"]["connection_string"]
    self.conn = psycopg2.connect(conn_string, **opts)
|
|
|
|
|
|
|
2023-08-30 14:16:08 +02:00
|
|
|
|
def set_autocommit(self, value = True):
    """
    Enable or disable autocommit.

    By default, each one of this class' methods commit their results before returning.
    Sometimes that is not desirable so this methods allows the caller to change that
    behaviour. If autocommit is disabled, the caller should commit / rollback explicitly,
    e.g., by calling Datastore#conn.commit()

    value (bool): True to commit automatically (the default), False to leave
    transaction control to the caller. Note that maybe_commit() tests this flag
    with `is True`, so only the actual boolean True enables committing.
    """
    self.autocommit = value
|
|
|
|
|
|
|
|
|
|
|
|
def maybe_commit(self):
|
|
|
|
|
|
if self.autocommit is True:
|
|
|
|
|
|
self.conn.commit()
|
|
|
|
|
|
|
|
|
|
|
|
def set_survey(self, schema = None):
    """
    Set the schema corresponding to the survey to which subsequent operations will apply.
    Note that this will replace the existing connection (if any) with a new one.
    If no schema is given, defaults to 'public'.
    """
    # The survey schema is put first on the search path so its tables shadow
    # any same-named ones in public.
    search_path = "public" if schema is None else f"{schema},public"

    if self.conn:
        self.conn.close()

    self.connect(options=f"-c search_path={search_path}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def file_in_db(self, filepath):
    """
    Check if the file is already in the database.

    Returns True when `filepath` is present in the `files` table with a hash
    matching the file's current on-disk state, False otherwise (unknown path,
    or the file has changed since it was recorded).
    """
    with self.conn:
        with self.conn.cursor() as cursor:
            qry = "SELECT path, hash FROM files WHERE path = %s;"
            cursor.execute(qry, (filepath,))
            results = cursor.fetchall()
            if len(results):
                # Only hash (and therefore stat) the file once we know a
                # candidate row exists, so unknown files never hit the
                # filesystem.
                return (filepath, file_hash(configuration.translate_path(filepath))) in results
            # Previously this fell through and returned None implicitly;
            # return an explicit False for a consistent boolean result.
            return False
|
2020-08-08 23:59:13 +02:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def add_file(self, path, cursor = None):
    """
    Add a file to the `files` table of a survey.

    The path is stored as given, while the hash is computed from the translated
    (local) path. Returns the computed hash.

    cursor (DB Cursor): if supplied, it is used as-is and nothing is committed —
    we assume the caller is in the middle of a transaction. Otherwise a private
    cursor is opened, the change is committed (subject to the autocommit flag)
    and the cursor is closed again.
    """
    owned = cursor is None
    cur = self.conn.cursor() if owned else cursor

    realpath = configuration.translate_path(path)
    hash = file_hash(realpath)

    qry = "CALL add_file(%s, %s);"
    cur.execute(qry, (path, hash))

    if owned:
        self.maybe_commit()
        # Close the cursor we opened ourselves; previously it was simply
        # leaked until garbage collection.
        cur.close()

    return hash
|
|
|
|
|
|
|
|
|
|
|
|
def del_file(self, path, cursor = None):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Remove a file from a survey's `file` table.
|
|
|
|
|
|
"""
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
cur = self.conn.cursor()
|
|
|
|
|
|
else:
|
|
|
|
|
|
cur = cursor
|
|
|
|
|
|
|
|
|
|
|
|
qry = "DELETE FROM files WHERE path = %s;"
|
|
|
|
|
|
cur.execute(qry, (path,))
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
self.maybe_commit()
|
|
|
|
|
|
# We do not commit if we've been passed a cursor, instead
|
|
|
|
|
|
# we assume that we are in the middle of a transaction
|
|
|
|
|
|
|
2020-09-06 13:38:37 +02:00
|
|
|
|
def del_hash(self, hash, cursor = None):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Remove a hash from a survey's `file` table.
|
|
|
|
|
|
"""
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
cur = self.conn.cursor()
|
|
|
|
|
|
else:
|
|
|
|
|
|
cur = cursor
|
|
|
|
|
|
|
|
|
|
|
|
qry = "DELETE FROM files WHERE hash = %s;"
|
|
|
|
|
|
cur.execute(qry, (hash,))
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
self.maybe_commit()
|
|
|
|
|
|
# We do not commit if we've been passed a cursor, instead
|
|
|
|
|
|
# we assume that we are in the middle of a transaction
|
|
|
|
|
|
|
2020-08-08 23:59:13 +02:00
|
|
|
|
def list_files(self, cursor = None):
|
|
|
|
|
|
"""
|
|
|
|
|
|
List all files known to a survey.
|
|
|
|
|
|
"""
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
cur = self.conn.cursor()
|
|
|
|
|
|
else:
|
|
|
|
|
|
cur = cursor
|
|
|
|
|
|
|
2020-08-31 13:09:57 +02:00
|
|
|
|
qry = "SELECT * FROM files WHERE hash NOT LIKE '*%';"
|
2020-08-08 23:59:13 +02:00
|
|
|
|
cur.execute(qry)
|
|
|
|
|
|
res = cur.fetchall()
|
|
|
|
|
|
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
self.maybe_commit()
|
|
|
|
|
|
# We do not commit if we've been passed a cursor, instead
|
|
|
|
|
|
# we assume that we are in the middle of a transaction
|
|
|
|
|
|
return res
|
|
|
|
|
|
|
2020-09-08 18:36:30 +02:00
|
|
|
|
def set_ntbp(self, path, ntbp, cursor = None):
    """
    Set or remove a sequence's NTBP flag
    """
    cur = self.conn.cursor() if cursor is None else cursor

    # The affected sequence(s) are identified through the hash of the file
    # they were imported from.
    hash = file_hash(configuration.translate_path(path))
    qry = """
        UPDATE raw_lines rl
        SET ntbp = %s
        FROM raw_shots rs, files f
        WHERE rs.hash = f.hash AND rs.sequence = rl.sequence AND f.hash = %s;
    """
    cur.execute(qry, (ntbp, hash))

    # Only commit when we own the cursor; a caller-supplied cursor means we
    # are part of a larger transaction that the caller will finish.
    if cursor is None:
        self.maybe_commit()
|
|
|
|
|
|
|
2020-10-04 03:41:20 +02:00
|
|
|
|
def save_preplots(self, lines, filepath, preplot_class, epsg = 0, filedata = None):
    """
    Save preplot data.

    Runs as one transaction: registers the source file, upserts each line's
    two-point geometry plus its full point list, optionally stores file data,
    then commits once at the end (subject to the autocommit flag).

    Arguments:

        lines (iterable): should be a collection of lines returned from
        one of the preplot-reading functions (see preplots.py).

        filepath (string): the full path to the preplot file from where the lines
        have been read. It will be added to the survey's `file` table so that
        it can be monitored for changes.

        preplot_class (string): a valid preplot class code (`V` for saillines,
        `S` for sources).

        epsg (number): the EPSG code that identifies the preplot's CRS. Defaults
        to unknown.

        filedata (object, optional): arbitrary data associated with the file,
        stored via save_file_data().
    """
    with self.conn.cursor() as cursor:
        cursor.execute("BEGIN;")

        # Register (or refresh) the source file; its hash ties every preplot
        # row back to the file it came from.
        hash = self.add_file(filepath, cursor)
        count=0
        for line in lines:
            count += 1
            print(f"\u001b[2KSaving line {count} / {len(lines)} ({len(line['points'])} points)", end="\r", flush=True)

            # The stored line geometry is just the segment between the first
            # and last preplot points.
            p0 = line["points"][0]
            p1 = line["points"][-1]
            # incr records whether point numbers increase along the line.
            incr = p0["point_number"] <= p1["point_number"]
            geom = f'LINESTRING({p0["easting"]} {p0["northing"]}, {p1["easting"]} {p1["northing"]})'

            qry = """
                INSERT INTO preplot_lines AS pl
                    (line, incr, class, remarks, geometry, hash)
                VALUES
                    (%s, %s, %s, '', ST_GeomFromText(%s, %s), %s)
                ON CONFLICT (line, class) DO UPDATE
                SET incr = EXCLUDED.incr, geometry = EXCLUDED.geometry, hash = EXCLUDED.hash
                WHERE pl.hash <> EXCLUDED.hash;
                -- Note that remarks are *not* overwritten
            """

            cursor.execute(qry, (line["line_name"], incr, preplot_class, geom, epsg, hash))

            points = [ (line["line_name"], p["point_number"], preplot_class, p["easting"], p["northing"], epsg) for p in line["points"] ]

            qry = """
                INSERT INTO preplot_points
                    (line, point, class, geometry)
                VALUES
                    (%s, %s, %s, ST_SetSRID(ST_MakePoint(%s, %s), %s))
                ON CONFLICT (line, point, class) DO UPDATE
                SET geometry = EXCLUDED.geometry;
            """

            cursor.executemany(qry, points)

        if filedata is not None:
            self.save_file_data(filepath, json.dumps(filedata), cursor)

        self.maybe_commit()
|
|
|
|
|
|
|
2024-05-04 17:32:30 +02:00
|
|
|
|
|
|
|
|
|
|
def save_preplot_line_info(self, lines, filepath, filedata = None):
    """
    Save preplot line information

    Upserts sailline/source-line associations into `preplot_saillines`, one
    row per (sailline, source line) pair, in a single transaction. Lines not
    already present in `preplot_lines` are skipped.

    Arguments:

        lines (iterable): should be a collection of lines returned from
        one of the line info reading functions (see preplots.py).

        filepath (string): the full path to the preplot file from where the lines
        have been read. It will be added to the survey's `file` table so that
        it can be monitored for changes.

        filedata (object, optional): arbitrary data associated with the file,
        stored via save_file_data().
    """
    with self.conn.cursor() as cursor:
        cursor.execute("BEGIN;")

        # Check which preplot lines we actually have already imported,
        # as the line info file may contain extra lines.
        qry = """
            SELECT line, class
            FROM preplot_lines
            ORDER BY line, class;
        """
        cursor.execute(qry)
        preplot_lines = cursor.fetchall()

        hash = self.add_file(filepath, cursor)
        count=0
        for line in lines:
            count += 1

            # Only saillines (class 'V') already imported get their info saved.
            if not (line["sail_line"], "V") in preplot_lines:
                print(f"\u001b[2KSkipping line {count} / {len(lines)}", end="\n", flush=True)
                continue

            print(f"\u001b[2KSaving line {count} / {len(lines)} ", end="\n", flush=True)

            sail_line = line["sail_line"]
            incr = line.get("incr", True)
            ntba = line.get("ntba", False)
            remarks = line.get("remarks", None)
            meta = json.dumps(line.get("meta", {}))
            source_lines = line.get("source_line", [])

            for source_line in source_lines:
                qry = """
                    INSERT INTO preplot_saillines AS ps
                        (sailline, line, sailline_class, line_class, incr, ntba, remarks, meta, hash)
                    VALUES
                        (%s, %s, 'V', 'S', %s, %s, %s, %s, %s)
                    ON CONFLICT (sailline, sailline_class, line, line_class, incr) DO UPDATE
                    SET
                        incr = EXCLUDED.incr,
                        ntba = EXCLUDED.ntba,
                        remarks = COALESCE(EXCLUDED.remarks, ps.remarks),
                        meta = ps.meta || EXCLUDED.meta,
                        hash = EXCLUDED.hash;
                """

                # NOTE Consider using cursor.executemany() instead. Then again,
                # we're only expecting a few hundred lines at most.
                cursor.execute(qry, (sail_line, source_line, incr, ntba, remarks, meta, hash))

        if filedata is not None:
            self.save_file_data(filepath, json.dumps(filedata), cursor)

        self.maybe_commit()
|
|
|
|
|
|
|
|
|
|
|
|
|
2020-08-31 13:04:46 +02:00
|
|
|
|
def save_raw_p190(self, records, fileinfo, filepath, epsg = 0, filedata = None, ntbp = False):
    """
    Save raw P1 data.

    Note that despite its name, this function does not care whether the data
    comes from a P1/90 or a P1/11 or something else altogether.

    Arguments:

        records (iterable): a collection of P1 records as produced for instance by
        the from_file() function from p190.py.

        fileinfo (dictionary): information about the source file. It must have at
        least "line" and "sequence" elements.

        filepath (string): the full path to the file from where the lines have been
        read.

        epsg (number): the EPSG code identifying this dataset's CRS. Defaults to unknown.

        filedata (dictionary): Arbitrary data associated with this file and saved in the
        `file_data` table.

        ntbp (bool): initial "not to be processed" flag for the sequence.
    """
    with self.conn.cursor() as cursor:
        cursor.execute("BEGIN;")

        hash = self.add_file(filepath, cursor)
        # incr records whether point numbers increase along the line.
        incr = records[0]["point_number"] <= records[-1]["point_number"]

        # Start by deleting any online data we may have for this sequence
        self.del_hash("*online*", cursor)

        qry = """
            INSERT INTO raw_lines (sequence, line, remarks, ntbp, incr)
            VALUES (%s, %s, '', %s, %s)
            ON CONFLICT DO NOTHING;
        """

        cursor.execute(qry, (fileinfo["sequence"], fileinfo["line"], ntbp, incr))

        qry = """
            INSERT INTO raw_lines_files (sequence, hash)
            VALUES (%s, %s)
            ON CONFLICT DO NOTHING;
        """

        cursor.execute(qry, (fileinfo["sequence"], hash))

        shots = [ (fileinfo["sequence"], r["point_number"], r["record_type"], r["objref"], r["tstamp"], hash, r["easting"], r["northing"], epsg) for r in records ]

        qry = """
            INSERT INTO raw_shots (sequence, point, class, objref, tstamp, hash, geometry)
            VALUES (%s, %s, %s, %s, %s, %s, ST_SetSRID(ST_MakePoint(%s, %s), %s))
            ON CONFLICT (sequence, point, class) DO UPDATE
            SET
                objref = EXCLUDED.objref, tstamp = EXCLUDED.tstamp,
                hash = EXCLUDED.hash, geometry = EXCLUDED.geometry;
        """

        cursor.executemany(qry, shots)

        if filedata is not None:
            self.save_file_data(filepath, json.dumps(filedata), cursor)

        self.maybe_commit()
|
|
|
|
|
|
|
|
|
|
|
|
def save_final_p190(self, records, fileinfo, filepath, epsg = 0, filedata = None):
    """
    Save final P1 data.

    Note that despite its name, this function does not care whether the data
    comes from a P1/90 or a P1/11 or something else altogether.

    Arguments:

        records (iterable): a collection of P1 records as produced for instance by
        the from_file() function from p190.py.

        fileinfo (dictionary): information about the source file. It must have at
        least "line" and "sequence" elements.

        filepath (string): the full path to the file from where the lines have been
        read.

        epsg (number): the EPSG code identifying this dataset's CRS. Defaults to unknown.

        filedata (dictionary): Arbitrary data associated with this file and saved in the
        `file_data` table.
    """
    with self.conn.cursor() as cursor:
        cursor.execute("BEGIN;")

        hash = self.add_file(filepath, cursor)
        #print(records[0])
        #print(records[-1])
        # incr records whether point numbers increase along the line. It is
        # computed here but not stored by this method.
        incr = records[0]["point_number"] <= records[-1]["point_number"]

        qry = """
            INSERT INTO final_lines (sequence, line, remarks)
            VALUES (%s, %s, '')
            ON CONFLICT DO NOTHING;
        """

        cursor.execute(qry, (fileinfo["sequence"], fileinfo["line"]))

        qry = """
            INSERT INTO final_lines_files (sequence, hash)
            VALUES (%s, %s)
            ON CONFLICT DO NOTHING;
        """

        cursor.execute(qry, (fileinfo["sequence"], hash))

        shots = [ (fileinfo["sequence"], r["point_number"], r["record_type"], r["objref"], r["tstamp"], hash, r["easting"], r["northing"], epsg) for r in records ]

        qry = """
            INSERT INTO final_shots (sequence, point, class, objref, tstamp, hash, geometry)
            VALUES (%s, %s, %s, %s, %s, %s, ST_SetSRID(ST_MakePoint(%s, %s), %s))
            ON CONFLICT (sequence, point, class) DO UPDATE
            SET
                objref = EXCLUDED.objref, tstamp = EXCLUDED.tstamp,
                hash = EXCLUDED.hash, geometry = EXCLUDED.geometry;
        """

        cursor.executemany(qry, shots)

        if filedata is not None:
            self.save_file_data(filepath, json.dumps(filedata), cursor)

        self.maybe_commit()
|
|
|
|
|
|
|
2020-08-31 13:04:46 +02:00
|
|
|
|
def save_raw_p111 (self, records, fileinfo, filepath, epsg = 0, filedata = None, ntbp = False):
    """
    Save raw P1/11 data.

    Arguments:

        records (iterable): a collection of P1/11 records; field access goes
        through the p111 module helpers.

        fileinfo (dictionary): information about the source file. Must have at
        least "line", "sequence" and "meta" elements.

        filepath (string): the full path to the file the records came from.

        epsg (number): the EPSG code identifying this dataset's CRS. Defaults to unknown.

        filedata (dictionary): arbitrary data associated with this file and saved in
        the `file_data` table.

        ntbp (bool): initial "not to be processed" flag for the sequence.
    """
    with self.conn.cursor() as cursor:
        cursor.execute("BEGIN;")

        hash = self.add_file(filepath, cursor)

        if not records or len(records) == 0:
            print("File has no records (or none have been detected)")
            # We add the file to the database anyway to signal that we have
            # actually seen it.
            self.maybe_commit()
            return

        # incr records whether point numbers increase along the line.
        incr = p111.point_number(records[0]) <= p111.point_number(records[-1])

        # Start by deleting any online data we may have for this sequence
        self.del_hash("*online*", cursor)

        qry = """
            INSERT INTO raw_lines (sequence, line, remarks, ntbp, incr, meta)
            VALUES (%s, %s, '', %s, %s, %s)
            ON CONFLICT (sequence) DO UPDATE SET
                line = EXCLUDED.line,
                ntbp = EXCLUDED.ntbp,
                incr = EXCLUDED.incr,
                meta = EXCLUDED.meta;
        """

        cursor.execute(qry, (fileinfo["sequence"], fileinfo["line"], ntbp, incr, json.dumps(fileinfo["meta"])))

        # Merge the file metadata into the (possibly pre-existing) line metadata.
        qry = """
            UPDATE raw_lines
            SET meta = meta || %s
            WHERE sequence = %s;
        """

        cursor.execute(qry, (json.dumps(fileinfo["meta"]), fileinfo["sequence"]))

        qry = """
            INSERT INTO raw_lines_files (sequence, hash)
            VALUES (%s, %s)
            ON CONFLICT DO NOTHING;
        """

        cursor.execute(qry, (fileinfo["sequence"], hash))

        shots = [ (fileinfo["sequence"], p111.line(r), p111.point_number(r), r["Object Ref. Number"], r["tstamp"], hash, p111.easting(r), p111.northing(r), epsg) for r in records ]

        qry = """
            INSERT INTO raw_shots (sequence, line, point, objref, tstamp, hash, geometry)
            VALUES (%s, %s, %s, %s, %s, %s, ST_SetSRID(ST_MakePoint(%s, %s), %s))
            ON CONFLICT (sequence, point) DO UPDATE
            SET
                objref = EXCLUDED.objref, tstamp = EXCLUDED.tstamp,
                hash = EXCLUDED.hash, geometry = EXCLUDED.geometry;
        """

        cursor.executemany(qry, shots)

        if filedata is not None:
            self.save_file_data(filepath, json.dumps(filedata), cursor)

        self.maybe_commit()
|
|
|
|
|
|
|
|
|
|
|
|
def save_final_p111 (self, records, fileinfo, filepath, epsg = 0, filedata = None):
    """
    Save final P1/11 data.

    Arguments:

        records (iterable): a collection of P1/11 records; field access goes
        through the p111 module helpers.

        fileinfo (dictionary): information about the source file. Must have at
        least "line", "sequence" and "meta" elements.

        filepath (string): the full path to the file the records came from.

        epsg (number): the EPSG code identifying this dataset's CRS. Defaults to unknown.

        filedata (dictionary): arbitrary data associated with this file and saved in
        the `file_data` table.
    """
    with self.conn.cursor() as cursor:
        cursor.execute("BEGIN;")

        hash = self.add_file(filepath, cursor)

        qry = """
            INSERT INTO final_lines (sequence, line, remarks, meta)
            VALUES (%s, %s, '', %s)
            ON CONFLICT (sequence) DO UPDATE SET
                line = EXCLUDED.line,
                meta = EXCLUDED.meta;
        """

        cursor.execute(qry, (fileinfo["sequence"], fileinfo["line"], json.dumps(fileinfo["meta"])))

        # NOTE(review): this updates raw_lines, not final_lines, from a
        # final-data import — mirrors save_raw_p111; confirm it is intentional.
        qry = """
            UPDATE raw_lines
            SET meta = meta || %s
            WHERE sequence = %s;
        """

        cursor.execute(qry, (json.dumps(fileinfo["meta"]), fileinfo["sequence"]))

        qry = """
            INSERT INTO final_lines_files (sequence, hash)
            VALUES (%s, %s)
            ON CONFLICT DO NOTHING;
        """

        cursor.execute(qry, (fileinfo["sequence"], hash))

        shots = [ (fileinfo["sequence"], p111.line(r), p111.point_number(r), r["Object Ref. Number"], r["tstamp"], hash, p111.easting(r), p111.northing(r), epsg) for r in records ]

        qry = """
            INSERT INTO final_shots (sequence, line, point, objref, tstamp, hash, geometry)
            VALUES (%s, %s, %s, %s, %s, %s, ST_SetSRID(ST_MakePoint(%s, %s), %s))
            ON CONFLICT (sequence, point) DO UPDATE
            SET
                objref = EXCLUDED.objref, tstamp = EXCLUDED.tstamp,
                hash = EXCLUDED.hash, geometry = EXCLUDED.geometry;
        """

        cursor.executemany(qry, shots)

        if filedata is not None:
            self.save_file_data(filepath, json.dumps(filedata), cursor)

        # Run the survey's post-import processing for this sequence.
        cursor.execute("CALL final_line_post_import(%s);", (fileinfo["sequence"],))

        self.maybe_commit()
|
|
|
|
|
|
|
2020-09-06 13:45:56 +02:00
|
|
|
|
def save_raw_smsrc (self, records, fileinfo, filepath, filedata = None):
    """
    Save raw source (SMSRC) gun data by merging each record into the matching
    raw shot's metadata.

    The shots must already exist (e.g. from a P1 import); if the sequence has
    no raw shots yet the whole transaction is rolled back and nothing —
    including the file registration — is kept, so the file gets picked up
    again on a later run.

    Arguments:

        records (iterable): gun records; each must have a "shot" element.

        fileinfo (dictionary): must have at least a "sequence" element.

        filepath (string): the full path to the file the records came from.

        filedata (dictionary): arbitrary data associated with this file and
        saved in the `file_data` table.
    """
    with self.conn.cursor() as cursor:
        cursor.execute("BEGIN;")

        hash = self.add_file(filepath, cursor)

        # Start by deleting any online data we may have for this sequence
        # NOTE: Do I need to do this?
        #self.del_hash("*online*", cursor)

        # The shots should already exist, e.g., from a P1 import
        # …but what about if the SMSRC file gets read *before* the P1?
        # We need to check
        qry = "SELECT count(*) FROM raw_shots WHERE sequence = %s AND hash != '*online*';"
        values = (fileinfo["sequence"],)
        cursor.execute(qry, values)
        shotcount = cursor.fetchone()[0]
        if shotcount == 0:
            # No shots yet or not all imported, so we do *not*
            # save the gun data. It will eventually get picked
            # up in the next run.
            # Let's remove the file from the file list and bail
            # out.
            print("No raw shots for sequence", fileinfo["sequence"])
            self.conn.rollback()
            return

        values = [ (json.dumps(record), fileinfo["sequence"], record["shot"]) for record in records ]

        qry = """
            UPDATE raw_shots
            SET meta = jsonb_set(meta, '{smsrc}', %s::jsonb, true) - 'qc'
            WHERE sequence = %s AND point = %s;
        """

        cursor.executemany(qry, values)

        if filedata is not None:
            self.save_file_data(filepath, json.dumps(filedata), cursor)

        self.maybe_commit()
|
|
|
|
|
|
|
2020-08-10 22:55:08 +02:00
|
|
|
|
|
2020-08-08 23:59:13 +02:00
|
|
|
|
def save_file_data(self, path, filedata, cursor = None):
    """
    Save arbitrary data associated with a file.

    Arguments:

        path (string): the full path to the file that the data is to be associated with.

        filedata (object): arbitrary data – will be converted to JSON.

        cursor (DB Cursor): if a cursor is passed by the caller, this function will not
        call conn.commit() even if autocommit is True.
    """
    if cursor is None:
        cur = self.conn.cursor()
    else:
        cur = cursor

    # Pass our own cursor down so add_file() never commits on its own:
    # previously, when called without a cursor, add_file() received None,
    # opened a second cursor and committed the file row immediately — even
    # if the INSERT below subsequently failed.
    hash = self.add_file(path, cur)

    qry = """
        INSERT INTO file_data (hash, data)
        VALUES (%s, %s::json)
        ON CONFLICT (hash) DO UPDATE
        SET data = EXCLUDED.data;
    """

    cur.execute(qry, (hash, filedata))

    if cursor is None:
        self.maybe_commit()
    # We do not commit if we've been passed a cursor, instead
    # we assume that we are in the middle of a transaction
|
2020-08-25 17:52:17 +02:00
|
|
|
|
|
2023-09-12 11:04:37 +02:00
|
|
|
|
def get_file_data(self, path, cursor = None):
    """
    Retrieve arbitrary data associated with a file.

    Returns the stored data, or None if the file has no entry in `file_data`.
    """
    if cursor is None:
        cur = self.conn.cursor()
    else:
        cur = cursor

    realpath = configuration.translate_path(path)
    hash = file_hash(realpath)

    qry = """
        SELECT data
        FROM file_data
        WHERE hash = %s;
    """

    cur.execute(qry, (hash,))
    res = cur.fetchone()

    if cursor is None:
        self.maybe_commit()
    # We do not commit if we've been passed a cursor, instead
    # we assume that we are in the middle of a transaction

    # fetchone() returns None when there is no row; previously `res[0]`
    # raised TypeError in that case.
    return res[0] if res is not None else None
|
2023-08-30 14:19:56 +02:00
|
|
|
|
|
|
|
|
|
|
def surveys (self, include_archived = False):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Return list of survey definitions.
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
|
|
if self.conn is None:
|
|
|
|
|
|
self.connect()
|
|
|
|
|
|
|
|
|
|
|
|
if include_archived:
|
|
|
|
|
|
qry = """
|
2024-04-30 13:25:40 +02:00
|
|
|
|
SELECT meta, schema
|
2023-08-30 14:19:56 +02:00
|
|
|
|
FROM public.projects;
|
|
|
|
|
|
"""
|
|
|
|
|
|
else:
|
|
|
|
|
|
qry = """
|
2024-04-30 13:25:40 +02:00
|
|
|
|
SELECT meta, schema
|
2023-08-30 14:19:56 +02:00
|
|
|
|
FROM public.projects
|
|
|
|
|
|
WHERE NOT (meta->'archived')::boolean IS true
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
|
|
with self.conn:
|
|
|
|
|
|
with self.conn.cursor() as cursor:
|
|
|
|
|
|
|
|
|
|
|
|
cursor.execute(qry)
|
|
|
|
|
|
results = cursor.fetchall()
|
2024-04-30 13:25:40 +02:00
|
|
|
|
surveys = []
|
|
|
|
|
|
for r in results:
|
|
|
|
|
|
if r[0]:
|
|
|
|
|
|
r[0]['schema'] = r[1]
|
|
|
|
|
|
surveys.append(r[0])
|
|
|
|
|
|
return surveys
|
2023-08-30 14:19:56 +02:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# TODO Does this need tweaking on account of #246?
|
2020-08-25 17:52:17 +02:00
|
|
|
|
def apply_survey_configuration(self, cursor = None):
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
cur = self.conn.cursor()
|
|
|
|
|
|
else:
|
|
|
|
|
|
cur = cursor
|
|
|
|
|
|
|
|
|
|
|
|
qry = """
|
|
|
|
|
|
INSERT INTO labels (name, data)
|
|
|
|
|
|
SELECT l.key, l.value
|
|
|
|
|
|
FROM file_data fd,
|
2020-10-04 04:29:14 +02:00
|
|
|
|
jsonb_each(fd.data->'labels') l
|
2020-08-25 17:52:17 +02:00
|
|
|
|
WHERE fd.data::jsonb ? 'labels'
|
|
|
|
|
|
ON CONFLICT (name) DO UPDATE SET data = excluded.data;
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
|
|
cur.execute(qry)
|
|
|
|
|
|
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
self.maybe_commit()
|
|
|
|
|
|
# We do not commit if we've been passed a cursor, instead
|
|
|
|
|
|
# we assume that we are in the middle of a transaction
|
2020-09-10 20:39:06 +02:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def add_info(self, key, value, cursor = None):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Add an item of information to the project
|
|
|
|
|
|
"""
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
cur = self.conn.cursor()
|
|
|
|
|
|
else:
|
|
|
|
|
|
cur = cursor
|
|
|
|
|
|
|
|
|
|
|
|
qry = """
|
|
|
|
|
|
INSERT INTO info (key, value)
|
|
|
|
|
|
VALUES(%s, %s)
|
|
|
|
|
|
ON CONFLICT (key) DO UPDATE
|
|
|
|
|
|
SET value = EXCLUDED.value;
|
|
|
|
|
|
"""
|
|
|
|
|
|
cur.execute(qry, (key, value))
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
self.maybe_commit()
|
|
|
|
|
|
# We do not commit if we've been passed a cursor, instead
|
|
|
|
|
|
# we assume that we are in the middle of a transaction
|
|
|
|
|
|
|
|
|
|
|
|
def get_info(self, key, cursor = None):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Retrieve an item of information from the project
|
|
|
|
|
|
"""
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
cur = self.conn.cursor()
|
|
|
|
|
|
else:
|
|
|
|
|
|
cur = cursor
|
|
|
|
|
|
|
|
|
|
|
|
qry = "SELECT value FROM info WHERE key = %s;"
|
|
|
|
|
|
cur.execute(qry, (key,))
|
|
|
|
|
|
res = cur.fetchone()
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
self.maybe_commit()
|
|
|
|
|
|
# We do not commit if we've been passed a cursor, instead
|
|
|
|
|
|
# we assume that we are in the middle of a transaction
|
|
|
|
|
|
|
|
|
|
|
|
return res
|
|
|
|
|
|
|
|
|
|
|
|
def del_info(self, key, cursor = None):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Remove a an item of information from the project
|
|
|
|
|
|
"""
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
cur = self.conn.cursor()
|
|
|
|
|
|
else:
|
|
|
|
|
|
cur = cursor
|
|
|
|
|
|
|
|
|
|
|
|
qry = "DELETE FROM info WHERE key = %s;"
|
|
|
|
|
|
cur.execute(qry, (key,))
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
self.maybe_commit()
|
|
|
|
|
|
# We do not commit if we've been passed a cursor, instead
|
|
|
|
|
|
# we assume that we are in the middle of a transaction
|
2021-05-21 15:15:15 +02:00
|
|
|
|
|
|
|
|
|
|
def del_sequence_final(self, sequence, cursor = None):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Remove final data for a sequence.
|
|
|
|
|
|
"""
|
2022-02-07 15:51:38 +01:00
|
|
|
|
|
2021-05-21 15:15:15 +02:00
|
|
|
|
if cursor is None:
|
|
|
|
|
|
cur = self.conn.cursor()
|
|
|
|
|
|
else:
|
|
|
|
|
|
cur = cursor
|
|
|
|
|
|
|
|
|
|
|
|
qry = "DELETE FROM files WHERE hash = (SELECT hash FROM final_lines_files WHERE sequence = %s);"
|
|
|
|
|
|
cur.execute(qry, (sequence,))
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
self.maybe_commit()
|
|
|
|
|
|
# We do not commit if we've been passed a cursor, instead
|
|
|
|
|
|
# we assume that we are in the middle of a transaction
|
2022-02-07 15:51:38 +01:00
|
|
|
|
|
2022-05-01 20:27:19 +02:00
|
|
|
|
def adjust_planner(self, cursor = None):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Adjust estimated times on the planner
|
|
|
|
|
|
"""
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
cur = self.conn.cursor()
|
|
|
|
|
|
else:
|
|
|
|
|
|
cur = cursor
|
|
|
|
|
|
|
|
|
|
|
|
qry = "CALL adjust_planner();"
|
|
|
|
|
|
cur.execute(qry)
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
self.maybe_commit()
|
|
|
|
|
|
# We do not commit if we've been passed a cursor, instead
|
|
|
|
|
|
# we assume that we are in the middle of a transaction
|
2022-05-01 19:57:16 +02:00
|
|
|
|
|
|
|
|
|
|
def housekeep_event_log(self, cursor = None):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Call housekeeping actions on the event log
|
|
|
|
|
|
"""
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
cur = self.conn.cursor()
|
|
|
|
|
|
else:
|
|
|
|
|
|
cur = cursor
|
|
|
|
|
|
|
|
|
|
|
|
qry = "CALL augment_event_data();"
|
|
|
|
|
|
cur.execute(qry)
|
2022-05-12 22:57:23 +02:00
|
|
|
|
|
|
|
|
|
|
qry = "CALL scan_placeholders();"
|
|
|
|
|
|
cur.execute(qry)
|
|
|
|
|
|
|
2022-05-01 19:57:16 +02:00
|
|
|
|
if cursor is None:
|
|
|
|
|
|
self.maybe_commit()
|
|
|
|
|
|
# We do not commit if we've been passed a cursor, instead
|
|
|
|
|
|
# we assume that we are in the middle of a transaction
|
2022-05-13 19:04:39 +02:00
|
|
|
|
|
|
|
|
|
|
def run_daily_tasks(self, cursor = None):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Run once-a-day tasks
|
|
|
|
|
|
"""
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
cur = self.conn.cursor()
|
|
|
|
|
|
else:
|
|
|
|
|
|
cur = cursor
|
|
|
|
|
|
|
|
|
|
|
|
qry = "CALL log_midnight_shots();"
|
|
|
|
|
|
cur.execute(qry)
|
|
|
|
|
|
|
|
|
|
|
|
if cursor is None:
|
|
|
|
|
|
self.maybe_commit()
|
|
|
|
|
|
# We do not commit if we've been passed a cursor, instead
|
|
|
|
|
|
# we assume that we are in the middle of a transaction
|