diff --git a/bin/datastore.py b/bin/datastore.py index 2b787b2..56ceb08 100644 --- a/bin/datastore.py +++ b/bin/datastore.py @@ -256,6 +256,62 @@ class Datastore: self.maybe_commit() + + def save_preplot_line_info(self, lines, filepath, filedata = None): + """ + Save preplot line information + + Arguments: + + lines (iterable): should be a collection of lines returned from + one of the line info reading functions (see preplots.py). + + filepath (string): the full path to the preplot file from where the lines + have been read. It will be added to the survey's `file` table so that + it can be monitored for changes. + """ + + with self.conn.cursor() as cursor: + cursor.execute("BEGIN;") + + hash = self.add_file(filepath, cursor) + count=0 + for line in lines: + count += 1 + print(f"\u001b[2KSaving line {count} / {len(lines)}", end="\r", flush=True) + + sail_line = line["sail_line"] + incr = line.get("incr", True) + ntba = line.get("ntba", False) + remarks = line.get("remarks", None) + meta = json.dumps(line.get("meta", {})) + source_lines = line.get("source_line", []) + + for source_line in source_lines: + qry = """ + INSERT INTO preplot_saillines AS ps + (sailline, line, sailline_class, line_class, incr, ntba, remarks, meta, hash) + VALUES + (%s, %s, 'V', 'S', %s, %s, %s, %s, %s) + ON CONFLICT (sailline, sailline_class, line, line_class, incr) DO UPDATE + SET + incr = EXCLUDED.incr, + ntba = EXCLUDED.ntba, + remarks = COALESCE(EXCLUDED.remarks, ps.remarks), + meta = ps.meta || EXCLUDED.meta, + hash = EXCLUDED.hash; + """ + + # NOTE Consider using cursor.executemany() instead. Then again, + # we're only expecting a few hundred lines at most. + cursor.execute(qry, (sail_line, source_line, incr, ntba, remarks, meta, hash)) + + if filedata is not None: + self.save_file_data(filepath, json.dumps(filedata), cursor) + + self.maybe_commit() + + def save_raw_p190(self, records, fileinfo, filepath, epsg = 0, filedata = None, ntbp = False): """ Save raw P1 data. 
#!/usr/bin/python3

"""
Delimited record importing functions.
"""

import csv
import builtins

def to_bool(v):
    """Coerce *v* to a bool.

    Numeric strings go through int(); otherwise any string starting with
    "t"/"T" (e.g. "true", "T") is True. Anything else is False.
    """
    try:
        return bool(int(v))
    except ValueError:
        if isinstance(v, str):
            return v.strip().lower().startswith("t")
        return False

# Maps a field "type" name from an import spec to a casting function.
transform = {
    "int": lambda v: builtins.int(float(v)),
    "float": float,
    "string": str,
    "str": str,          # accepted alias, for consistency with fwr.py
    "bool": to_bool
}

def cast_values(row, fields):
    """Cast the string values of *row* in place according to *fields*.

    fields maps column names to specs ({"type": ..., "enum": ...,
    "default": ...}). Values may be scalars or lists (repeated columns);
    dict values (grouped sub-objects from remap()) are left untouched.
    Returns the mutated row.
    """

    def enum_for(key):
        field = fields.get(key, {})
        def enum(val):
            # Translate val through the field's enum table, falling back to
            # the spec default (or the raw value) when there is no match.
            if "enum" in field:
                ret_val = field.get("default", val)
                enums = field.get("enum", [])
                for enum_key in enums:
                    if enum_key == val:
                        ret_val = enums[enum_key]
                return ret_val
            return val
        return enum

    # Get rid of any unwanted data (csv.DictReader stores columns beyond
    # the known fieldnames under the None key).
    if None in row:
        del row[None]

    for key in row:
        val = row[key]
        enum = enum_for(key)
        transformer = transform.get(fields.get(key, {}).get("type"), str)

        if isinstance(val, list):
            for i, v in enumerate(val):
                row[key][i] = transformer(enum(v))
        elif isinstance(val, dict):
            continue
        else:
            row[key] = transformer(enum(val))
    return row

def build_fieldnames(spec):
    """Build a positional fieldname list from spec["fields"][*]["column"].

    Columns not named by the spec are left as None so csv.DictReader will
    still consume them positionally.
    """
    fieldnames = []

    if "fields" in spec:
        for key in spec["fields"]:
            index = spec["fields"][key]["column"]
            try:
                fieldnames[index] = key
            except IndexError:
                assert index >= 0
                # Grow the list with None placeholders up to `index`.
                fieldnames.extend(((index + 1) - len(fieldnames)) * [None])
                fieldnames[index] = key

    return fieldnames


def from_file_delimited(path, spec):
    """Read a delimited file according to *spec* and return a list of dicts.

    spec keys used: fields, delimiter, firstRow, headerRow, type.
    """

    fieldnames = build_fieldnames(spec)
    # BUG FIX: this defaulted to [] but cast_values() calls fields.get(),
    # which lists don't have. Must default to a dict.
    fields = spec.get("fields", {})
    delimiter = spec.get("delimiter", ",")
    firstRow = spec.get("firstRow", 0)
    headerRow = spec.get("headerRow", False)
    if headerRow:
        # With explicit fieldnames, DictReader treats the header line as
        # data, so skip one extra row.
        firstRow += 1

    records = []
    with open(path, "r", errors="ignore") as fd:

        if spec.get("type") == "x-sl+csv":
            fieldnames = None  # Pick from header row
            firstRow = 0
            reader = csv.DictReader(fd, delimiter=delimiter)
        else:
            reader = csv.DictReader(fd, fieldnames=fieldnames, delimiter=delimiter)

        for row, line in enumerate(reader):
            if row < firstRow:
                continue
            records.append(cast_values(dict(line), fields))

    return records


def remap(line, headers):
    """Map a raw csv row (*line*, a list) onto *headers*.

    Header names with an interior "." (e.g. "pos.x") build nested dicts;
    repeated header names collect their values into a list.
    """
    row = dict()
    for i, key in enumerate(headers):
        if "." in key[1:-1]:
            # This is an object attribute, e.g. "pos.x"
            k, attr = key.split(".")
            if k not in row:
                row[k] = dict()
            row[k][attr] = line[i]
        elif key in row:
            if isinstance(row[key], list):
                row[key].append(line[i])
            else:
                row[key] = [row[key], line[i]]
        else:
            row[key] = line[i]
    return row

def from_file_saillines(path, spec):
    """Read sail-line ancillary info ("x-sl+csv") from *path*.

    The header row (after skipping spec["firstRow"] lines) names the
    columns; values are cast with the fixed field spec below.
    """

    fields = {
        "sail_line": {"type": "int"},
        "source_line": {"type": "int"},
        "incr": {"type": "bool"},
        "ntba": {"type": "bool"}
    }

    delimiter = spec.get("delimiter", ",")
    firstRow = spec.get("firstRow", 0)

    records = []
    with open(path, "r", errors="ignore") as fd:
        reader = csv.reader(fd, delimiter=delimiter)
        for _ in range(firstRow):
            next(reader)
        headers = [h.strip() for h in next(reader) if len(h.strip())]

        for line in reader:
            records.append(cast_values(remap(line, headers), fields))

    return records


def from_file_p111(path, spec):
    """Not implemented yet."""
    pass

def from_file(path, spec):
    """Dispatch to the reader appropriate for spec["type"]."""
    if spec.get("type") == "x-sl+csv":
        return from_file_saillines(path, spec)
    return from_file_delimited(path, spec)
"x-sl+csv": + return from_file_saillines(path, spec) + else: + return from_file_delimited(path, spec) diff --git a/bin/fwr.py b/bin/fwr.py new file mode 100644 index 0000000..fe75d50 --- /dev/null +++ b/bin/fwr.py @@ -0,0 +1,126 @@ +#!/usr/bin/python3 + +""" +Fixed width record importing functions. +""" + +import builtins + +def to_bool (v): + try: + return bool(int(v)) + except ValueError: + if type(v) == str: + return v.strip().lower().startswith("t") + return False + +transform = { + "int": lambda v: builtins.int(float(v)), + "float": float, + "string": str, + "str": str, + "bool": to_bool +} + +def parse_line (line, fields, fixed = None): + data = dict() + + if fixed: + for value in fixed: + start = value["offset"] + end = start + len(value["text"]) + text = line[start:end] + if text != value["text"]: + return f"Expected text `{value['text']}` at position {start} but found `{text}` instead." + + for key in fields: + spec = fields[key] + transformer = transform[spec.get("type", "str")] + pos_from = spec["offset"] + pos_to = pos_from + spec["length"] + text = line[pos_from:pos_to] + value = transformer(text) + if "enum" in spec: + if "default" in spec: + value = spec["default"] + for enum_key in spec["enum"]: + if enum_key == text: + enum_value = transformer(spec["enum"][enum_key]) + value = enum_value + break + + data[key] = value + + return data + + +specfields = { + "sps1": { + "line_name": { "offset": 1, "length": 16, "type": "int" }, + "point_number": { "offset": 17, "length": 8, "type": "int" }, + "easting": { "offset": 46, "length": 9, "type": "float" }, + "northing": { "offset": 55, "length": 10, "type": "float" } + }, + "sps21": { + "line_name": { "offset": 1, "length": 7, "type": "int" }, + "point_number": { "offset": 11, "length": 7, "type": "int" }, + "easting": { "offset": 46, "length": 9, "type": "float" }, + "northing": { "offset": 55, "length": 10, "type": "float" } + }, + "p190": { + "line_name": { "offset": 1, "length": 12, "type": "int" }, + 
"point_number": { "offset": 19, "length": 6, "type": "int" }, + "easting": { "offset": 46, "length": 9, "type": "float" }, + "northing": { "offset": 55, "length": 9, "type": "float" } + }, +} + +def from_file(path, spec): + + # If spec.fields is not present, deduce it from spec.type ("sps1", "sps21", "p190", etc.) + if "fields" in spec: + fields = spec["fields"] + elif "type" in spec and spec["type"] in specfields: + fields = specfields[spec["type"]] + else: + # TODO: Should default to looking for spec.format and doing a legacy import on it + return "Neither 'type' nor 'fields' given. I don't know how to import this fixed-width dataset." + + firstRow = spec.get("firstRow", 0) + + skipStart = [] # Skip lines starting with any of these values + skipMatch = [] # Skip lines matching any of these values + + if "type" in spec: + if spec["type"] == "sps1" or spec["type"] == "sps21" or spec["type"] == "p190": + skipStart = "H" + skipMatch = "EOF" + + records = [] + with open(path, "r", errors="ignore") as fd: + row = 0 + line = fd.readline() + + while line: + skip = False + + if row < firstRow: + skip = True + + if not skip: + for v in skipStart: + if line.startswith(v): + skip = True + break + for v in skipMatch: + if line == v: + skip = True + break + + if not skip: + records.append(parse_line(line, fields)) + + row += 1 + line = fd.readline() + + return records diff --git a/bin/import_final_p111.py b/bin/import_final_p111.py index bc0c146..3513218 100755 --- a/bin/import_final_p111.py +++ b/bin/import_final_p111.py @@ -15,6 +15,7 @@ import re import time import configuration import p111 +import fwr from datastore import Datastore def add_pending_remark(db, sequence): @@ -69,8 +70,12 @@ if __name__ == '__main__': print("No final P1/11 configuration") exit(0) - pattern = final_p111["pattern"] - rx = re.compile(pattern["regex"]) + + lineNameInfo = final_p111.get("lineNameInfo") + pattern = final_p111.get("pattern") + rx = None + if pattern and pattern.get("regex"): + rx = 
def preplots_sorter(preplot):
    """Sort key for preplot files.

    Ancillary line-info files ("x-sl+csv") rank after everything else so
    they are imported once the actual line + point data is in place.
    """
    if preplot.get("type") == "x-sl+csv":
        return 10
    return 0
({logical_filepath} ~ {pattern['regex']})" - print(error_message, file=sys.stderr) - print("This file will be ignored!") - continue + if rx: + match = rx.match(os.path.basename(logical_filepath)) + if not match: + error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})" + print(error_message, file=sys.stderr) + print("This file will be ignored!") + continue - file_info = dict(zip(pattern["captures"], match.groups())) - file_info["meta"] = {} + file_info = dict(zip(pattern["captures"], match.groups())) + file_info["meta"] = {} + if lineNameInfo: + basename = os.path.basename(physical_filepath) + fields = lineNameInfo.get("fields", {}) + fixed = lineNameInfo.get("fixed") + try: + parsed_line = fwr.parse_line(basename, fields, fixed) + except ValueError as err: + parsed_line = "Line format error: " + str(err) + if type(parsed_line) == str: + print(parsed_line, file=sys.stderr) + print("This file will be ignored!") + continue + + file_info = {} + file_info["sequence"] = parsed_line["sequence"] + file_info["line"] = parsed_line["line"] + del(parsed_line["sequence"]) + del(parsed_line["line"]) + file_info["meta"] = { + "fileInfo": parsed_line + } p111_data = p111.from_file(physical_filepath) print("Saving") diff --git a/bin/import_smsrc.py b/bin/import_smsrc.py index b3fd4e7..7f91e81 100755 --- a/bin/import_smsrc.py +++ b/bin/import_smsrc.py @@ -15,6 +15,7 @@ import re import time import configuration import smsrc +import fwr from datastore import Datastore if __name__ == '__main__': @@ -33,17 +34,21 @@ if __name__ == '__main__': db.set_survey(survey["schema"]) try: - raw_smsrc = survey["raw"]["smsrc"] + raw_smsrc = survey["raw"]["source"]["smsrc"]["header"] except KeyError: print("No SmartSource data configuration") continue - flags = 0 - if "flags" in raw_smsrc: - configuration.rxflags(raw_smsrc["flags"]) + # NOTE I've no idea what this is šŸ¤” + # flags = 0 + # if "flags" in raw_smsrc: + # 
configuration.rxflags(raw_smsrc["flags"]) - pattern = raw_smsrc["pattern"] - rx = re.compile(pattern["regex"], flags) + lineNameInfo = raw_smsrc.get("lineNameInfo") + pattern = raw_smsrc.get("pattern") + rx = None + if pattern and pattern.get("regex"): + rx = re.compile(pattern["regex"]) for fileprefix in raw_smsrc["paths"]: realprefix = configuration.translate_path(fileprefix) @@ -64,14 +69,39 @@ if __name__ == '__main__': print("Importing") - match = rx.match(os.path.basename(logical_filepath)) - if not match: - error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})" - print(error_message, file=sys.stderr) - print("This file will be ignored!") - continue + if rx: + match = rx.match(os.path.basename(logical_filepath)) + if not match: + error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})" + print(error_message, file=sys.stderr) + print("This file will be ignored!") + continue - file_info = dict(zip(pattern["captures"], match.groups())) + file_info = dict(zip(pattern["captures"], match.groups())) + file_info["meta"] = {} + + + if lineNameInfo: + basename = os.path.basename(physical_filepath) + fields = lineNameInfo.get("fields", {}) + fixed = lineNameInfo.get("fixed") + try: + parsed_line = fwr.parse_line(basename, fields, fixed) + except ValueError as err: + parsed_line = "Line format error: " + str(err) + if type(parsed_line) == str: + print(parsed_line, file=sys.stderr) + print("This file will be ignored!") + continue + + file_info = {} + file_info["sequence"] = parsed_line["sequence"] + file_info["line"] = parsed_line["line"] + del(parsed_line["sequence"]) + del(parsed_line["line"]) + file_info["meta"] = { + "fileInfo": parsed_line + } smsrc_records = smsrc.from_file(physical_filepath) diff --git a/bin/p111.py b/bin/p111.py index cc36575..646fe7d 100644 --- a/bin/p111.py +++ b/bin/p111.py @@ -7,7 +7,6 @@ P1/11 parsing functions. 
def is_fixed_width(file):
    """True when *file* is a spec dict describing a fixed-width format."""
    return isinstance(file, dict) and file.get("type") in ("sps1", "sps21", "p190", "fixed-width")

def is_delimited(file):
    """True when *file* is a spec dict describing a delimited format."""
    return isinstance(file, dict) and file.get("type") in ("csv", "p111", "x-sl+csv")
+ """ + filepath = realpath or file["path"] - if not "type" in file or file["type"] == "sps": - records = sps.from_file(filepath, file["format"] if "format" in file else None ) + if is_fixed_width(file): + records = fwr.from_file(filepath, file) + elif is_delimited(file): + records = delimited.from_file(filepath, file) else: - return "Not an SPS file" + return "Unrecognised file format" + + if type(records) == str: + # This is an error message + return records + + if file.get("type") == "x-sl+csv": + return records lines = [] line_names = set([r["line_name"] for r in records]) diff --git a/bin/sps.py b/bin/sps.py deleted file mode 100644 index e0c33e3..0000000 --- a/bin/sps.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/python3 - -""" -SPS importing functions. - -And by SPS, we mean more or less any line-delimited, fixed-width record format. -""" - -import builtins -from parse_fwr import parse_fwr - -def int (v): - return builtins.int(float(v)) - -def parse_line (string, spec): - """Parse a line from an SPS file.""" - names = spec["names"] - widths = spec["widths"] - normalisers = spec["normalisers"] - record = [ t[0](t[1]) for t in zip(normalisers, parse_fwr(string, widths)) ] - return dict(zip(names, record)) - -def from_file(path, spec = None): - if spec is None: - spec = { - "names": [ "line_name", "point_number", "easting", "northing" ], - "widths": [ -1, 10, 10, -25, 10, 10 ], - "normalisers": [ int, int, float, float ] - } - else: - normaliser_tokens = [ "int", "float", "str", "bool" ] - spec["normalisers"] = [ eval(t) for t in spec["types"] if t in normaliser_tokens ] - - records = [] - with open(path) as fd: - cnt = 0 - line = fd.readline() - while line: - cnt = cnt+1 - - if line == "EOF": - break - - record = parse_line(line, spec) - if record is not None: - records.append(record) - - line = fd.readline() - - del spec["normalisers"] - return records diff --git a/etc/db/database-version.sql b/etc/db/database-version.sql index d4530ce..15ae342 100644 --- 
a/etc/db/database-version.sql +++ b/etc/db/database-version.sql @@ -1,5 +1,5 @@ \connect dougal -INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.2"}') +INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.5"}') ON CONFLICT (key) DO UPDATE - SET value = public.info.value || '{"db_schema": "0.4.2"}' WHERE public.info.key = 'version'; + SET value = public.info.value || '{"db_schema": "0.4.5"}' WHERE public.info.key = 'version'; diff --git a/etc/db/schema-template.sql b/etc/db/schema-template.sql index 0f6ed68..02726e7 100644 --- a/etc/db/schema-template.sql +++ b/etc/db/schema-template.sql @@ -399,6 +399,62 @@ $$; ALTER FUNCTION _SURVEY__TEMPLATE_.clear_shot_qc() OWNER TO postgres; +-- +-- Name: event_log_uid_seq; Type: SEQUENCE; Schema: _SURVEY__TEMPLATE_; Owner: postgres +-- + +CREATE SEQUENCE _SURVEY__TEMPLATE_.event_log_uid_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE _SURVEY__TEMPLATE_.event_log_uid_seq OWNER TO postgres; + +SET default_tablespace = ''; + +SET default_table_access_method = heap; + +-- +-- Name: event_log_full; Type: TABLE; Schema: _SURVEY__TEMPLATE_; Owner: postgres +-- + +CREATE TABLE _SURVEY__TEMPLATE_.event_log_full ( + uid integer DEFAULT nextval('_SURVEY__TEMPLATE_.event_log_uid_seq'::regclass) NOT NULL, + id integer NOT NULL, + tstamp timestamp with time zone, + sequence integer, + point integer, + remarks text DEFAULT ''::text NOT NULL, + labels text[] DEFAULT ARRAY[]::text[] NOT NULL, + meta jsonb DEFAULT '{}'::jsonb NOT NULL, + validity tstzrange NOT NULL, + CONSTRAINT event_log_full_check CHECK ((((tstamp IS NOT NULL) AND (sequence IS NOT NULL) AND (point IS NOT NULL)) OR ((tstamp IS NOT NULL) AND (sequence IS NULL) AND (point IS NULL)) OR ((tstamp IS NULL) AND (sequence IS NOT NULL) AND (point IS NOT NULL)))), + CONSTRAINT event_log_full_validity_check CHECK ((NOT isempty(validity))) +); + + +ALTER TABLE _SURVEY__TEMPLATE_.event_log_full OWNER TO 
postgres; + +-- +-- Name: event_log_changes(timestamp with time zone); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres +-- + +CREATE FUNCTION _SURVEY__TEMPLATE_.event_log_changes(ts0 timestamp with time zone) RETURNS SETOF _SURVEY__TEMPLATE_.event_log_full + LANGUAGE sql + AS $$ + SELECT * + FROM event_log_full + WHERE lower(validity) > ts0 OR upper(validity) IS NOT NULL AND upper(validity) > ts0 + ORDER BY lower(validity); + $$; + + +ALTER FUNCTION _SURVEY__TEMPLATE_.event_log_changes(ts0 timestamp with time zone) OWNER TO postgres; + -- -- Name: event_log_full_insert(); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres -- @@ -881,46 +937,6 @@ $$; ALTER FUNCTION _SURVEY__TEMPLATE_.ij_error(line double precision, point double precision, geom public.geometry) OWNER TO postgres; --- --- Name: event_log_uid_seq; Type: SEQUENCE; Schema: _SURVEY__TEMPLATE_; Owner: postgres --- - -CREATE SEQUENCE _SURVEY__TEMPLATE_.event_log_uid_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - -ALTER TABLE _SURVEY__TEMPLATE_.event_log_uid_seq OWNER TO postgres; - -SET default_tablespace = ''; - -SET default_table_access_method = heap; - --- --- Name: event_log_full; Type: TABLE; Schema: _SURVEY__TEMPLATE_; Owner: postgres --- - -CREATE TABLE _SURVEY__TEMPLATE_.event_log_full ( - uid integer DEFAULT nextval('_SURVEY__TEMPLATE_.event_log_uid_seq'::regclass) NOT NULL, - id integer NOT NULL, - tstamp timestamp with time zone, - sequence integer, - point integer, - remarks text DEFAULT ''::text NOT NULL, - labels text[] DEFAULT ARRAY[]::text[] NOT NULL, - meta jsonb DEFAULT '{}'::jsonb NOT NULL, - validity tstzrange NOT NULL, - CONSTRAINT event_log_full_check CHECK ((((tstamp IS NOT NULL) AND (sequence IS NOT NULL) AND (point IS NOT NULL)) OR ((tstamp IS NOT NULL) AND (sequence IS NULL) AND (point IS NULL)) OR ((tstamp IS NULL) AND (sequence IS NOT NULL) AND (point IS NOT NULL)))), - CONSTRAINT event_log_full_validity_check 
CHECK ((NOT isempty(validity))) -); - - -ALTER TABLE _SURVEY__TEMPLATE_.event_log_full OWNER TO postgres; - -- -- Name: event_log; Type: VIEW; Schema: _SURVEY__TEMPLATE_; Owner: postgres -- @@ -1519,9 +1535,9 @@ CREATE VIEW _SURVEY__TEMPLATE_.final_lines_summary AS s.ts1, (s.ts1 - s.ts0) AS duration, s.num_points, - ( SELECT count(*) AS count - FROM _SURVEY__TEMPLATE_.missing_sequence_final_points - WHERE missing_sequence_final_points.sequence = s.sequence) AS missing_shots, + (( SELECT count(*) AS count + FROM _SURVEY__TEMPLATE_.preplot_points + WHERE ((preplot_points.line = fl.line) AND (((preplot_points.point >= s.fsp) AND (preplot_points.point <= s.lsp)) OR ((preplot_points.point >= s.lsp) AND (preplot_points.point <= s.fsp))))) - s.num_points) AS missing_shots, s.length, s.azimuth, fl.remarks, @@ -2077,10 +2093,10 @@ CREATE VIEW _SURVEY__TEMPLATE_.preplot_summary AS ALTER TABLE _SURVEY__TEMPLATE_.preplot_summary OWNER TO postgres; -- --- Name: project_summary; Type: VIEW; Schema: _SURVEY__TEMPLATE_; Owner: postgres +-- Name: project_summary; Type: MATERIALIZED VIEW; Schema: _SURVEY__TEMPLATE_; Owner: postgres -- -CREATE VIEW _SURVEY__TEMPLATE_.project_summary AS +CREATE MATERIALIZED VIEW _SURVEY__TEMPLATE_.project_summary AS WITH fls AS ( SELECT avg((final_lines_summary.duration / ((final_lines_summary.num_points - 1))::double precision)) AS shooting_rate, avg((final_lines_summary.length / date_part('epoch'::text, final_lines_summary.duration))) AS speed, @@ -2123,7 +2139,8 @@ CREATE VIEW _SURVEY__TEMPLATE_.project_summary AS fls.speed AS shooting_rate FROM _SURVEY__TEMPLATE_.preplot_summary ps, fls, - project; + project + WITH NO DATA; ALTER TABLE _SURVEY__TEMPLATE_.project_summary OWNER TO postgres; @@ -2168,9 +2185,9 @@ CREATE VIEW _SURVEY__TEMPLATE_.raw_lines_summary AS (s.ts1 - s.ts0) AS duration, s.num_points, s.num_preplots, - (SELECT count(*) AS count - FROM _SURVEY__TEMPLATE_.missing_sequence_raw_points - WHERE missing_sequence_raw_points.sequence = 
s.sequence) AS missing_shots, + (( SELECT count(*) AS count + FROM _SURVEY__TEMPLATE_.preplot_points + WHERE ((preplot_points.line = rl.line) AND (((preplot_points.point >= s.fsp) AND (preplot_points.point <= s.lsp)) OR ((preplot_points.point >= s.lsp) AND (preplot_points.point <= s.fsp))))) - s.num_preplots) AS missing_shots, s.length, s.azimuth, rl.remarks, diff --git a/etc/db/upgrades/upgrade33-v0.5.0-sailline-ancillary-data.sql b/etc/db/upgrades/upgrade33-v0.5.0-sailline-ancillary-data.sql new file mode 100644 index 0000000..303d0b4 --- /dev/null +++ b/etc/db/upgrades/upgrade33-v0.5.0-sailline-ancillary-data.sql @@ -0,0 +1,164 @@ +-- Sailline ancillary data +-- +-- New schema version: 0.5.0 +-- +-- ATTENTION: +-- +-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT. +-- +-- +-- NOTE: This upgrade affects all schemas in the database. +-- NOTE: Each application starts a transaction, which must be committed +-- or rolled back. +-- +-- Issue #264 calls for associating sail and acquisition lines as well +-- as indicating expected acquisition direction, and other data which +-- cannot be provided via standard import formats such as SPS or P1/90. +-- +-- We support this via an additional table that holds most of the required +-- data. This data can simply be inferred from regular preplots, e.g., line +-- direction can be deduced from preplot point order, and sail / source +-- line offsets can be taken from P1/90 headers or from a configuration +-- parameter. Alternatively, and in preference, the data can be provided +-- explicitly, which is what issue #264 asks for. +-- +-- In principle, this makes at least some of the attributes of `preplot_lines` +-- redundant (at least `incr` and `ntba`) but we will leave them there for +-- the time being as technical debt. 
+-- +-- To apply, run as the dougal user: +-- +-- psql <>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version'; + + IF current_db_version >= '0.5.0' THEN + RAISE EXCEPTION + USING MESSAGE='Patch already applied'; + END IF; + + IF current_db_version != '0.4.5' THEN + RAISE EXCEPTION + USING MESSAGE='Invalid database version: ' || current_db_version, + HINT='Ensure all previous patches have been applied.'; + END IF; + + FOR row IN + SELECT schema_name FROM information_schema.schemata + WHERE schema_name LIKE 'survey_%' + ORDER BY schema_name + LOOP + CALL pg_temp.upgrade_survey_schema(row.schema_name); + END LOOP; +END; +$outer$ LANGUAGE plpgsql; + +CALL pg_temp.upgrade(); + +CALL pg_temp.show_notice('Cleaning up'); +DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text); +DROP PROCEDURE pg_temp.upgrade (); + +CALL pg_temp.show_notice('Updating db_schema version'); +INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.0"}') +ON CONFLICT (key) DO UPDATE + SET value = public.info.value || '{"db_schema": "0.5.0"}' WHERE public.info.key = 'version'; + + +CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes'); +DROP PROCEDURE pg_temp.show_notice (notice text); + +-- +--NOTE Run `COMMIT;` now if all went well +-- diff --git a/etc/db/upgrades/upgrade34-v0.5.1-fix-sequences-detail-view.sql b/etc/db/upgrades/upgrade34-v0.5.1-fix-sequences-detail-view.sql new file mode 100644 index 0000000..7c1966e --- /dev/null +++ b/etc/db/upgrades/upgrade34-v0.5.1-fix-sequences-detail-view.sql @@ -0,0 +1,119 @@ +-- Sailline ancillary data +-- +-- New schema version: 0.5.1 +-- +-- ATTENTION: +-- +-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT. +-- +-- +-- NOTE: This upgrade affects all schemas in the database. +-- NOTE: Each application starts a transaction, which must be committed +-- or rolled back. 
+-- +-- The sequences_detail view wrongly associates source lines and shot +-- points when it should be associating saillines and shot points instead. +-- +-- This updates fixes that issue (#307). +-- +-- To apply, run as the dougal user: +-- +-- psql <>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version'; + + IF current_db_version >= '0.5.1' THEN + RAISE EXCEPTION + USING MESSAGE='Patch already applied'; + END IF; + + IF current_db_version != '0.5.0' THEN + RAISE EXCEPTION + USING MESSAGE='Invalid database version: ' || current_db_version, + HINT='Ensure all previous patches have been applied.'; + END IF; + + FOR row IN + SELECT schema_name FROM information_schema.schemata + WHERE schema_name LIKE 'survey_%' + ORDER BY schema_name + LOOP + CALL pg_temp.upgrade_survey_schema(row.schema_name); + END LOOP; +END; +$outer$ LANGUAGE plpgsql; + +CALL pg_temp.upgrade(); + +CALL pg_temp.show_notice('Cleaning up'); +DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text); +DROP PROCEDURE pg_temp.upgrade (); + +CALL pg_temp.show_notice('Updating db_schema version'); +INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.1"}') +ON CONFLICT (key) DO UPDATE + SET value = public.info.value || '{"db_schema": "0.5.1"}' WHERE public.info.key = 'version'; + + +CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes'); +DROP PROCEDURE pg_temp.show_notice (notice text); + +-- +--NOTE Run `COMMIT;` now if all went well +-- diff --git a/etc/db/upgrades/upgrade35-v0.5.2-fix-preplot_lines_summary-view.sql b/etc/db/upgrades/upgrade35-v0.5.2-fix-preplot_lines_summary-view.sql new file mode 100644 index 0000000..9617e59 --- /dev/null +++ b/etc/db/upgrades/upgrade35-v0.5.2-fix-preplot_lines_summary-view.sql @@ -0,0 +1,142 @@ +-- Fix preplot_lines_summary view +-- +-- New schema version: 0.5.2 +-- +-- ATTENTION: +-- +-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT. 
+-- +-- +-- NOTE: This upgrade affects all schemas in the database. +-- NOTE: Each application starts a transaction, which must be committed +-- or rolled back. +-- +-- Following introduction of `preplot_saillines` (0.5.0), the incr and +-- ntba statuses are stored in a separate table, not in `preplot_lines` +-- (TODO: a future upgrade should remove those columns from `preplot_lines`) +-- +-- Now any views referencing `incr` and `ntba` must be updated to point to +-- the new location of those attributes. +-- +-- This update fixes #312. +-- +-- To apply, run as the dougal user: +-- +-- psql <>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version'; + + IF current_db_version >= '0.5.2' THEN + RAISE EXCEPTION + USING MESSAGE='Patch already applied'; + END IF; + + IF current_db_version != '0.5.1' THEN + RAISE EXCEPTION + USING MESSAGE='Invalid database version: ' || current_db_version, + HINT='Ensure all previous patches have been applied.'; + END IF; + + FOR row IN + SELECT schema_name FROM information_schema.schemata + WHERE schema_name LIKE 'survey_%' + ORDER BY schema_name + LOOP + CALL pg_temp.upgrade_survey_schema(row.schema_name); + END LOOP; +END; +$outer$ LANGUAGE plpgsql; + +CALL pg_temp.upgrade(); + +CALL pg_temp.show_notice('Cleaning up'); +DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text); +DROP PROCEDURE pg_temp.upgrade (); + +CALL pg_temp.show_notice('Updating db_schema version'); +INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.2"}') +ON CONFLICT (key) DO UPDATE + SET value = public.info.value || '{"db_schema": "0.5.2"}' WHERE public.info.key = 'version'; + + +CALL pg_temp.show_notice('All done. 
You may now run "COMMIT;" to persist the changes'); +DROP PROCEDURE pg_temp.show_notice (notice text); + +-- +--NOTE Run `COMMIT;` now if all went well +-- diff --git a/etc/db/upgrades/upgrade36-v0.5.3-fix-final_lines_summary-view.sql b/etc/db/upgrades/upgrade36-v0.5.3-fix-final_lines_summary-view.sql new file mode 100644 index 0000000..441161a --- /dev/null +++ b/etc/db/upgrades/upgrade36-v0.5.3-fix-final_lines_summary-view.sql @@ -0,0 +1,132 @@ +-- Fix final_lines_summary view +-- +-- New schema version: 0.5.3 +-- +-- ATTENTION: +-- +-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT. +-- +-- +-- NOTE: This upgrade affects all schemas in the database. +-- NOTE: Each application starts a transaction, which must be committed +-- or rolled back. +-- +-- This fixes a long-standing bug, where if the sail and source lines are +-- the same, the number of missing shots will be miscounted. +-- +-- This update fixes #313. +-- +-- To apply, run as the dougal user: +-- +-- psql <= s.fsp AND preplot_points.point <= s.lsp OR preplot_points.point >= s.lsp AND preplot_points.point <= s.fsp))) - s.num_preplots AS missing_shots, + s.length, + s.azimuth, + fl.remarks, + fl.meta + FROM summary s + JOIN final_lines fl USING (sequence); + + ALTER TABLE final_lines_summary + OWNER TO postgres; + +END; +$outer$ LANGUAGE plpgsql; + +CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$ +DECLARE + row RECORD; + current_db_version TEXT; +BEGIN + + SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version'; + + IF current_db_version >= '0.5.3' THEN + RAISE EXCEPTION + USING MESSAGE='Patch already applied'; + END IF; + + IF current_db_version != '0.5.2' THEN + RAISE EXCEPTION + USING MESSAGE='Invalid database version: ' || current_db_version, + HINT='Ensure all previous patches have been applied.'; + END IF; + + FOR row IN + SELECT schema_name FROM information_schema.schemata + WHERE schema_name LIKE 'survey_%' + ORDER BY schema_name + 
LOOP + CALL pg_temp.upgrade_survey_schema(row.schema_name); + END LOOP; +END; +$outer$ LANGUAGE plpgsql; + +CALL pg_temp.upgrade(); + +CALL pg_temp.show_notice('Cleaning up'); +DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text); +DROP PROCEDURE pg_temp.upgrade (); + +CALL pg_temp.show_notice('Updating db_schema version'); +INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.3"}') +ON CONFLICT (key) DO UPDATE + SET value = public.info.value || '{"db_schema": "0.5.3"}' WHERE public.info.key = 'version'; + + +CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes'); +DROP PROCEDURE pg_temp.show_notice (notice text); + +-- +--NOTE Run `COMMIT;` now if all went well +-- diff --git a/lib/www/client/source/package-lock.json b/lib/www/client/source/package-lock.json index 8f0f191..7f919b5 100644 --- a/lib/www/client/source/package-lock.json +++ b/lib/www/client/source/package-lock.json @@ -10,7 +10,9 @@ "license": "UNLICENSED", "dependencies": { "@mdi/font": "^7.2.96", + "buffer": "^6.0.3", "core-js": "^3.6.5", + "csv-parse": "^5.5.2", "d3": "^7.0.1", "jwt-decode": "^3.0.0", "leaflet": "^1.7.1", @@ -26,7 +28,8 @@ "vue-debounce": "^2.6.0", "vue-router": "^3.5.1", "vuetify": "^2.5.0", - "vuex": "^3.6.2" + "vuex": "^3.6.2", + "yaml": "^2.3.4" }, "devDependencies": { "@babel/plugin-proposal-logical-assignment-operators": "^7.14.5", @@ -3457,7 +3460,6 @@ "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "dev": true, "funding": [ { "type": "github", @@ -3508,6 +3510,30 @@ "readable-stream": "^3.4.0" } }, + "node_modules/bl/node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + 
"funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, "node_modules/bluebird": { "version": "3.7.2", "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", @@ -3647,10 +3673,9 @@ } }, "node_modules/buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "dev": true, + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", "funding": [ { "type": "github", @@ -3667,7 +3692,7 @@ ], "dependencies": { "base64-js": "^1.3.1", - "ieee754": "^1.1.13" + "ieee754": "^1.2.1" } }, "node_modules/buffer-from": { @@ -3752,9 +3777,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001559", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001559.tgz", - "integrity": "sha512-cPiMKZgqgkg5LY3/ntGeLFUpi6tzddBNS58A4tnTgQw1zON7u2sZMU7SzOeVH4tj20++9ggL+V6FDOFMTaFFYA==", + "version": "1.0.30001726", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001726.tgz", + "integrity": "sha512-VQAUIUzBiZ/UnlM28fSp2CRF3ivUn1BWEvxMcVTNwpw91Py1pGbPIyIKtd+tzct9C3ouceCVdGAXxZOpZAsgdw==", "dev": true, "funding": [ { @@ -3769,7 +3794,8 @@ "type": "github", "url": "https://github.com/sponsors/ai" } - ] + ], + "license": "CC-BY-4.0" }, "node_modules/case-sensitive-paths-webpack-plugin": { "version": "2.4.0", @@ -4283,6 +4309,15 @@ "node": ">=10" } }, + "node_modules/cosmiconfig/node_modules/yaml": { + "version": "1.10.2", + "resolved": 
"https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, "node_modules/cross-spawn": { "version": "6.0.5", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", @@ -4620,6 +4655,15 @@ "postcss": "^8.2.15" } }, + "node_modules/cssnano/node_modules/yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, "node_modules/csso": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz", @@ -4632,6 +4676,11 @@ "node": ">=8.0.0" } }, + "node_modules/csv-parse": { + "version": "5.5.2", + "resolved": "https://registry.npmjs.org/csv-parse/-/csv-parse-5.5.2.tgz", + "integrity": "sha512-YRVtvdtUNXZCMyK5zd5Wty1W6dNTpGKdqQd4EQ8tl/c6KW1aMBB1Kg1ppky5FONKmEqGJ/8WjLlTNLPne4ioVA==" + }, "node_modules/d3": { "version": "7.3.0", "resolved": "https://registry.npmjs.org/d3/-/d3-7.3.0.tgz", @@ -6448,7 +6497,6 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "dev": true, "funding": [ { "type": "github", @@ -11341,12 +11389,11 @@ "dev": true }, "node_modules/yaml": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", - "dev": true, + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz", + "integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==", "engines": { - "node": ">= 6" + 
"node": ">= 14" } }, "node_modules/yargs": { @@ -13920,8 +13967,7 @@ "base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "dev": true + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" }, "batch": { "version": "0.6.1", @@ -13950,6 +13996,18 @@ "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" + }, + "dependencies": { + "buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + } } }, "bluebird": { @@ -14060,13 +14118,12 @@ } }, "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "dev": true, + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", "requires": { "base64-js": "^1.3.1", - "ieee754": "^1.1.13" + "ieee754": "^1.2.1" } }, "buffer-from": { @@ -14133,9 +14190,9 @@ } }, "caniuse-lite": { - "version": "1.0.30001559", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001559.tgz", - "integrity": "sha512-cPiMKZgqgkg5LY3/ntGeLFUpi6tzddBNS58A4tnTgQw1zON7u2sZMU7SzOeVH4tj20++9ggL+V6FDOFMTaFFYA==", + "version": "1.0.30001726", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001726.tgz", + "integrity": "sha512-VQAUIUzBiZ/UnlM28fSp2CRF3ivUn1BWEvxMcVTNwpw91Py1pGbPIyIKtd+tzct9C3ouceCVdGAXxZOpZAsgdw==", "dev": true }, 
"case-sensitive-paths-webpack-plugin": { @@ -14518,6 +14575,14 @@ "parse-json": "^5.0.0", "path-type": "^4.0.0", "yaml": "^1.10.0" + }, + "dependencies": { + "yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true + } } }, "cross-spawn": { @@ -14710,6 +14775,14 @@ "cssnano-preset-default": "^5.2.14", "lilconfig": "^2.0.3", "yaml": "^1.10.2" + }, + "dependencies": { + "yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true + } } }, "cssnano-preset-default": { @@ -14765,6 +14838,11 @@ "css-tree": "^1.1.2" } }, + "csv-parse": { + "version": "5.5.2", + "resolved": "https://registry.npmjs.org/csv-parse/-/csv-parse-5.5.2.tgz", + "integrity": "sha512-YRVtvdtUNXZCMyK5zd5Wty1W6dNTpGKdqQd4EQ8tl/c6KW1aMBB1Kg1ppky5FONKmEqGJ/8WjLlTNLPne4ioVA==" + }, "d3": { "version": "7.3.0", "resolved": "https://registry.npmjs.org/d3/-/d3-7.3.0.tgz", @@ -16108,8 +16186,7 @@ "ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "dev": true + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==" }, "ignore": { "version": "5.2.4", @@ -19700,10 +19777,9 @@ "dev": true }, "yaml": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", - "dev": true + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz", + "integrity": 
"sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==" }, "yargs": { "version": "16.2.0", diff --git a/lib/www/client/source/package.json b/lib/www/client/source/package.json index 004831a..36aa669 100644 --- a/lib/www/client/source/package.json +++ b/lib/www/client/source/package.json @@ -8,7 +8,9 @@ }, "dependencies": { "@mdi/font": "^7.2.96", + "buffer": "^6.0.3", "core-js": "^3.6.5", + "csv-parse": "^5.5.2", "d3": "^7.0.1", "jwt-decode": "^3.0.0", "leaflet": "^1.7.1", @@ -24,7 +26,8 @@ "vue-debounce": "^2.6.0", "vue-router": "^3.5.1", "vuetify": "^2.5.0", - "vuex": "^3.6.2" + "vuex": "^3.6.2", + "yaml": "^2.3.4" }, "devDependencies": { "@babel/plugin-proposal-logical-assignment-operators": "^7.14.5", diff --git a/lib/www/client/source/src/components/app-bar-extension-project-list.vue b/lib/www/client/source/src/components/app-bar-extension-project-list.vue new file mode 100644 index 0000000..fb2b252 --- /dev/null +++ b/lib/www/client/source/src/components/app-bar-extension-project-list.vue @@ -0,0 +1,85 @@ + + + diff --git a/lib/www/client/source/src/components/app-bar-extension-project.vue b/lib/www/client/source/src/components/app-bar-extension-project.vue index 5ac8907..5bf35c7 100644 --- a/lib/www/client/source/src/components/app-bar-extension-project.vue +++ b/lib/www/client/source/src/components/app-bar-extension-project.vue @@ -1,6 +1,15 @@ @@ -35,6 +44,7 @@ export default { return this.tabs.findIndex(t => t.href == this.page); }, + ...mapGetters(["adminaccess"]) }, methods: { diff --git a/lib/www/client/source/src/components/decoder/delimited-string-decoder-field.vue b/lib/www/client/source/src/components/decoder/delimited-string-decoder-field.vue new file mode 100644 index 0000000..7f714f0 --- /dev/null +++ b/lib/www/client/source/src/components/decoder/delimited-string-decoder-field.vue @@ -0,0 +1,82 @@ + + + + + diff --git a/lib/www/client/source/src/components/decoder/delimited-string-decoder.vue 
b/lib/www/client/source/src/components/decoder/delimited-string-decoder.vue new file mode 100644 index 0000000..960afa8 --- /dev/null +++ b/lib/www/client/source/src/components/decoder/delimited-string-decoder.vue @@ -0,0 +1,366 @@ + + + + + diff --git a/lib/www/client/source/src/components/decoder/fixed-string-decoder-field.vue b/lib/www/client/source/src/components/decoder/fixed-string-decoder-field.vue new file mode 100644 index 0000000..b9b3f84 --- /dev/null +++ b/lib/www/client/source/src/components/decoder/fixed-string-decoder-field.vue @@ -0,0 +1,140 @@ + + + + + diff --git a/lib/www/client/source/src/components/decoder/fixed-string-decoder.vue b/lib/www/client/source/src/components/decoder/fixed-string-decoder.vue new file mode 100644 index 0000000..6386da8 --- /dev/null +++ b/lib/www/client/source/src/components/decoder/fixed-string-decoder.vue @@ -0,0 +1,486 @@ + + + + + diff --git a/lib/www/client/source/src/components/decoder/fixed-string-text.vue b/lib/www/client/source/src/components/decoder/fixed-string-text.vue new file mode 100644 index 0000000..dafe495 --- /dev/null +++ b/lib/www/client/source/src/components/decoder/fixed-string-text.vue @@ -0,0 +1,122 @@ + + + + + diff --git a/lib/www/client/source/src/components/decoder/saillines-string-decoder.vue b/lib/www/client/source/src/components/decoder/saillines-string-decoder.vue new file mode 100644 index 0000000..b4cdf21 --- /dev/null +++ b/lib/www/client/source/src/components/decoder/saillines-string-decoder.vue @@ -0,0 +1,301 @@ + + + + + diff --git a/lib/www/client/source/src/components/encoder/fixed-string-encoder-field.vue b/lib/www/client/source/src/components/encoder/fixed-string-encoder-field.vue new file mode 100644 index 0000000..8a2b43b --- /dev/null +++ b/lib/www/client/source/src/components/encoder/fixed-string-encoder-field.vue @@ -0,0 +1,269 @@ + + + + + diff --git a/lib/www/client/source/src/components/encoder/fixed-string-encoder-sample.vue 
b/lib/www/client/source/src/components/encoder/fixed-string-encoder-sample.vue new file mode 100644 index 0000000..02ac914 --- /dev/null +++ b/lib/www/client/source/src/components/encoder/fixed-string-encoder-sample.vue @@ -0,0 +1,351 @@ + + + + + diff --git a/lib/www/client/source/src/components/encoder/fixed-string-encoder.vue b/lib/www/client/source/src/components/encoder/fixed-string-encoder.vue new file mode 100644 index 0000000..f38fd11 --- /dev/null +++ b/lib/www/client/source/src/components/encoder/fixed-string-encoder.vue @@ -0,0 +1,307 @@ + + + + + diff --git a/lib/www/client/source/src/components/fields/field-content-dialog.vue b/lib/www/client/source/src/components/fields/field-content-dialog.vue new file mode 100644 index 0000000..70e94e0 --- /dev/null +++ b/lib/www/client/source/src/components/fields/field-content-dialog.vue @@ -0,0 +1,109 @@ + + + diff --git a/lib/www/client/source/src/components/fields/field-content.vue b/lib/www/client/source/src/components/fields/field-content.vue new file mode 100644 index 0000000..682c9d7 --- /dev/null +++ b/lib/www/client/source/src/components/fields/field-content.vue @@ -0,0 +1,242 @@ + + + diff --git a/lib/www/client/source/src/components/file-browser/file-browser-dialog.vue b/lib/www/client/source/src/components/file-browser/file-browser-dialog.vue new file mode 100644 index 0000000..52a03fa --- /dev/null +++ b/lib/www/client/source/src/components/file-browser/file-browser-dialog.vue @@ -0,0 +1,83 @@ + + + diff --git a/lib/www/client/source/src/components/file-browser/file-browser.vue b/lib/www/client/source/src/components/file-browser/file-browser.vue new file mode 100644 index 0000000..2e578ee --- /dev/null +++ b/lib/www/client/source/src/components/file-browser/file-browser.vue @@ -0,0 +1,150 @@ + + + diff --git a/lib/www/client/source/src/components/json-builder/json-builder.vue b/lib/www/client/source/src/components/json-builder/json-builder.vue new file mode 100644 index 0000000..ed3bdca --- /dev/null 
+++ b/lib/www/client/source/src/components/json-builder/json-builder.vue @@ -0,0 +1,557 @@ + + + diff --git a/lib/www/client/source/src/components/json-builder/property-dialog.vue b/lib/www/client/source/src/components/json-builder/property-dialog.vue new file mode 100644 index 0000000..db5de0c --- /dev/null +++ b/lib/www/client/source/src/components/json-builder/property-dialog.vue @@ -0,0 +1,125 @@ + + + diff --git a/lib/www/client/source/src/components/project-settings/asaqc.vue b/lib/www/client/source/src/components/project-settings/asaqc.vue new file mode 100644 index 0000000..e0fb85d --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/asaqc.vue @@ -0,0 +1,130 @@ + + + diff --git a/lib/www/client/source/src/components/project-settings/binning.vue b/lib/www/client/source/src/components/project-settings/binning.vue new file mode 100644 index 0000000..88e536f --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/binning.vue @@ -0,0 +1,196 @@ + + + diff --git a/lib/www/client/source/src/components/project-settings/file-matching-parameters.vue b/lib/www/client/source/src/components/project-settings/file-matching-parameters.vue new file mode 100644 index 0000000..86b3e4b --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/file-matching-parameters.vue @@ -0,0 +1,312 @@ + + + + + diff --git a/lib/www/client/source/src/components/project-settings/file-path.vue b/lib/www/client/source/src/components/project-settings/file-path.vue new file mode 100644 index 0000000..a392147 --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/file-path.vue @@ -0,0 +1,101 @@ + + + diff --git a/lib/www/client/source/src/components/project-settings/geodetics.vue b/lib/www/client/source/src/components/project-settings/geodetics.vue new file mode 100644 index 0000000..ebacebc --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/geodetics.vue @@ -0,0 +1,94 @@ + + + diff --git 
a/lib/www/client/source/src/components/project-settings/groups.vue b/lib/www/client/source/src/components/project-settings/groups.vue new file mode 100644 index 0000000..4b8bde7 --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/groups.vue @@ -0,0 +1,157 @@ + + + diff --git a/lib/www/client/source/src/components/project-settings/input-final-p111.vue b/lib/www/client/source/src/components/project-settings/input-final-p111.vue new file mode 100644 index 0000000..bad6553 --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/input-final-p111.vue @@ -0,0 +1,201 @@ + + + + + diff --git a/lib/www/client/source/src/components/project-settings/input-final-pending.vue b/lib/www/client/source/src/components/project-settings/input-final-pending.vue new file mode 100644 index 0000000..7193348 --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/input-final-pending.vue @@ -0,0 +1,115 @@ + + + diff --git a/lib/www/client/source/src/components/project-settings/input-raw-ntbp.vue b/lib/www/client/source/src/components/project-settings/input-raw-ntbp.vue new file mode 100644 index 0000000..7d81227 --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/input-raw-ntbp.vue @@ -0,0 +1,115 @@ + + + diff --git a/lib/www/client/source/src/components/project-settings/input-raw-p111.vue b/lib/www/client/source/src/components/project-settings/input-raw-p111.vue new file mode 100644 index 0000000..18ed0e5 --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/input-raw-p111.vue @@ -0,0 +1,202 @@ + + + + + diff --git a/lib/www/client/source/src/components/project-settings/input-smartsource-header.vue b/lib/www/client/source/src/components/project-settings/input-smartsource-header.vue new file mode 100644 index 0000000..6f6c491 --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/input-smartsource-header.vue @@ -0,0 +1,137 @@ + + + + + diff --git 
a/lib/www/client/source/src/components/project-settings/input-smartsource-segy.vue b/lib/www/client/source/src/components/project-settings/input-smartsource-segy.vue new file mode 100644 index 0000000..17e12ac --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/input-smartsource-segy.vue @@ -0,0 +1,138 @@ + + + + + diff --git a/lib/www/client/source/src/components/project-settings/name-id-geodetics.vue b/lib/www/client/source/src/components/project-settings/name-id-geodetics.vue new file mode 100644 index 0000000..cfd9da5 --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/name-id-geodetics.vue @@ -0,0 +1,94 @@ + + + diff --git a/lib/www/client/source/src/components/project-settings/name-id-rootpath.vue b/lib/www/client/source/src/components/project-settings/name-id-rootpath.vue new file mode 100644 index 0000000..43ead5f --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/name-id-rootpath.vue @@ -0,0 +1,101 @@ + + + diff --git a/lib/www/client/source/src/components/project-settings/name-id.vue b/lib/www/client/source/src/components/project-settings/name-id.vue new file mode 100644 index 0000000..554cb45 --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/name-id.vue @@ -0,0 +1,100 @@ + + + diff --git a/lib/www/client/source/src/components/project-settings/not-implemented.vue b/lib/www/client/source/src/components/project-settings/not-implemented.vue new file mode 100644 index 0000000..9488eb7 --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/not-implemented.vue @@ -0,0 +1,65 @@ + + + diff --git a/lib/www/client/source/src/components/project-settings/online-line-name-format.vue b/lib/www/client/source/src/components/project-settings/online-line-name-format.vue new file mode 100644 index 0000000..03efedc --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/online-line-name-format.vue @@ -0,0 +1,113 @@ + + + + + diff --git 
a/lib/www/client/source/src/components/project-settings/planner.vue b/lib/www/client/source/src/components/project-settings/planner.vue new file mode 100644 index 0000000..72393ff --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/planner.vue @@ -0,0 +1,132 @@ + + + diff --git a/lib/www/client/source/src/components/project-settings/preplots-preplot.vue b/lib/www/client/source/src/components/project-settings/preplots-preplot.vue new file mode 100644 index 0000000..f36fed3 --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/preplots-preplot.vue @@ -0,0 +1,520 @@ + + + diff --git a/lib/www/client/source/src/components/project-settings/preplots.vue b/lib/www/client/source/src/components/project-settings/preplots.vue new file mode 100644 index 0000000..a55ec14 --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/preplots.vue @@ -0,0 +1,214 @@ + + + + + diff --git a/lib/www/client/source/src/components/project-settings/production.vue b/lib/www/client/source/src/components/project-settings/production.vue new file mode 100644 index 0000000..1ae1a90 --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/production.vue @@ -0,0 +1,103 @@ + + + diff --git a/lib/www/client/source/src/components/project-settings/regex-pattern-captures.vue b/lib/www/client/source/src/components/project-settings/regex-pattern-captures.vue new file mode 100644 index 0000000..944f6ef --- /dev/null +++ b/lib/www/client/source/src/components/project-settings/regex-pattern-captures.vue @@ -0,0 +1,117 @@ + + + + + diff --git a/lib/www/client/source/src/lib/deepMerge.js b/lib/www/client/source/src/lib/deepMerge.js new file mode 120000 index 0000000..0efaea4 --- /dev/null +++ b/lib/www/client/source/src/lib/deepMerge.js @@ -0,0 +1 @@ +../../../../server/lib/utils/deepMerge.js \ No newline at end of file diff --git a/lib/www/client/source/src/lib/hsl.js b/lib/www/client/source/src/lib/hsl.js new file mode 100644 index 
0000000..a3e342f --- /dev/null +++ b/lib/www/client/source/src/lib/hsl.js @@ -0,0 +1,47 @@ +/** Return an HSL colour as a function of an input value + * `str`. + * + * Consider using as getHSL.bind(this) in Vue components + * in order to get access to the Vuetify theme configuration. + */ +function getHSL (str, saturation = 1, lightness = 0.25, offset = 0) { + + function getHash (v) { + if (typeof (v??false)[Symbol.iterator] != "function") { + // Not an iterable, make it one + v = String(v); + } + + return Math.abs([...v, ..." "].reduce( (acc, cur) => String(cur).charCodeAt(0) + ((acc << 5) - acc), 0 )); + } + + const h = (getHash(str) + offset) % 360; + const s = saturation * 100; + const l = this?.$vuetify?.theme?.isDark + ? (1-lightness) * 100 + : lightness * 100; + + return {h, s, l}; + +} + +/** Return a CSS hsl() or hsla() colour + * representation as a function of an input value. + * + * Consider using as getHSLColourFor.bind(this) – See + * note for getHSL() above. + */ +function getHSLColourFor (str, opacity = 1, saturation, lightness, offset) { + const _getHSL = getHSL.bind(this); + const {h, s, l} = _getHSL(str, saturation, lightness, offset); + if (opacity == 1) { + return `hsl(${h},${s}%,${l}%)`; + } else { + return `hsla(${h},${s}%,${l}%, ${opacity})`; + } +} + +export { + getHSL, + getHSLColourFor +} diff --git a/lib/www/client/source/src/lib/truncate-text.js b/lib/www/client/source/src/lib/truncate-text.js new file mode 100644 index 0000000..554190b --- /dev/null +++ b/lib/www/client/source/src/lib/truncate-text.js @@ -0,0 +1,10 @@ + +function truncateText (text, length=20) { + if (text?.length <= length) { + return text; + } else { + return text.slice(0, length/2)+"…"+text.slice(-(length/2)); + } +} + +export default truncateText; diff --git a/lib/www/client/source/src/lib/utils.js b/lib/www/client/source/src/lib/utils.js index 5ca7aa1..a88095c 100644 --- a/lib/www/client/source/src/lib/utils.js +++ b/lib/www/client/source/src/lib/utils.js @@ 
-93,25 +93,25 @@ function geometryAsString (item, opts = {}) { } /** Extract preferences by prefix. - * + * * This function returns a lambda which, given * a key or a prefix, extracts the relevant * preferences from the designated preferences * store. - * + * * For instance, assume preferences = { * "a.b.c.d": 1, * "a.b.e.f": 2, * "g.h": 3 * } - * + * * And Ī» = preferencesĪ»(preferences). Then: - * + * * Ī»("a.b") → { "a.b.c.d": 1, "a.b.e.f": 2 } * Ī»("a.b.e.f") → { "a.b.e.f": 2 } * Ī»("g.x", {"g.x.": 99}) → { "g.x.": 99 } * Ī»("a.c", {"g.x.": 99}) → { "g.x.": 99 } - * + * * Note from the last two examples that a default value * may be provided and will be returned if a key does * not exist or is not searched for. @@ -133,9 +133,109 @@ function preferencesĪ» (preferences) { } +/** Compare two possibly complex values for + * loose equality, going as deep as required in the + * case of complex objects. + */ +function deepCompare (a, b) { + if (typeof a == "object" && typeof b == "object") { + return !Object.entries(a).some( ([k, v]) => !deepCompare(v, b[k])) && + !Object.entries(b).some( ([k, v]) => !deepCompare(v, a[k])); + } else { + return a == b; + } +} + +/** Compare two possibly complex values for + * strict equality. + */ +function deepEqual (a, b) { + if (typeof a === "object" && typeof b === "object") { + return !Object.entries(a).some( ([k, v]) => !deepEqual(v, b[k])) && + !Object.entries(b).some( ([k, v]) => !deepEqual(v, a[k])); + } else { + return a === b; + } +} + +/** Traverses an object and sets a nested value. + * + * Example: + * + * const obj = {a: {b: {c: "X"} } } + * deepSet(obj, ["a", "b", "c"], "d") + * → {a: {b: {c: "d"} } } + * + * This would be the equivalent of: + * + * obj?.a?.b?.c = "d"; + * + * Except that the above is not a legal expression. + * + * If a non-leaf property does not exist, this function + * creates it as an empty object ({}) and keeps traversing. 
+ * + * The last member of `path` may be `null`, in which case, + * if the object pointed to by the next to last member is + * an array, an insert operation will take place. + * + */ +function deepSet (obj, path, value) { + const key = path.shift(); + if (!path.length) { + if (key === null && Array.isArray(obj)) { + obj.push(value); + } else { + obj[key] = value; + } + } else { + if (!Object.hasOwn(obj, key)) { + obj[key] = {}; + } + deepSet(obj[key], path, value); + } +} + +/** Returns a nested property. + * + * Example: + * + * const obj = {a: {b: {c: "d"} } } + * deepSet(obj, ["a", "b", "c"]) + * → "d" + * + * If `path` is known in advance, this is effectively + * the same as: + * + * obj?.a?.b?.c + * + * This might be useful when `path` is dynamic. + */ +function deepValue (obj, path) { + if (obj !== undefined) { + const key = path.shift(); + if (!path.length) { + if (key === undefined) { + return obj; + } else { + return obj[key]; + } + } else { + return deepValue(obj[key], path); + } + } +} + +// Just to have all the deep*()s in one place +import deepMerge from './deepMerge' export { withParentProps, geometryAsString, - preferencesĪ» + preferencesĪ», + deepMerge, + deepCompare, + deepEqual, + deepSet, + deepValue } diff --git a/lib/www/client/source/src/lib/watcher-mixin.js b/lib/www/client/source/src/lib/watcher-mixin.js new file mode 100644 index 0000000..f0ab525 --- /dev/null +++ b/lib/www/client/source/src/lib/watcher-mixin.js @@ -0,0 +1,14 @@ +import { deepCompare } from './utils'; + +function setIfDifferent(propsLocals) { + return Object.fromEntries(Object.entries(propsLocals).map( ([prop, local]) => [ + local, + () => { + if (!deepCompare(this[prop], this[local])){ + this[local] = structuredClone(this[prop]); + } + } + ])); +} + +export default setIfDifferent; diff --git a/lib/www/client/source/src/router/index.js b/lib/www/client/source/src/router/index.js index 199eae7..4cc2ddb 100644 --- a/lib/www/client/source/src/router/index.js +++ 
b/lib/www/client/source/src/router/index.js @@ -16,7 +16,9 @@ import Log from '../views/Log.vue' import QC from '../views/QC.vue' import Graphs from '../views/Graphs.vue' import Map from '../views/Map.vue' +import ProjectSettings from '../views/ProjectSettings.vue' import DougalAppBarExtensionProject from '../components/app-bar-extension-project' +import DougalAppBarExtensionProjectList from '../components/app-bar-extension-project-list' Vue.use(VueRouter) @@ -80,7 +82,10 @@ Vue.use(VueRouter) meta: { breadcrumbs: [ { text: "Projects", href: "/projects", disabled: true } - ] + ], + appBarExtension: { + // component: DougalAppBarExtensionProjectList + } } }, { @@ -168,6 +173,11 @@ Vue.use(VueRouter) path: "map", name: "map", component: Map + }, + { + path: "configuration", + name: "configuration", + component: ProjectSettings } ] } diff --git a/lib/www/client/source/src/views/Log.vue b/lib/www/client/source/src/views/Log.vue index 7fe91d4..111f5c3 100644 --- a/lib/www/client/source/src/views/Log.vue +++ b/lib/www/client/source/src/views/Log.vue @@ -488,6 +488,9 @@ export default { rows () { const rows = {}; this.items + .filter(i => { + return !this.$route.params.sequence || (this.$route.params.sequence == i.sequence) + }) .filter(i => { for (const label of this.filterableLabels) { if (!this.shownLabels.includes(label) && i.labels.includes(label)) { diff --git a/lib/www/client/source/src/views/ProjectList.vue b/lib/www/client/source/src/views/ProjectList.vue index 9fd7a82..da7d2e9 100644 --- a/lib/www/client/source/src/views/ProjectList.vue +++ b/lib/www/client/source/src/views/ProjectList.vue @@ -6,6 +6,7 @@ :items="displayItems" :options.sync="options" :loading="loading" + @contextmenu:row="contextMenu" > + + @@ -54,6 +62,38 @@ + + + + mdi-file-document-edit-outline + Edit project settings + + + + mdi-sheep + Clone project + + + + + + + + + @@ -65,10 +105,15 @@ td p:last-of-type { diff --git a/lib/www/client/source/src/views/SequenceList.vue 
b/lib/www/client/source/src/views/SequenceList.vue index 56163f9..6e0bcbc 100644 --- a/lib/www/client/source/src/views/SequenceList.vue +++ b/lib/www/client/source/src/views/SequenceList.vue @@ -712,7 +712,11 @@ export default { line: this.contextMenuItem.line, fsp: sp0, lsp: sp1, - remarks: `Reshoot of sequence ${this.contextMenuItem.sequence}.` + remarks: `Reshoot of sequence ${this.contextMenuItem.sequence}.`, + meta: { + is_reshoot: true, + original_sequence: this.contextMenuItem.sequence + } } console.log("Plan", payload); const url = `/project/${this.$route.params.project}/plan`; diff --git a/lib/www/client/source/vue.config.js b/lib/www/client/source/vue.config.js index 0a24ebd..075e22f 100644 --- a/lib/www/client/source/vue.config.js +++ b/lib/www/client/source/vue.config.js @@ -1,3 +1,5 @@ +const webpack = require('webpack'); + module.exports = { "transpileDependencies": [ "vuetify", @@ -41,6 +43,13 @@ module.exports = { path: require.resolve("path-browserify") } }, + plugins: [ + // Work around for Buffer is undefined: + // https://github.com/webpack/changelog-v5/issues/10 + new webpack.ProvidePlugin({ + Buffer: ['buffer', 'Buffer'], + }) + ], module: { rules: [ { diff --git a/lib/www/server/api/index.js b/lib/www/server/api/index.js index 3065a0b..1f86ac3 100644 --- a/lib/www/server/api/index.js +++ b/lib/www/server/api/index.js @@ -114,6 +114,7 @@ app.map({ '/project/:project/configuration': { get: [ mw.project.configuration.get ], // Get project configuration patch: [ mw.auth.access.admin, mw.project.configuration.patch ], // Modify project configuration + put: [ mw.auth.access.admin, mw.project.configuration.put ], // Overwrite configuration }, /* @@ -184,6 +185,17 @@ app.map({ delete: [ mw.auth.access.write, mw.plan.delete ] }, + /* + * Line name endpoints + */ + + '/project/:project/linename': { + post: [ mw.linename.post ], // Get a linename + }, + '/project/:project/linename/properties': { + get: [ mw.linename.properties.get ], // Get linename 
properties + }, + /* * Event log endpoints */ @@ -311,6 +323,9 @@ app.map({ } } }, + '/diagnostics/': { + get: [ mw.auth.access.write, mw.etag.noSave, mw.admin.diagnostics.get ] + }, '/rss/': { get: [ mw.rss.get ] } @@ -361,7 +376,9 @@ app.use(function (err, req, res, next) { }); app.get("*", (req, res, next) => { - res.status(404).send({status: 404, message: "This endpoint does not exist"}); + if (!res.headersSent) { + res.status(404).send({status: 404, message: "This endpoint does not exist"}); + } }); app.disable('x-powered-by'); diff --git a/lib/www/server/api/middleware/admin/diagnostics/get.js b/lib/www/server/api/middleware/admin/diagnostics/get.js new file mode 100644 index 0000000..28d6e1b --- /dev/null +++ b/lib/www/server/api/middleware/admin/diagnostics/get.js @@ -0,0 +1,17 @@ + +const diagnostics = require('../../../../lib/diagnostics'); + +module.exports = async function (req, res, next) { + + try { + const d = await diagnostics(); + if (req.user?.role != "admin" && req.user?.role != "user") { + } + res.status(200).json(d); + } catch (err) { + next(err); + return; + } + next(); + +}; diff --git a/lib/www/server/api/middleware/admin/diagnostics/index.js b/lib/www/server/api/middleware/admin/diagnostics/index.js new file mode 100644 index 0000000..fe5c66b --- /dev/null +++ b/lib/www/server/api/middleware/admin/diagnostics/index.js @@ -0,0 +1,4 @@ + +module.exports = { + get: require('./get') +} diff --git a/lib/www/server/api/middleware/admin/index.js b/lib/www/server/api/middleware/admin/index.js new file mode 100644 index 0000000..622d97a --- /dev/null +++ b/lib/www/server/api/middleware/admin/index.js @@ -0,0 +1,3 @@ +module.exports = { + diagnostics: require('./diagnostics') +}; diff --git a/lib/www/server/api/middleware/index.js b/lib/www/server/api/middleware/index.js index c765936..645179b 100644 --- a/lib/www/server/api/middleware/index.js +++ b/lib/www/server/api/middleware/index.js @@ -3,6 +3,7 @@ module.exports = { files: require('./files'), 
   plan: require('./plan'),
   line: require('./line'),
+  linename: require('./linename'),
   project: require('./project'),
   sequence: require('./sequence'),
   user: require('./user'),
@@ -18,5 +19,6 @@ module.exports = {
   openapi: require('./openapi'),
   rss: require('./rss'),
   etag: require('./etag'),
-  version: require('./version')
+  version: require('./version'),
+  admin: require('./admin')
 };
diff --git a/lib/www/server/api/middleware/linename/index.js b/lib/www/server/api/middleware/linename/index.js
new file mode 100644
index 0000000..0f5e59b
--- /dev/null
+++ b/lib/www/server/api/middleware/linename/index.js
@@ -0,0 +1,4 @@
+module.exports = {
+  properties: require('./properties'),
+  post: require('./post'),
+};
diff --git a/lib/www/server/api/middleware/linename/post.js b/lib/www/server/api/middleware/linename/post.js
new file mode 100644
index 0000000..b95cdae
--- /dev/null
+++ b/lib/www/server/api/middleware/linename/post.js
@@ -0,0 +1,21 @@
+
+const { linename } = require('../../../lib/db');
+
+module.exports = async function (req, res, next) {
+
+  try {
+    const payload = req.body;
+
+    const line = await linename.post(req.params.project, payload);
+    if (line) {
+      res.status(200).type("text/plain").send(line);
+    } else {
+      res.status(404).send();
+    }
+    next();
+  } catch (err) {
+    next(err);
+  }
+
+
+};
diff --git a/lib/www/server/api/middleware/linename/properties/get.js b/lib/www/server/api/middleware/linename/properties/get.js
new file mode 100644
index 0000000..b951bba
--- /dev/null
+++ b/lib/www/server/api/middleware/linename/properties/get.js
@@ -0,0 +1,21 @@
+
+const { linename } = require('../../../../lib/db');
+
+module.exports = async function (req, res, next) {
+
+  try {
+    const payload = req.body;
+
+    const properties = await linename.properties.get(req.params.project, payload);
+    if (properties) {
+      res.status(200).send(properties);
+    } else {
+      res.status(404).send();
+    }
+    next();
+  } catch (err) {
+    next(err);
+  }
+
+
+};
diff --git 
a/lib/www/server/api/middleware/linename/properties/index.js b/lib/www/server/api/middleware/linename/properties/index.js new file mode 100644 index 0000000..14c855b --- /dev/null +++ b/lib/www/server/api/middleware/linename/properties/index.js @@ -0,0 +1,3 @@ +module.exports = { + get: require('./get'), +}; diff --git a/lib/www/server/api/middleware/project/configuration/index.js b/lib/www/server/api/middleware/project/configuration/index.js index 6b40791..821d84c 100644 --- a/lib/www/server/api/middleware/project/configuration/index.js +++ b/lib/www/server/api/middleware/project/configuration/index.js @@ -2,7 +2,7 @@ module.exports = { get: require('./get'), // post: require('./post'), - // put: require('./put'), + put: require('./put'), patch: require('./patch'), // delete: require('./delete'), }; diff --git a/lib/www/server/api/middleware/project/configuration/put.js b/lib/www/server/api/middleware/project/configuration/put.js new file mode 100644 index 0000000..082877b --- /dev/null +++ b/lib/www/server/api/middleware/project/configuration/put.js @@ -0,0 +1,16 @@ + +const { project } = require('../../../../lib/db'); + +module.exports = async function (req, res, next) { + + try { + // TODO + // Implement If-Match header requirements + res.send(await project.configuration.put(req.params.project, req.body)); + next(); + } catch (err) { + next(err); + } + + +}; diff --git a/lib/www/server/lib/db/index.js b/lib/www/server/lib/db/index.js index 2748564..ae7c2f0 100644 --- a/lib/www/server/lib/db/index.js +++ b/lib/www/server/lib/db/index.js @@ -2,6 +2,7 @@ module.exports = { project: require('./project'), line: require('./line'), + linename: require('./linename'), sequence: require('./sequence'), event: require('./event'), plan: require('./plan'), diff --git a/lib/www/server/lib/db/linename/index.js b/lib/www/server/lib/db/linename/index.js new file mode 100644 index 0000000..0f5e59b --- /dev/null +++ b/lib/www/server/lib/db/linename/index.js @@ -0,0 +1,4 @@ 
+module.exports = { + properties: require('./properties'), + post: require('./post'), +}; diff --git a/lib/www/server/lib/db/linename/post.js b/lib/www/server/lib/db/linename/post.js new file mode 100644 index 0000000..faed019 --- /dev/null +++ b/lib/www/server/lib/db/linename/post.js @@ -0,0 +1,39 @@ +const { setSurvey, transaction } = require('../connection'); +const lib = require('../plan/lib'); + +async function post (projectId, payload, opts = {}) { + + const client = await setSurvey(projectId); + try { + + if (!payload.sequence) { + payload.sequence = await lib.getSequence(client); + } + // if (!payload.ts0 || !payload.ts1) { + // const ts = await lib.getTimestamps(client, projectId, payload); + // if (!payload.ts0) { + // payload.ts0 = ts.ts0; + // } + // if (!payload.ts1) { + // payload.ts1 = ts.ts1; + // } + // } + const name = await lib.getLineName(client, projectId, payload); + + return name; + } catch (err) { + if (err.code && Math.trunc(err.code/1000) == 23) { + // Class 23 — Integrity Constraint Violation + console.error(err); + throw { status: 400, message: "Malformed request" }; + } else { + throw err; + } + } finally { + client.release(); + } + + return; +} + +module.exports = post; diff --git a/lib/www/server/lib/db/linename/properties/get.js b/lib/www/server/lib/db/linename/properties/get.js new file mode 100644 index 0000000..661bd61 --- /dev/null +++ b/lib/www/server/lib/db/linename/properties/get.js @@ -0,0 +1,15 @@ +const lib = require('../../plan/lib'); + +async function get (projectId, payload, opts = {}) { + + try { + + return await lib.getLineNameProperties(); + + } catch (err) { + throw err; + } + +} + +module.exports = get; diff --git a/lib/www/server/lib/db/linename/properties/index.js b/lib/www/server/lib/db/linename/properties/index.js new file mode 100644 index 0000000..14c855b --- /dev/null +++ b/lib/www/server/lib/db/linename/properties/index.js @@ -0,0 +1,3 @@ +module.exports = { + get: require('./get'), +}; diff --git 
a/lib/www/server/lib/db/plan/lib/index.js b/lib/www/server/lib/db/plan/lib/index.js index abcf369..a78872a 100644 --- a/lib/www/server/lib/db/plan/lib/index.js +++ b/lib/www/server/lib/db/plan/lib/index.js @@ -1,6 +1,12 @@ +const YAML = require('yaml'); +const fs = require('fs').promises; +const path = require('path'); + const alert = require("../../../alerts"); const configuration = require('../../configuration'); +let lineNameProperties; + async function getDistance (client, payload) { const text = ` SELECT ST_Distance(pp0.geometry, pp1.geometry) distance @@ -88,8 +94,6 @@ async function getPlanned (client) { async function getLineName (client, projectId, payload) { - // FIXME TODO Get line name script from configuration - // Ref.: https://gitlab.com/wgp/dougal/software/-/issues/129 // This is to monitor #165 // https://gitlab.com/wgp/dougal/software/-/issues/incident/165 @@ -97,6 +101,36 @@ async function getLineName (client, projectId, payload) { alert({function: "getLineName", client, projectId, payload}); } + const lineNameBuilder = await configuration.get(projectId, "online/line/lineNameBuilder"); + const fields = lineNameBuilder?.fields; + + if (fields) { + const properties = await getLineNameProperties(); + const values = await getLineNameValues(client, projectId, payload, lineNameBuilder?.values); + return buildLineName(properties, fields, values, payload?.name); + } else { + // TODO send a user notification via WS to let them know + // they haven't configured the line name parameters + } + + // return undefined +} + +/** Get line properties that go into making a line name. + * + * The properties are defined in a separate YAML file for + * convenience. 
+ */ +async function getLineNameProperties () { + if (!lineNameProperties) { + const buffer = await fs.readFile(path.join(__dirname, 'linename-properties.yaml')); + lineNameProperties = YAML.parse(buffer.toString()); + } + + return lineNameProperties; +} + +async function getLineNameValues (client, projectId, payload, otherValues = {}) { const planned = await getPlanned(client); const previous = await getSequencesForLine(client, payload.line); const attempt = planned.filter(r => r.line == payload.line).concat(previous).length; @@ -104,9 +138,79 @@ async function getLineName (client, projectId, payload) { const incr = p.lsp > p.fsp; const sequence = p.sequence || 1; const line = p.line; - return `${incr?"1":"2"}0${line}${attempt}${sequence.toString().padStart(3, "0")}S00000`; + + return { + ...structuredClone(otherValues), + line_number: payload.line, + sequence_number: payload.sequence || 1, + original_sequence: payload.meta?.original_sequence, + pass_number: attempt, + is_prime: attempt == 0, + is_reshoot: payload.meta?.is_reshoot ?? (!payload.meta?.is_infill && attempt > 0), + is_infill: payload.meta?.is_infill ?? false, + direction: null, // TODO + is_incrementing: incr + }; } +/** Compute the string representation of a line name field + */ +function fieldValue (properties, field, values) { + let value; + + if (field.item == "text") { + value = field.value; + } else if (properties[field.item]?.type == "boolean") { + if (values[field.item] === field.when) { + value = field.value; + } + } else { + value = values[field.item]; + } + + if (value != null) { + + if (properties[field.item]?.type == "number") { + if (field.scale_multiplier != null) { + value *= field.scale_multiplier; + } + if (field.scale_offset != null) { + value += field.scale_offset; + } + + if (field.format == "integer") { + value = Math.round(value); + } + } + + value = String(value); + if (field.pad_side == "left") { + value = value.padStart(field.length, field.pad_string ?? 
" "); + } else if (field.pad_side == "right") { + value = value.padEnd(field.length, field.pad_string ?? " "); + } + + return value; + } +} + + +/** Build a line name out of its component properties, fields and values. + * + * NOTE: This is the same function as available client-side on + * `fixed-string-encoder.vue`. Consider merging them. + */ +function buildLineName (properties, fields, values, str = "") { + const length = fields.reduce( (acc, cur) => (cur.offset + cur.length) > acc ? (cur.offset + cur.length) : acc, str.length ) + str = str.padEnd(length); + for (const field of fields) { + const value = fieldValue(properties, field, values); + if (value != null) { + str = str.slice(0, field.offset) + value + str.slice(field.offset + field.length); + } + } + return str; +} module.exports = { getDistance, @@ -114,5 +218,8 @@ module.exports = { getTimestamps, getSequencesForLine, getPlanned, - getLineName + getLineNameProperties, + getLineNameValues, + getLineName, + buildLineName }; diff --git a/lib/www/server/lib/db/plan/lib/linename-properties.yaml b/lib/www/server/lib/db/plan/lib/linename-properties.yaml new file mode 100644 index 0000000..0c442cb --- /dev/null +++ b/lib/www/server/lib/db/plan/lib/linename-properties.yaml @@ -0,0 +1,51 @@ +# +# These are the properties that can be used to build +# line names. 
+#
+
+line_number:
+  summary: Line number
+  description: The sailline number that is to be acquired
+  type: number
+  format: integer
+sequence_number:
+  summary: Sequence
+  description: The sequence number that will be assigned to this line
+  type: number
+  format: integer
+original_sequence:
+  summary: Original sequence
+  description: The original sequence number of the line that is being reshot
+  type: number
+  format: integer
+pass_number:
+  summary: Pass number
+  description: The number of times this line, or section of line, has been shot
+  type: number
+  format: integer
+is_prime:
+  summary: Prime line
+  description: Whether this is the first time this line is being acquired
+  type: boolean
+is_reshoot:
+  summary: Reshoot
+  description: Whether this is a reshoot (mutually exclusive with `is_prime` and `is_infill`)
+  type: boolean
+is_infill:
+  summary: Infill line
+  description: Whether this is an infill line (mutually exclusive with `is_prime` and `is_reshoot`)
+  type: boolean
+direction:
+  summary: Line azimuth
+  description: The line azimuth in the Incrementing shotpoints direction
+  type: number
+  format: float
+is_incrementing:
+  summary: Incrementing
+  description: Whether the line is being shot low to high point numbers or vice versa
+  type: boolean
+text:
+  summary: Fixed text
+  description: Arbitrary user-entered text (line prefix, suffix, etc.)
+ type: text + diff --git a/lib/www/server/lib/db/project/configuration/index.js b/lib/www/server/lib/db/project/configuration/index.js index 6b40791..821d84c 100644 --- a/lib/www/server/lib/db/project/configuration/index.js +++ b/lib/www/server/lib/db/project/configuration/index.js @@ -2,7 +2,7 @@ module.exports = { get: require('./get'), // post: require('./post'), - // put: require('./put'), + put: require('./put'), patch: require('./patch'), // delete: require('./delete'), }; diff --git a/lib/www/server/lib/db/project/configuration/put.js b/lib/www/server/lib/db/project/configuration/put.js new file mode 100644 index 0000000..ace06cc --- /dev/null +++ b/lib/www/server/lib/db/project/configuration/put.js @@ -0,0 +1,58 @@ +const { setSurvey } = require('../../connection'); +const { deepMerge, removeNulls } = require('../../../utils'); +const { modify } = require('../create'); + + +async function put (projectId, payload, opts = {}) { + let client; + try { + client = await setSurvey(); // Use public schema + + const text = ` + SELECT meta + FROM projects + WHERE pid = $1; + `; + + const res = await client.query(text, [projectId]); + + const source = res.rows[0].meta; + + if (!source) { + throw { status: 404, message: "Not found" }; + } + + console.log("PAYLOAD ID", payload.id, typeof payload); + if (("id" in payload) && (projectId.toLowerCase() != payload.id.toLowerCase())) { + throw { + status: 422, + message: "Project ID cannot be changed in this Dougal version" + } + } + + if (("name" in payload) && source.name && (source.name != payload.name)) { + throw { + status: 422, + message: "Project name cannot be changed in this Dougal version" + } + } + + // We do not allow users to change the schema + delete payload.schema; + + const dest = removeNulls(payload); + await modify(projectId, dest); + return dest; + + } catch (err) { + if (err.code == "42P01") { + throw { status: 404, message: "Not found" }; + } else { + throw err; + } + } finally { + client.release(); + } 
+} + +module.exports = put; diff --git a/lib/www/server/lib/diagnostics.js b/lib/www/server/lib/diagnostics.js new file mode 100644 index 0000000..1f6f181 --- /dev/null +++ b/lib/www/server/lib/diagnostics.js @@ -0,0 +1,102 @@ +const os = require('os'); +const { statfs } = require('fs').promises; +const { pool } = require('./db/connection'); +const cfg = require('./config'); +const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename); + + +/** Return filesystem statistics + */ +async function df (fs="/") { + const s = await statfs(fs); + + if (s) { + const total = (s.bsize * s.blocks); // bytes + const free = (s.bfree * s.bsize); + const available = (s.bavail * s.bsize); + const used = total - free; + const usedPercent = used/total * 100 + return { + total, + free, + available, + used, + usedPercent + } + } +} + +/** Return the size of the Dougal database + */ +async function dbSize () { + const client = await pool.connect(); + let res; + try { + res = (await client.query("SELECT pg_database_size(current_database()) size;"))?.rows[0]; + } catch (err) { + ERROR(err); + } finally { + client.release(); + } + return res; +} + +async function dbSchemaSizes () { + const text = ` + SELECT pid, + (sum(table_size)::bigint) size, + ((sum(table_size) / pg_database_size(current_database())) * 100) percent + FROM ( + SELECT pg_catalog.pg_namespace.nspname as schema_name, + pg_relation_size(pg_catalog.pg_class.oid) as table_size + FROM pg_catalog.pg_class + JOIN pg_catalog.pg_namespace ON relnamespace = pg_catalog.pg_namespace.oid + ) t + JOIN public.projects p ON schema_name = p.schema + GROUP BY pid + ORDER BY pid + `; + + const client = await pool.connect(); + let res; + try { + res = (await client.query(text))?.rows; + } catch (err) { + ERROR(err); + } finally { + client.release(); + } + return res; +} + +async function diagnostics () { + const paths = cfg._("global.imports.paths") ?? 
{};
+  const data = {};
+  for (const path in paths) {
+    data[path] = await df(paths[path]);
+  }
+  const res = {
+    hostname: os.hostname(),
+    memory: {
+      total: os.totalmem(),
+      free: os.freemem()
+    },
+    uptime: os.uptime(),
+    loadavg: os.loadavg(),
+    networkInterfaces: os.networkInterfaces(),
+    cpus: os.cpus(),
+    storage: {
+      root: await df("/"),
+      data
+    },
+    database: {
+      ...(await dbSize()),
+      projects: Object.fromEntries((await dbSchemaSizes()).map(row => [ row.pid, {size: row.size, percent: row.percent} ]))
+    }
+  };
+
+  return res;
+}
+
+
+module.exports = diagnostics;
diff --git a/lib/www/server/lib/utils/FormatTimestamp.js b/lib/www/server/lib/utils/FormatTimestamp.js
new file mode 100644
index 0000000..1ed936f
--- /dev/null
+++ b/lib/www/server/lib/utils/FormatTimestamp.js
@@ -0,0 +1,13 @@
+
+
+function FormatTimestamp (str) {
+  const d = new Date(str);
+  if (isNaN(d)) {
+    return str;
+  } else {
+    // Get rid of milliseconds
+    return d.toISOString().substring(0,19)+"Z";
+  }
+}
+
+module.exports = FormatTimestamp;
diff --git a/lib/www/server/lib/utils/deep.js b/lib/www/server/lib/utils/deep.js
new file mode 100644
index 0000000..f33ce56
--- /dev/null
+++ b/lib/www/server/lib/utils/deep.js
@@ -0,0 +1,168 @@
+
+/** Compare two possibly complex values for
+ * loose equality, going as deep as required in the
+ * case of complex objects.
+ */
+function deepCompare (a, b) {
+  if (a != null && b != null && typeof a == "object" && typeof b == "object") {
+    return !Object.entries(a).some( ([k, v]) => !deepCompare(v, b[k])) &&
+           !Object.entries(b).some( ([k, v]) => !deepCompare(v, a[k]));
+  } else {
+    return a == b;
+  }
+}
+
+
+/** Compare two possibly complex values for
+ * strict equality.
+ */
+function deepEqual (a, b) {
+  if (a !== null && b !== null && typeof a === "object" && typeof b === "object") {
+    return !Object.entries(a).some( ([k, v]) => !deepEqual(v, b[k])) &&
+           !Object.entries(b).some( ([k, v]) => !deepEqual(v, a[k]));
+  } else {
+    return a === b;
+  }
+}
+
+/** Traverses an object and sets a nested value.
+ * + * Example: + * + * const obj = {a: {b: {c: "X"} } } + * deepSet(obj, ["a", "b", "c"], "d") + * → {a: {b: {c: "d"} } } + * + * This would be the equivalent of: + * + * obj?.a?.b?.c = "d"; + * + * Except that the above is not a legal expression. + * + * If a non-leaf property does not exist, this function + * creates it as an empty object ({}) and keeps traversing. + * + * The last member of `path` may be `null`, in which case, + * if the object pointed to by the next to last member is + * an array, an insert operation will take place. + * + */ +function deepSet (obj, path, value) { + const key = path.shift(); + if (!path.length) { + if (key === null && Array.isArray(obj)) { + obj.push(value); + } else { + obj[key] = value; + } + } else { + if (!Object.hasOwn(obj, key)) { + obj[key] = {}; + } + deepSet(obj[key], path, value); + } +} + +/** Returns a nested property. + * + * Example: + * + * const obj = {a: {b: {c: "d"} } } + * deepSet(obj, ["a", "b", "c"]) + * → "d" + * + * If `path` is known in advance, this is effectively + * the same as: + * + * obj?.a?.b?.c + * + * This might be useful when `path` is dynamic. + */ +function deepValue (obj, path) { + if (obj !== undefined) { + const key = path.shift(); + if (!path.length) { + if (key === undefined) { + return obj; + } else { + return obj[key]; + } + } else { + return deepValue(obj[key], path); + } + } +} + +// Copied from: +// https://gomakethings.com/how-to-deep-merge-arrays-and-objects-with-javascript/ + +/*! + * Deep merge two or more objects or arrays. 
+ * (c) 2023 Chris Ferdinandi, MIT License, https://gomakethings.com + * @param {*} ...objs The arrays or objects to merge + * @returns {*} The merged arrays or objects + */ +function deepMerge (...objs) { + + /** + * Get the object type + * @param {*} obj The object + * @return {String} The object type + */ + function getType (obj) { + return Object.prototype.toString.call(obj).slice(8, -1).toLowerCase(); + } + + /** + * Deep merge two objects + * @return {Object} + */ + function mergeObj (clone, obj) { + for (let [key, value] of Object.entries(obj)) { + let type = getType(value); + if (clone[key] !== undefined && getType(clone[key]) === type && ['array', 'object'].includes(type)) { + clone[key] = deepMerge(clone[key], value); + } else { + clone[key] = structuredClone(value); + } + } + } + + // Create a clone of the first item in the objs array + let clone = structuredClone(objs.shift()); + + // Loop through each item + for (let obj of objs) { + + // Get the object type + let type = getType(obj); + + // If the current item isn't the same type as the clone, replace it + if (getType(clone) !== type) { + clone = structuredClone(obj); + continue; + } + + // Otherwise, merge + if (type === 'array') { + // Replace old array with new + clone = [...structuredClone(obj)]; + } else if (type === 'object') { + mergeObj(clone, obj); + } else { + clone = obj; + } + + } + + return clone; + +} + +module.exports = { + deepCompare, + deepEqual, + deepSet, + deepValue, + deepMerge +}; diff --git a/lib/www/server/lib/utils/hsl.js b/lib/www/server/lib/utils/hsl.js new file mode 100644 index 0000000..5d6de6f --- /dev/null +++ b/lib/www/server/lib/utils/hsl.js @@ -0,0 +1,47 @@ +/** Return an HSL colour as a function of an input value + * `str`. + * + * Consider using as getHSL.bind(this) in Vue components + * in order to get access to the Vuetify theme configuration. 
+ */ +function getHSL (str, saturation = 1, lightness = 0.25, offset = 0) { + + function getHash (v) { + if (typeof (v??false)[Symbol.iterator] != "function") { + // Not an iterable, make it one + v = String(v); + } + + return Math.abs([...v, ..." "].reduce( (acc, cur) => String(cur).charCodeAt(0) + ((acc << 5) - acc), 0 )); + } + + const h = (getHash(str) + offset) % 360; + const s = saturation * 100; + const l = this?.$vuetify?.theme?.isDark + ? (1-lightness) * 100 + : lightness * 100; + + return {h, s, l}; + +} + +/** Return a CSS hsl() or hsla() colour + * representation as a function of an input value. + * + * Consider using as getHSLColourFor.bind(this) – See + * note for getHSL() above. + */ +function getHSLColourFor (str, opacity = 1, saturation, lightness, offset) { + const _getHSL = getHSL.bind(this); + const {h, s, l} = _getHSL(str, saturation, lightness, offset); + if (opacity == 1) { + return `hsl(${h},${s}%,${l}%)`; + } else { + return `hsla(${h},${s}%,${l}%, ${opacity})`; + } +} + +module.exports = { + getHSL, + getHSLColourFor +} diff --git a/lib/www/server/lib/utils/index.js b/lib/www/server/lib/utils/index.js index bae0fee..42a47c8 100644 --- a/lib/www/server/lib/utils/index.js +++ b/lib/www/server/lib/utils/index.js @@ -1,13 +1,24 @@ module.exports = { - geometryAsString: require('./geometryAsString'), + ...require('./deep'), dms: require('./dms'), - replaceMarkers: require('./replaceMarkers'), + ...require('./flatEntries'), flattenQCDefinitions: require('./flattenQCDefinitions'), - deepMerge: require('./deepMerge'), + FormatTimestamp: require('./FormatTimestamp'), + geometryAsString: require('./geometryAsString'), + ...require('./hsl'), + ...require('./logicalPath'), // FIXME Breaking change (used to be logicalPath.…) + logicalPath: require('./logicalPath'), // NOTE For compatibility, see above + ...require('./markdown'), + preferencesĪ»: require('./preferencesĪ»'), + ...require('./ranges'), // FIXME Breaking change (used to be ranges.…) + 
ranges: require('./ranges'), // NOTE For compatibility, see above. removeNulls: require('./removeNulls'), - logicalPath: require('./logicalPath'), - ranges: require('./ranges'), + replaceMarkers: require('./replaceMarkers'), + setContentDisposition: require('./setContentDisposition'), + throttle: require('./throttle'), + truncateText: require('./truncateText'), unique: require('./unique'), - setContentDisposition: require('./setContentDisposition') + unpack: require('./unpack'), + withParentProps: require('./withParentProps') }; diff --git a/lib/www/server/lib/utils/logicalPath.js b/lib/www/server/lib/utils/logicalPath.js index a5adb29..72ffa97 100644 --- a/lib/www/server/lib/utils/logicalPath.js +++ b/lib/www/server/lib/utils/logicalPath.js @@ -22,7 +22,7 @@ function translatePath (file) { if (typeof importPaths === "string") { // Substitute the root for the real physical path // NOTE: `root` deals with import_paths not being absolute - const prefix = Path.resolve(Path.join(root, importPaths)); + const prefix = Path.resolve(root, importPaths); const suffix = Path.resolve(file).replace(/^\/+/, ""); const physicalPath = Path.resolve(Path.join(prefix, suffix)); return validate(physicalPath, prefix); diff --git a/lib/www/server/lib/utils/markdown.js b/lib/www/server/lib/utils/markdown.js new file mode 100644 index 0000000..4957207 --- /dev/null +++ b/lib/www/server/lib/utils/markdown.js @@ -0,0 +1,11 @@ +const { marked, parseInline } = require('marked'); + +function markdown (str) { + return marked(String(str)); +} + +function markdownInline (str) { + return parseInline(String(str)); +} + +module.exports = { markdown, markdownInline }; diff --git a/lib/www/server/lib/utils/preferencesĪ».js b/lib/www/server/lib/utils/preferencesĪ».js new file mode 100644 index 0000000..429a252 --- /dev/null +++ b/lib/www/server/lib/utils/preferencesĪ».js @@ -0,0 +1,44 @@ + + +/** Extract preferences by prefix. 
+ * + * This function returns a lambda which, given + * a key or a prefix, extracts the relevant + * preferences from the designated preferences + * store. + * + * For instance, assume preferences = { + * "a.b.c.d": 1, + * "a.b.e.f": 2, + * "g.h": 3 + * } + * + * And Ī» = preferencesĪ»(preferences). Then: + * + * Ī»("a.b") → { "a.b.c.d": 1, "a.b.e.f": 2 } + * Ī»("a.b.e.f") → { "a.b.e.f": 2 } + * Ī»("g.x", {"g.x.": 99}) → { "g.x.": 99 } + * Ī»("a.c", {"g.x.": 99}) → { "g.x.": 99 } + * + * Note from the last two examples that a default value + * may be provided and will be returned if a key does + * not exist or is not searched for. + */ +function preferencesĪ» (preferences) { + + return function (key, defaults={}) { + const keys = Object.keys(preferences).filter(str => str.startsWith(key+".") || str == key); + + const settings = {...defaults}; + for (const str of keys) { + const k = str == key ? str : str.substring(key.length+1); + const v = preferences[str]; + settings[k] = v; + } + + return settings; + } + +} + +module.exports = preferencesĪ»; diff --git a/lib/www/server/lib/utils/throttle.js b/lib/www/server/lib/utils/throttle.js new file mode 100644 index 0000000..ff9a90e --- /dev/null +++ b/lib/www/server/lib/utils/throttle.js @@ -0,0 +1,33 @@ +/** + * Throttle a function call. + * + * It delays `callback` by `delay` ms and ignores any + * repeated calls from `caller` within at most `maxWait` + * milliseconds. + * + * Used to react to server events in cases where we get + * a separate notification for each row of a bulk update. 
+ */
+function throttle (callback, caller, delay = 100, maxWait = 500) {
+
+  const schedule = async () => {
+    caller.triggeredAt = Date.now();
+    caller.timer = setTimeout(async () => {
+      await callback();
+      caller.timer = null;
+    }, delay);
+  }
+
+  if (!caller.timer) {
+    schedule();
+  } else {
+    const elapsed = Date.now() - caller.triggeredAt;
+    if (elapsed > maxWait) {
+      clearTimeout(caller.timer);
+      schedule();
+    }
+  }
+
+}
+
+module.exports = throttle;
diff --git a/lib/www/server/lib/utils/truncateText.js b/lib/www/server/lib/utils/truncateText.js
new file mode 100644
index 0000000..a38ce71
--- /dev/null
+++ b/lib/www/server/lib/utils/truncateText.js
@@ -0,0 +1,10 @@
+
+function truncateText (text, length=20) {
+  if (text == null || text.length <= length) {
+    return text;
+  } else {
+    return text.slice(0, length/2)+"…"+text.slice(-(length/2));
+  }
+}
+
+module.exports = truncateText;
diff --git a/lib/www/server/lib/utils/unpack.js b/lib/www/server/lib/utils/unpack.js
new file mode 100644
index 0000000..1728eb1
--- /dev/null
+++ b/lib/www/server/lib/utils/unpack.js
@@ -0,0 +1,35 @@
+/** Unpacks attributes from array items.
+ *
+ * At its simplest, given an array of objects,
+ * the call unpack(rows, "x") returns an array
+ * of the "x" attribute of every item in rows.
+ *
+ * `key` may also be:
+ *
+ * - a function with the signature
+ *   (Object) => any
+ *   the result of applying the function to
+ *   the object will be used as the unpacked
+ *   value.
+ *
+ * - an array of strings, functions or other
+ *   arrays. In this case, it does a recursive
+ *   fold operation. NOTE: it mutates `key`.
+ * + */ +function unpack(rows, key) { + if (typeof key === "function") { + return rows && rows.map( row => key(row) ); + } else if (Array.isArray(key)) { + const car = key.shift(); + if (key.length) { + return unpack(unpack(rows, car), key); + } else { + return unpack(rows, car); + } + } else { + return rows && rows.map( row => row?.[key] ); + } +}; + +module.exports = unpack; diff --git a/lib/www/server/lib/utils/withParentProps.js b/lib/www/server/lib/utils/withParentProps.js new file mode 100644 index 0000000..e813a59 --- /dev/null +++ b/lib/www/server/lib/utils/withParentProps.js @@ -0,0 +1,28 @@ + +function withParentProps(item, parent, childrenKey, prop, currentValue) { + if (!Array.isArray(parent)) { + return; + } + + let currentPropValue = currentValue || parent[prop]; + + for (const entry of parent) { + if (entry[prop]) { + currentPropValue = entry[prop]; + } + + if (entry === item) { + return [item, currentPropValue]; + } + + if (entry[childrenKey]) { + const res = withParentProps(item, entry[childrenKey], childrenKey, prop, currentPropValue); + if (res[1]) { + return res; + } + } + } + return []; +} + +module.exports = withParentProps; diff --git a/lib/www/server/package.json b/lib/www/server/package.json index 463e0ac..259699c 100644 --- a/lib/www/server/package.json +++ b/lib/www/server/package.json @@ -16,7 +16,7 @@ "api": "0.4.0" }, "wanted": { - "db_schema": "^0.4.5" + "db_schema": "^0.5.0" } }, "engines": { diff --git a/sbin/upgrade-project-configurations-20231113.js b/sbin/upgrade-project-configurations-20231113.js new file mode 100755 index 0000000..66271ae --- /dev/null +++ b/sbin/upgrade-project-configurations-20231113.js @@ -0,0 +1,924 @@ +#!/usr/bin/node + +const path = require('path'); +const fs = require('fs'); +const YAML = dougal_require("yaml"); +const db = dougal_require("db"); +const { deepSet } = dougal_require("utils"); + +function dougal_require(id) { + try { + return require(path.join(__dirname, "../lib/www/server/lib", id)); + } 
catch (err) { + if (err.code == "MODULE_NOT_FOUND") { + console.log("Trying alternative path"); + return require(path.join(__dirname, "../lib/www/server/node_modules", id)); + } else { + console.error(err); + throw err; + } + } +} + + +// +// https://gitlab.com/wgp/dougal/software/-/work_items/291 +// + +function check_asaqc (cfg) { + if (!cfg.cloud?.asaqc?.id) { + return apply_cloud_asaqc; + } +} + +function apply_cloud_asaqc (cfg) { + const asaqc = cfg.asaqc; + if (asaqc) { + console.log("Applying ASAQC changes"); + deepSet(cfg, [ "cloud", "asaqc" ], asaqc); + } else { + console.log("ASAQC configuration not found. Will create empty ASAQC object"); + deepSet(cfg, [ "cloud", "asaqc" ], { + id: null, + imo: null, + mmsi: null + }); + } + return cfg; +} + + +// +// https://gitlab.com/wgp/dougal/software/-/work_items/296 +// + +function check_asaqc_subscription_key (cfg) { + if (!cfg.cloud?.asaqc?.subscriptionKey) { + return apply_asaqc_subscription_key; + } +} + +function apply_asaqc_subscription_key (cfg) { + console.log("Adding subscriptionKey to ASAQC configuration"); + const subscriptionKey = process.env.DOUGAL_ASAQC_SUBSCRIPTION_KEY; + if (subscriptionKey) { + deepSet(cfg, [ "cloud", "asaqc", "subscriptionKey" ] , subscriptionKey); + } else { + throw new Error("The ASAQC subscription key must be supplied via the DOUGAL_ASAQC_SUBSCRIPTION_KEY environment variable"); + } + return cfg; +} + + +// +// https://gitlab.com/wgp/dougal/software/-/work_items/297 +// + +function check_online_line_name_info (cfg) { + if (!cfg.online?.line?.lineNameInfo?.fields) { + return apply_online_line_name_info; + } +} + +function apply_online_line_name_info (cfg) { + console.log("Applying online line name info changes"); + let lineNameInfo = { + example: null, + fields: { + line: { + offset: null, + length: 4, + type: "int", + }, + sequence: { + offset: null, + length: 3, + type: "int" + }, + incr: { + offset: null, + length: 2, + type: "bool", + enum: {} + }, + attempt: { + offset: 
null, + length: 1, + type: "int" + } + } + }; + switch (process.env.HOST) { + case "dougal04": + lineNameInfo = { + "example": "EQ22200-2213130-007", + "fields": { + "line": { + "length": 4, + "type": "int", + "offset": 10 + }, + "sequence": { + "length": 3, + "type": "int", + "offset": 16 + }, + "incr": { + "enum": { + "1": true, + "2": false + }, + "length": 1, + "type": "bool", + "offset": 8 + }, + "attempt": { + "length": 1, + "type": "int", + "offset": 14 + }, + "file_no": { + "length": 3, + "type": "int", + "offset": 20 + }, + "year": { + "offset": 2, + "length": 2, + "type": "int" + }, + "survey_type": { + "enum": { + "0": "Marine", + "2": "OBS/PRM" + }, + "offset": 4, + "length": 1, + "default": "Unknown", + "type": "str" + }, + "project_number": { + "offset": 5, + "length": 2, + "type": "int" + }, + "num_sources": { + "enum": { + "0": "2", + "1": "1", + "2": "3" + }, + "offset": 9, + "length": 1, + "type": "int" + } + } + } + break; + case "dougal03": + // Don't know what they use + break; + case "dougal02": + case "dougal01": + default: // Includes dev servers + lineNameInfo = { + example: "1054282180S00000", + fields: { + line: { + offset: 2, + length: 4, + type: "int", + }, + sequence: { + offset: 7, + length: 3, + type: "int" + }, + incr: { + offset: 0, + length: 2, + type: "bool", + enum: { + "10": true, + "20": false + } + }, + attempt: { + offset: 6, + length: 1, + type: "int" + } + } + }; + } + + deepSet(cfg, [ "online", "line", "lineNameInfo" ], lineNameInfo); + return cfg; +} + + +// +// https://gitlab.com/wgp/dougal/software/-/work_items/292 +// + +function check_raw_p111_line_name_info (cfg) { + if (!cfg.raw?.p111?.lineNameInfo?.fields) { + return apply_raw_p111_line_name_info; + } +} + +function apply_raw_p111_line_name_info (cfg) { + console.log("Applying raw P1/11 name info changes"); + let lineNameInfo = { + example: null, + fields: { + line: { + offset: null, + length: 4, + type: "int", + }, + sequence: { + offset: null, + length: 3, + 
type: "int" + }, + incr: { + offset: null, + length: 2, + type: "bool", + enum: {} + }, + attempt: { + offset: null, + length: 1, + type: "int" + } + } + }; + switch (process.env.HOST) { + case "dougal04": + lineNameInfo = { + "example": "EQ22200-2213130-007.000.P111", + "fields": { + "line": { + "length": 4, + "type": "int", + "offset": 10 + }, + "sequence": { + "length": 3, + "type": "int", + "offset": 16 + }, + "incr": { + "enum": { + "1": true, + "2": false + }, + "length": 1, + "type": "bool", + "offset": 8 + }, + "attempt": { + "length": 1, + "type": "int", + "offset": 14 + }, + "file_no": { + "length": 3, + "type": "int", + "offset": 20 + }, + "year": { + "offset": 2, + "length": 2, + "type": "int" + }, + "survey_type": { + "enum": { + "0": "Marine", + "2": "OBS/PRM" + }, + "offset": 4, + "length": 1, + "default": "Unknown", + "type": "str" + }, + "project_number": { + "offset": 5, + "length": 2, + "type": "int" + }, + "num_sources": { + "enum": { + "0": "2", + "1": "1", + "2": "3" + }, + "offset": 9, + "length": 1, + "type": "int" + } + } + } + break; + case "dougal03": + // Don't know what they use + break; + case "dougal02": + case "dougal01": + default: // Includes dev servers + lineNameInfo = { + example: "1054282180S00000.000.p111", + fields: { + line: { + offset: 2, + length: 4, + type: "int", + }, + sequence: { + offset: 7, + length: 3, + type: "int" + }, + incr: { + offset: 0, + length: 2, + type: "bool", + enum: { + "10": true, + "20": false + } + }, + attempt: { + offset: 6, + length: 1, + type: "int" + } + } + }; + } + + deepSet(cfg, [ "raw", "p111", "lineNameInfo" ], lineNameInfo); + return cfg; +} + + +// +// https://gitlab.com/wgp/dougal/software/-/work_items/293 +// + +function check_final_p111_line_name_info (cfg) { + if (!cfg.final?.p111?.lineNameInfo?.fields) { + return apply_final_p111_line_name_info; + } +} + +function apply_final_p111_line_name_info (cfg) { + console.log("Applying final P1/11 name info changes"); + let lineNameInfo = { 
+ example: null, + fields: { + line: { + offset: null, + length: 4, + type: "int", + }, + sequence: { + offset: null, + length: 3, + type: "int" + }, + incr: { + offset: null, + length: 2, + type: "bool", + enum: {} + }, + attempt: { + offset: null, + length: 1, + type: "int" + } + } + }; + switch (process.env.HOST) { + case "dougal04": + lineNameInfo = { + "example": "EQ22200-2213130-007.000.P111", + "fields": { + "line": { + "length": 4, + "type": "int", + "offset": 10 + }, + "sequence": { + "length": 3, + "type": "int", + "offset": 16 + }, + "incr": { + "enum": { + "1": true, + "2": false + }, + "length": 1, + "type": "bool", + "offset": 8 + }, + "attempt": { + "length": 1, + "type": "int", + "offset": 14 + }, + "file_no": { + "length": 3, + "type": "int", + "offset": 20 + }, + "year": { + "offset": 2, + "length": 2, + "type": "int" + }, + "survey_type": { + "enum": { + "0": "Marine", + "2": "OBS/PRM" + }, + "offset": 4, + "length": 1, + "default": "Unknown", + "type": "str" + }, + "project_number": { + "offset": 5, + "length": 2, + "type": "int" + }, + "num_sources": { + "enum": { + "0": "2", + "1": "1", + "2": "3" + }, + "offset": 9, + "length": 1, + "type": "int" + } + } + } + break; + case "dougal03": + // Don't know what they use + break; + case "dougal02": + case "dougal01": + default: // Includes dev servers + lineNameInfo = { + example: "1054282180S00000.000.p111", + fields: { + line: { + offset: 2, + length: 4, + type: "int", + }, + sequence: { + offset: 7, + length: 3, + type: "int" + }, + incr: { + offset: 0, + length: 2, + type: "bool", + enum: { + "10": true, + "20": false + } + }, + attempt: { + offset: 6, + length: 1, + type: "int" + } + } + }; + } + + deepSet(cfg, [ "final", "p111", "lineNameInfo" ], lineNameInfo); + return cfg; +} + + +// +// https://gitlab.com/wgp/dougal/software/-/work_items/294 +// + +function check_smsrc_headers_glob_path (cfg) { + if (!cfg.raw?.source?.smsrc?.header?.glob?.length) { + return apply_smsrc_headers_glob_path; + 
} +} + +function apply_smsrc_headers_glob_path (cfg) { + console.log("Copying Smartsource header glob and path values to new location"); + + const globs = cfg?.raw?.smsrc?.globs; + const paths = cfg?.raw?.smsrc?.paths; + + if (globs) { + deepSet(cfg, [ "raw", "source", "smsrc", "header", "globs" ], globs); + } + + if (paths) { + deepSet(cfg, [ "raw", "source", "smsrc", "header", "paths" ], paths); + } + + return cfg; +} + + + +// +// https://gitlab.com/wgp/dougal/software/-/work_items/294 +// + +function check_smsrc_headers_line_name_info (cfg) { + if (!cfg.raw?.source?.smsrc?.header?.lineNameInfo?.fields) { + return apply_smsrc_headers_line_name_info; + } +} + +function apply_smsrc_headers_line_name_info (cfg) { + console.log("Applying raw P1/11 name info changes"); + let lineNameInfo = { + example: null, + fields: { + line: { + offset: null, + length: 4, + type: "int", + }, + sequence: { + offset: null, + length: 3, + type: "int" + }, + incr: { + offset: null, + length: 2, + type: "bool", + enum: {} + }, + attempt: { + offset: null, + length: 1, + type: "int" + } + } + }; + switch (process.env.HOST) { + case "dougal04": + lineNameInfo = { + "example": "EQ22200-2213130-007.000.P111", + "fields": { + "line": { + "length": 4, + "type": "int", + "offset": 10 + }, + "sequence": { + "length": 3, + "type": "int", + "offset": 16 + }, + "incr": { + "enum": { + "1": true, + "2": false + }, + "length": 1, + "type": "bool", + "offset": 8 + }, + "attempt": { + "length": 1, + "type": "int", + "offset": 14 + }, + "file_no": { + "length": 3, + "type": "int", + "offset": 20 + }, + "year": { + "offset": 2, + "length": 2, + "type": "int" + }, + "survey_type": { + "enum": { + "0": "Marine", + "2": "OBS/PRM" + }, + "offset": 4, + "length": 1, + "default": "Unknown", + "type": "str" + }, + "project_number": { + "offset": 5, + "length": 2, + "type": "int" + }, + "num_sources": { + "enum": { + "0": "2", + "1": "1", + "2": "3" + }, + "offset": 9, + "length": 1, + "type": "int" + } + } + 
} + break; + case "dougal03": + // Don't know what they use + break; + case "dougal02": + case "dougal01": + default: // Includes dev servers + lineNameInfo = { + example: "1054282180S00000.HDR", + fields: { + line: { + offset: 2, + length: 4, + type: "int", + }, + sequence: { + offset: 7, + length: 3, + type: "int" + }, + incr: { + offset: 0, + length: 2, + type: "bool", + enum: { + "10": true, + "20": false + } + }, + attempt: { + offset: 6, + length: 1, + type: "int" + } + } + }; + } + + deepSet(cfg, [ "raw", "source", "smsrc", "header", "lineNameInfo" ], lineNameInfo); + return cfg; +} + + +// +// https://gitlab.com/wgp/dougal/software/-/work_items/295 +// + +function check_smsrc_segy (cfg) { + // We only do this on installations where we know there is, or there + // might be, SEG-Y data available. + const supported_hosts = [ + "dougal02", + "dougal01" + ]; + if (supported_hosts.includes(process.env.HOST)) { + if (!cfg.raw?.source?.smsrc?.segy?.lineNameInfo?.fields) { + return apply_smsrc_segy; + } + } +} + +function apply_smsrc_segy (cfg) { + // We don't need to run a switch() for hosts here, since + // we've already done that in check_smsrc_segy(). 
+
+  // Use the paths for *.HDR files as a reference
+  const paths = cfg.raw?.source?.smsrc?.header?.paths?.map( p =>
+    path.join(path.dirname(p), "10 SEG-Y"));
+  const globs = [ "**/*-hyd.sgy" ];
+  const lineNameInfo = {
+    "example": "1051460070S00000-hyd.sgy",
+    "fields": {
+      "sequence": {
+        "length": 3,
+        "type": "int",
+        "offset": 7
+      },
+      "line": {
+        "length": 4,
+        "offset": 2
+      }
+    }
+  };
+  const segy = { paths, globs, lineNameInfo };
+
+  deepSet(cfg, [ "raw", "source", "smsrc", "segy" ], segy);
+
+  return cfg;
+}
+
+
+//
+// https://gitlab.com/wgp/dougal/software/-/work_items/298
+//
+
+function check_preplots_fields (cfg) {
+  if (cfg.preplots?.length) {
+    const indices = [];
+    for (const idx in cfg.preplots) {
+      const preplot = cfg.preplots[idx];
+      if (!preplot?.fields?.line_name) {
+        indices.push(idx);
+      }
+    }
+    if (indices.length) {
+      return apply_preplots_fieldsĪ»(indices);
+    }
+  }
+}
+
+function apply_preplots_fieldsĪ» (indices) {
+  function fix_preplot (preplot) {
+    const names = preplot.format.names;
+    const types = preplot.format.types;
+    const widths = preplot.format.widths;
+    const offsets_widths = widths.reduce ((acc, cur) => {
+      if (cur < 0) {
+        acc.p -= cur; // Advances the position by -cur
+      } else {
+        acc.f.push({offset: acc.p, width: cur});
+        acc.p += cur;
+      }
+      return acc;
+    }, {f: [], p: 0})
+
+    const fields = {};
+    names.forEach( (name, ι) => {
+      const field = {
+        type: types[ι],
+        ...offsets_widths.f[ι]
+      };
+      fields[name] = field;
+    });
+    preplot.fields = fields;
+    return preplot;
+  }
+
+  return function apply_preplots_fields (cfg) {
+    for (const idx of indices) {
+      console.log("Fixing preplot", idx);
+      const preplot = fix_preplot(cfg.preplots[idx]);
+      cfg.preplots.splice(idx, 1, preplot);
+    }
+  }
+}
+
+
+/* Template for more upgrade actions
+
+//
+// https://gitlab.com/wgp/dougal/software/-/work_items/
+//
+
+function check_ (cfg) {
+}
+
+function apply_ (cfg) {
+}
+
+*/
+
+
+const checkers = [
+  check_asaqc,
+  
check_asaqc_subscription_key, + check_online_line_name_info, + check_raw_p111_line_name_info, + check_final_p111_line_name_info, + check_smsrc_headers_glob_path, + check_smsrc_headers_line_name_info, + check_smsrc_segy, + check_preplots_fields +] + +const now = new Date(); +const tstamp = now.toISOString().substr(0, 19)+"Z"; + +function fnames(pid) { + return { + backup: `${pid}-configuration-${tstamp}.yaml`, + upgrade: `NEW-${pid}-configuration-${tstamp}.yaml` + }; +} + +function save_scripts (pid) { + const cwd = process.cwd(); + const fn_backup = path.resolve(path.join(cwd, fnames(pid).backup)); + const fn_upgrade = path.resolve(path.join(cwd, fnames(pid).upgrade)); + + console.log("Creating script to restore old / new configurations"); + const backup = `# Restore pre-upgrade configuration for ${pid} +curl -vs "http://localhost:3000/api/project/${pid}/configuration" -X PUT -H "Content-Type: application/yaml" --data-binary @${fn_backup}\n`; + const upgrade = `# Restore post-upgrade configuration for ${pid} +curl -vs "http://localhost:3000/api/project/${pid}/configuration" -X PUT -H "Content-Type: application/yaml" --data-binary @${fn_upgrade}\n`; + + fs.writeFileSync(`restore-20231113-pre-${pid}.sh`, backup); + fs.writeFileSync(`restore-20231113-post-${pid}.sh`, upgrade); +} + +async function backup (pid, cfg) { + const fname = fnames(pid).backup; + console.log(`Backing up configuration for ${pid} as ${fname} into current directory`); + const text = YAML.stringify(cfg); + fs.writeFileSync(fname, text); +} + +async function save_configuration (pid, cfg) { + console.log("Saving configuration for", pid); + + console.log("Saving copy of NEW configuration to file"); + const fname = fnames(pid).upgrade; + const text = YAML.stringify(cfg); + fs.writeFileSync(fname, text); + save_scripts(pid); + + //console.log("Uploading configuration to server"); + try { + //await db.project.configuration.put(pid); + } catch (err) { + console.log("Configuration upload failed"); + 
console.error(err); + throw err; + } +} + +async function upgrade_configuration (pid) { + const configuration = await db.project.configuration.get(pid); + + console.log(`Checking configuration for ${configuration.id} (${configuration.schema})`); + + const appliers = checkers.map( checker => checker(configuration) ).filter( i => !!i ); + + if (appliers.length) { + console.log("Configuration needs changes."); + await backup(pid, configuration); + + console.log("Applying changes"); + console.log(appliers); + for (const applier of appliers) { + applier(configuration); + } + + await save_configuration(pid, configuration); + } + +} + +async function main () { + + const cla = process.argv.slice(2).map(i => i.toLowerCase()); + + function project_filter (project) { + if (cla.length == 0) + return true; + return cla.includes(project.pid.toLowerCase()); + } + + const projects = (await db.project.get()).filter(project_filter); + + projects.sort( (a, b) => + a.pid > b.pid + ? 1 + : a.pid < b.pid + ? -1 + : 0); + + console.log(projects); + + for (const project of projects) { + await upgrade_configuration(project.pid); + } + + console.log("All done"); + process.exit(0); +} + + +main();