Mirror of https://gitlab.com/wgp/dougal/software.git, synced 2025-12-06 10:07:08 +00:00
Compare commits
2 Commits
6b6f5ab511...210-loadin

| Author | SHA1 | Date |
|---|---|---|
| | 8eb49e5e09 | |
| | e40ea33345 | |
2 .gitignore vendored
@@ -11,5 +11,3 @@ lib/www/client/dist/
etc/surveys/*.yaml
!etc/surveys/_*.yaml
etc/ssl/*
etc/config.yaml
var/*
@@ -1,27 +0,0 @@
#!/usr/bin/python3

"""
Check if any of the directories provided in the imports.mounts configuration
section are empty.

Returns 0 if all arguments are non-empty, 1 otherwise. It stops at the first
empty directory.
"""

import os
import configuration

cfg = configuration.read()

if cfg and "imports" in cfg and "mounts" in cfg["imports"]:

    mounts = cfg["imports"]["mounts"]
    for item in mounts:
        with os.scandir(item) as contents:
            if not any(contents):
                exit(1)

else:
    print("No mounts in configuration")

exit(0)
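For illustration, a minimal sketch of the configuration shape this script read, assuming the imports layout shown in etc/config.yaml later in this diff (values are examples only):

# Hypothetical result of configuration.read(), for illustration:
cfg = {
    "imports": {
        "mounts": ["/srv/mnt/Data"],  # directories that must be non-empty
    }
}
# With an empty /srv/mnt/Data, the script above exits with status 1;
# the runner script later in this diff used that exit code to inhibit all tasks.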
@@ -1,5 +1,4 @@
import os
import pathlib
from glob import glob
from yaml import full_load as _load

@@ -12,18 +11,6 @@ surveys should be under $HOME/etc/surveys/*.yaml. In both cases,
$HOME is the home directory of the user running this script.
"""

def is_relative_to(it, other):
    """
    is_relative_to() is not available before Python 3.9, so we
    need this kludge to get Dougal to run on OpenSUSE 15.4
    """

    if "is_relative_to" in dir(it):
        return it.is_relative_to(other)

    return str(it.absolute()).startswith(str(other.absolute()))


prefix = os.environ.get("DOUGAL_ROOT", os.environ.get("HOME", ".")+"/software")

DOUGAL_ROOT = os.environ.get("DOUGAL_ROOT", os.environ.get("HOME", ".")+"/software")

@@ -67,10 +54,6 @@ def files (globspec = None, include_archived = False):
    quickly and temporarily “disabling” a survey configuration by renaming
    the relevant file.
    """

    print("This method is obsolete")
    return

    tuples = []

    if globspec is None:

@@ -104,73 +87,3 @@ def rxflags (flagstr):
    for flag in flagstr:
        flags |= cases.get(flag, 0)
    return flags

def translate_path (file):
    """
    Translate a path from a Dougal import directory to an actual
    physical path on disk.

    Any user files accessible by Dougal must be under a path prefixed
    by `(config.yaml).imports.paths`. The value of `imports.paths` may
    be either a string, in which case this represents the prefix under
    which all Dougal data resides, or a dictionary where the keys are
    logical paths and their values the corresponding physical path.
    """
    cfg = read()
    root = pathlib.Path(DOUGAL_ROOT)
    filepath = pathlib.Path(file).resolve()
    import_paths = cfg["imports"]["paths"]

    if filepath.is_absolute():
        if type(import_paths) == str:
            # Substitute the root for the real physical path
            # NOTE: `root` deals with import_paths not being absolute
            prefix = root.joinpath(pathlib.Path(import_paths)).resolve()
            return str(pathlib.Path(prefix).joinpath(*filepath.parts[2:]))
        else:
            # Look for a match on the second path element
            if filepath.parts[1] in import_paths:
                # NOTE: `root` deals with import_paths[…] not being absolute
                prefix = root.joinpath(import_paths[filepath.parts[1]])
                return str(pathlib.Path(prefix).joinpath(*filepath.parts[2:]))
            else:
                # This path is invalid
                raise TypeError("invalid path or file: {0!r}".format(filepath))
    else:
        # A relative filepath is always resolved relative to the logical root
        root = pathlib.Path("/")
        return translate_path(root.joinpath(filepath))


def untranslate_path (file):
    """
    Attempt to convert a physical path into a logical one.
    See `translate_path()` above for details.
    """
    cfg = read()
    dougal_root = pathlib.Path(DOUGAL_ROOT)
    filepath = pathlib.Path(file).resolve()
    import_paths = cfg["imports"]["paths"]
    physical_root = pathlib.Path("/")

    if filepath.is_absolute():
        if type(import_paths) == str:
            if is_relative_to(filepath, import_paths):
                physical_root = pathlib.Path("/")
                physical_prefix = pathlib.Path(import_paths)
                return str(physical_root.joinpath(filepath.relative_to(physical_prefix)))
            else:
                raise TypeError("invalid path or file: {0!r}".format(filepath))
        else:
            for key, value in import_paths.items():
                value = dougal_root.joinpath(value)
                physical_prefix = pathlib.Path(value)
                if is_relative_to(filepath, physical_prefix):
                    logical_prefix = physical_root.joinpath(pathlib.Path(key)).resolve()
                    return str(logical_prefix.joinpath(filepath.relative_to(physical_prefix)))

            # If we got here with no matches, this is not a valid
            # Dougal data path
            raise TypeError("invalid path or file: {0!r}".format(filepath))
    else:
        # A relative filepath is always resolved relative to DOUGAL_ROOT
        return untranslate_path(dougal_root.joinpath(filepath))
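A rough usage sketch of the pair of functions above, assuming the dictionary form of imports.paths with a hypothetical mapping of data: /srv/mnt/Data (file names invented):

# translate_path(): logical -> physical
#   translate_path("/data/preplots/block_a.sps")
#   -> "/srv/mnt/Data/preplots/block_a.sps"
# untranslate_path(): physical -> logical, the inverse mapping
#   untranslate_path("/srv/mnt/Data/preplots/block_a.sps")
#   -> "/data/preplots/block_a.sps"
# Paths outside imports.paths raise TypeError("invalid path or file: ...").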
@@ -1,26 +0,0 @@
#!/usr/bin/python3

"""
Do daily housekeeping on the database.

This is meant to run shortly after midnight every day.
"""

import configuration
from datastore import Datastore

if __name__ == '__main__':

    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()

    print("Reading surveys")
    for survey in surveys:
        print(f'Survey: {survey["id"]} ({survey["schema"]})')
        db.set_survey(survey["schema"])

        print("Daily tasks")
        db.run_daily_tasks()

    print("Done")
163 bin/datastore.py
@@ -52,7 +52,7 @@ class Datastore:

        self.conn = psycopg2.connect(configuration.read()["db"]["connection_string"], **opts)

    def set_autocommit(self, value = True):
    def set_autocommit(value = True):
        """
        Enable or disable autocommit.

@@ -95,7 +95,7 @@ class Datastore:
        cursor.execute(qry, (filepath,))
        results = cursor.fetchall()
        if len(results):
            return (filepath, file_hash(configuration.translate_path(filepath))) in results
            return (filepath, file_hash(filepath)) in results


    def add_file(self, path, cursor = None):

@@ -107,8 +107,7 @@ class Datastore:
        else:
            cur = cursor

        realpath = configuration.translate_path(path)
        hash = file_hash(realpath)
        hash = file_hash(path)
        qry = "CALL add_file(%s, %s);"
        cur.execute(qry, (path, hash))
        if cursor is None:

@@ -177,7 +176,7 @@ class Datastore:
        else:
            cur = cursor

        hash = file_hash(configuration.translate_path(path))
        hash = file_hash(path)
        qry = """
            UPDATE raw_lines rl
            SET ntbp = %s

@@ -256,78 +255,6 @@ class Datastore:

        self.maybe_commit()


    def save_preplot_line_info(self, lines, filepath, filedata = None):
        """
        Save preplot line information

        Arguments:

        lines (iterable): should be a collection of lines returned from
        one of the line info reading functions (see preplots.py).

        filepath (string): the full path to the preplot file from where the lines
        have been read. It will be added to the survey's `file` table so that
        it can be monitored for changes.
        """

        with self.conn.cursor() as cursor:
            cursor.execute("BEGIN;")

            # Check which preplot lines we actually have already imported,
            # as the line info file may contain extra lines.

            qry = """
                SELECT line, class
                FROM preplot_lines
                ORDER BY line, class;
            """
            cursor.execute(qry)
            preplot_lines = cursor.fetchall()

            hash = self.add_file(filepath, cursor)
            count = 0
            for line in lines:
                count += 1

                if not (line["sail_line"], "V") in preplot_lines:
                    print(f"\u001b[2KSkipping line {count} / {len(lines)}", end="\n", flush=True)
                    continue

                print(f"\u001b[2KSaving line {count} / {len(lines)} ", end="\n", flush=True)

                sail_line = line["sail_line"]
                incr = line.get("incr", True)
                ntba = line.get("ntba", False)
                remarks = line.get("remarks", None)
                meta = json.dumps(line.get("meta", {}))
                source_lines = line.get("source_line", [])

                for source_line in source_lines:
                    qry = """
                        INSERT INTO preplot_saillines AS ps
                        (sailline, line, sailline_class, line_class, incr, ntba, remarks, meta, hash)
                        VALUES
                        (%s, %s, 'V', 'S', %s, %s, %s, %s, %s)
                        ON CONFLICT (sailline, sailline_class, line, line_class, incr) DO UPDATE
                        SET
                            incr = EXCLUDED.incr,
                            ntba = EXCLUDED.ntba,
                            remarks = COALESCE(EXCLUDED.remarks, ps.remarks),
                            meta = ps.meta || EXCLUDED.meta,
                            hash = EXCLUDED.hash;
                    """

                    # NOTE Consider using cursor.executemany() instead. Then again,
                    # we're only expecting a few hundred lines at most.
                    cursor.execute(qry, (sail_line, source_line, incr, ntba, remarks, meta, hash))

            if filedata is not None:
                self.save_file_data(filepath, json.dumps(filedata), cursor)

            self.maybe_commit()


    def save_raw_p190(self, records, fileinfo, filepath, epsg = 0, filedata = None, ntbp = False):
        """
        Save raw P1 data.

@@ -661,68 +588,7 @@ class Datastore:
        # We do not commit if we've been passed a cursor, instead
        # we assume that we are in the middle of a transaction

    def get_file_data(self, path, cursor = None):
        """
        Retrieve arbitrary data associated with a file.
        """

        if cursor is None:
            cur = self.conn.cursor()
        else:
            cur = cursor

        realpath = configuration.translate_path(path)
        hash = file_hash(realpath)

        qry = """
            SELECT data
            FROM file_data
            WHERE hash = %s;
        """

        cur.execute(qry, (hash,))
        res = cur.fetchone()

        if cursor is None:
            self.maybe_commit()
            # We do not commit if we've been passed a cursor, instead
            # we assume that we are in the middle of a transaction
        return res[0]

    def surveys (self, include_archived = False):
        """
        Return list of survey definitions.
        """

        if self.conn is None:
            self.connect()

        if include_archived:
            qry = """
                SELECT meta, schema
                FROM public.projects;
            """
        else:
            qry = """
                SELECT meta, schema
                FROM public.projects
                WHERE NOT (meta->'archived')::boolean IS true
            """

        with self.conn:
            with self.conn.cursor() as cursor:

                cursor.execute(qry)
                results = cursor.fetchall()
                surveys = []
                for r in results:
                    if r[0]:
                        r[0]['schema'] = r[1]
                        surveys.append(r[0])
                return surveys


    # TODO Does this need tweaking on account of #246?
    def apply_survey_configuration(self, cursor = None):
        if cursor is None:
            cur = self.conn.cursor()

@@ -846,27 +712,6 @@ class Datastore:

        qry = "CALL augment_event_data();"
        cur.execute(qry)

        qry = "CALL scan_placeholders();"
        cur.execute(qry)

        if cursor is None:
            self.maybe_commit()
            # We do not commit if we've been passed a cursor, instead
            # we assume that we are in the middle of a transaction

    def run_daily_tasks(self, cursor = None):
        """
        Run once-a-day tasks
        """
        if cursor is None:
            cur = self.conn.cursor()
        else:
            cur = cursor

        qry = "CALL log_midnight_shots();"
        cur.execute(qry)

        if cursor is None:
            self.maybe_commit()
            # We do not commit if we've been passed a cursor, instead
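Throughout the class above, the `cursor = None` convention lets callers batch several operations into a single transaction: each method only commits when it created its own cursor. A hedged sketch of that composition (the path is invented; method names are as shown in the diff):

db = Datastore()
with db.conn.cursor() as cursor:
    cursor.execute("BEGIN;")
    # Passing our cursor suppresses each call's own commit...
    db.add_file("/data/preplots/block_a.sps", cursor)  # hypothetical path
    db.apply_survey_configuration(cursor)
    db.maybe_commit()  # ...so the caller decides when the transaction ends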
163 bin/delimited.py
@@ -1,163 +0,0 @@
#!/usr/bin/python3

"""
Delimited record importing functions.
"""

import csv
import builtins

def to_bool (v):
    try:
        return bool(int(v))
    except ValueError:
        if type(v) == str:
            return v.strip().lower().startswith("t")
        return False

transform = {
    "int": lambda v: builtins.int(float(v)),
    "float": float,
    "string": str,
    "bool": to_bool
}

def cast_values (row, fields):

    def enum_for (key):
        field = fields.get(key, {})
        def enum (val):
            if "enum" in field:
                ret_val = field.get("default", val)
                enums = field.get("enum", [])
                for enum_key in enums:
                    if enum_key == val:
                        ret_val = enums[enum_key]
                return ret_val
            return val
        return enum

    # Get rid of any unwanted data
    if None in row:
        del(row[None])

    for key in row:

        val = row[key]
        enum = enum_for(key)
        transformer = transform.get(fields.get(key, {}).get("type"), str)

        if type(val) == list:
            for i, v in enumerate(val):
                row[key][i] = transformer(enum(v))
        elif type(val) == dict:
            continue
        else:
            row[key] = transformer(enum(val))
    return row

def build_fieldnames (spec): #(arr, key, val):
    fieldnames = []

    if "fields" in spec:
        for key in spec["fields"]:
            index = spec["fields"][key]["column"]
            try:
                fieldnames[index] = key
            except IndexError:
                assert index >= 0
                fieldnames.extend(((index + 1) - len(fieldnames)) * [None])
                fieldnames[index] = key

    return fieldnames


def from_file_delimited (path, spec):

    fieldnames = build_fieldnames(spec)
    fields = spec.get("fields", [])
    delimiter = spec.get("delimiter", ",")
    firstRow = spec.get("firstRow", 0)
    headerRow = spec.get("headerRow", False)
    if headerRow:
        firstRow += 1

    records = []
    with open(path, "r", errors="ignore") as fd:

        if spec.get("type") == "x-sl+csv":
            fieldnames = None # Pick from header row
            firstRow = 0
            reader = csv.DictReader(fd, delimiter=delimiter)
        else:
            reader = csv.DictReader(fd, fieldnames=fieldnames, delimiter=delimiter)

        row = 0
        for line in reader:
            skip = False

            if row < firstRow:
                skip = True

            if not skip:
                records.append(cast_values(dict(line), fields))

            row += 1

    return records


def remap (line, headers):
    row = dict()
    for i, key in enumerate(headers):
        if "." in key[1:-1]:
            # This is an object
            k, attr = key.split(".")
            if not k in row:
                row[k] = dict()
            row[k][attr] = line[i]
        elif key in row:
            if type(row[key]) == list:
                row[key].append(line[i])
            else:
                row[key] = [ row[key], line[i] ]
        else:
            row[key] = line[i]
    return row

def from_file_saillines (path, spec):

    fields = {
        "sail_line": { "type": "int" },
        "source_line": { "type": "int" },
        "incr": { "type": "bool" },
        "ntba": { "type": "bool" }
    }

    # fields = spec.get("fields", sl_fields)
    delimiter = spec.get("delimiter", ",")
    firstRow = spec.get("firstRow", 0)

    records = []
    with open(path, "r", errors="ignore") as fd:
        row = 0
        reader = csv.reader(fd, delimiter=delimiter)
        while row < firstRow:
            next(reader)
            row += 1
        headers = [ h.strip() for h in next(reader) if len(h.strip()) ]

        for line in reader:
            records.append(cast_values(remap(line, headers), fields))

    return records


def from_file_p111 (path, spec):
    pass

def from_file (path, spec):
    if spec.get("type") == "x-sl+csv":
        return from_file_saillines(path, spec)
    else:
        return from_file_delimited(path, spec)
128 bin/fwr.py
@@ -1,128 +0,0 @@
#!/usr/bin/python3

"""
Fixed width record importing functions.
"""

import builtins

def to_bool (v):
    try:
        return bool(int(v))
    except ValueError:
        if type(v) == str:
            return v.strip().lower().startswith("t")
        return False

transform = {
    "int": lambda v: builtins.int(float(v)),
    "float": float,
    "string": str,
    "str": str,
    "bool": to_bool
}

def parse_line (line, fields, fixed = None):
    # print("parse_line", line, fields, fixed)
    data = dict()

    if fixed:
        for value in fixed:
            start = value["offset"]
            end = start + len(value["text"])
            text = line[start:end]
            if text != value["text"]:
                return f"Expected text `{value['text']}` at position {start} but found `{text}` instead."

    for key in fields:
        spec = fields[key]
        transformer = transform[spec.get("type", "str")]
        pos_from = spec["offset"]
        pos_to = pos_from + spec["length"]
        text = line[pos_from:pos_to]
        value = transformer(text)
        if "enum" in spec:
            if "default" in spec:
                value = spec["default"]
            for enum_key in spec["enum"]:
                if enum_key == text:
                    enum_value = transformer(spec["enum"][enum_key])
                    value = enum_value
                    break

        data[key] = value

    # print("parse_line data =", data)
    return data


specfields = {
    "sps1": {
        "line_name": { "offset": 1, "length": 16, "type": "int" },
        "point_number": { "offset": 17, "length": 8, "type": "int" },
        "easting": { "offset": 46, "length": 9, "type": "float" },
        "northing": { "offset": 55, "length": 10, "type": "float" }
    },
    "sps21": {
        "line_name": { "offset": 1, "length": 7, "type": "int" },
        "point_number": { "offset": 11, "length": 7, "type": "int" },
        "easting": { "offset": 46, "length": 9, "type": "float" },
        "northing": { "offset": 55, "length": 10, "type": "float" }
    },
    "p190": {
        "line_name": { "offset": 1, "length": 12, "type": "int" },
        "point_number": { "offset": 19, "length": 6, "type": "int" },
        "easting": { "offset": 46, "length": 9, "type": "float" },
        "northing": { "offset": 55, "length": 9, "type": "float" }
    },
}

def from_file(path, spec):

    # If spec.fields is not present, deduce it from spec.type ("sps1", "sps21", "p190", etc.)
    if "fields" in spec:
        fields = spec["fields"]
    elif "type" in spec and spec["type"] in specfields:
        fields = specfields[spec["type"]]
    else:
        # TODO: Should default to looking for spec.format and doing a legacy import on it
        return "Neither 'type' nor 'fields' given. I don't know how to import this fixed-width dataset."

    firstRow = spec.get("firstRow", 0)

    skipStart = [] # Skip lines starting with any of these values
    skipMatch = [] # Skip lines matching any of these values

    if "type" in spec:
        if spec["type"] == "sps1" or spec["type"] == "sps21" or spec["type"] == "p190":
            skipStart = "H"
            skipMatch = "EOF"

    records = []
    with open(path, "r", errors="ignore") as fd:
        row = 0
        line = fd.readline()

        while line:
            skip = False

            if row < firstRow:
                skip = True

            if not skip:
                for v in skipStart:
                    if line.startswith(v):
                        skip = True
                        break
                for v in skipMatch:
                    if line == v:
                        skip = True
                        break

            if not skip:
                records.append(parse_line(line, fields))

            row += 1
            line = fd.readline()

    return records
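A small worked example of the (removed) parse_line() above, using an invented two-field spec so the offsets are easy to check:

fields = {
    "seq":  { "offset": 0, "length": 4, "type": "int" },   # characters 0-3
    "line": { "offset": 4, "length": 6, "type": "int" },   # characters 4-9
}
parse_line("0012003456", fields)
# -> {"seq": 12, "line": 3456}; a `fixed` marker mismatch would instead
#    return an error string rather than a dict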
@@ -9,9 +9,11 @@ from datastore import Datastore

if __name__ == '__main__':

    print("Reading configuration")
    surveys = configuration.surveys()

    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()

    print("Reading surveys")
    for survey in surveys:
@@ -15,7 +15,6 @@ import re
import time
import configuration
import p111
import fwr
from datastore import Datastore

def add_pending_remark(db, sequence):

@@ -52,11 +51,12 @@ def del_pending_remark(db, sequence):
if __name__ == '__main__':

    print("Reading configuration")
    surveys = configuration.surveys()
    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)

    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()
    db.connect()

    print("Reading surveys")
    for survey in surveys:

@@ -70,94 +70,59 @@ if __name__ == '__main__':
        print("No final P1/11 configuration")
        exit(0)


        lineNameInfo = final_p111.get("lineNameInfo")
        pattern = final_p111.get("pattern")
        if not lineNameInfo:
            if not pattern:
                print("ERROR! Missing final.p111.lineNameInfo in project configuration. Cannot import final P111")
                raise Exception("Missing final.p111.lineNameInfo")
            else:
                print("WARNING! No `lineNameInfo` in project configuration (final.p111). You should add it to the settings.")
        rx = None
        if pattern and pattern.get("regex"):
            rx = re.compile(pattern["regex"])
        pattern = final_p111["pattern"]
        rx = re.compile(pattern["regex"])

        if "pending" in survey["final"]:
            pendingRx = re.compile(survey["final"]["pending"]["pattern"]["regex"])

        for fileprefix in final_p111["paths"]:
            realprefix = configuration.translate_path(fileprefix)
            print(f"Path prefix: {fileprefix} → {realprefix}")
            print(f"Path prefix: {fileprefix}")

            for globspec in final_p111["globs"]:
                for physical_filepath in pathlib.Path(realprefix).glob(globspec):
                    physical_filepath = str(physical_filepath)
                    logical_filepath = configuration.untranslate_path(physical_filepath)
                    print(f"Found {logical_filepath}")
                for filepath in pathlib.Path(fileprefix).glob(globspec):
                    filepath = str(filepath)
                    print(f"Found {filepath}")

                    pending = False
                    if pendingRx:
                        pending = pendingRx.search(physical_filepath) is not None
                        pending = pendingRx.search(filepath) is not None

                    if not db.file_in_db(logical_filepath):
                    if not db.file_in_db(filepath):

                        age = time.time() - os.path.getmtime(physical_filepath)
                        age = time.time() - os.path.getmtime(filepath)
                        if age < file_min_age:
                            print("Skipping file because too new", logical_filepath)
                            print("Skipping file because too new", filepath)
                            continue

                        print("Importing")

                        if rx:
                            match = rx.match(os.path.basename(logical_filepath))
                            if not match:
                                error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})"
                                print(error_message, file=sys.stderr)
                                print("This file will be ignored!")
                                continue
                        match = rx.match(os.path.basename(filepath))
                        if not match:
                            error_message = f"File path not matching the expected format! ({filepath} ~ {pattern['regex']})"
                            print(error_message, file=sys.stderr)
                            print("This file will be ignored!")
                            continue

                        file_info = dict(zip(pattern["captures"], match.groups()))
                        file_info["meta"] = {}

                        if lineNameInfo:
                            basename = os.path.basename(physical_filepath)
                            fields = lineNameInfo.get("fields", {})
                            fixed = lineNameInfo.get("fixed")
                            try:
                                parsed_line = fwr.parse_line(basename, fields, fixed)
                            except ValueError as err:
                                parsed_line = "Line format error: " + str(err)
                            if type(parsed_line) == str:
                                print(parsed_line, file=sys.stderr)
                                print("This file will be ignored!")
                                continue

                            file_info = {}
                            file_info["sequence"] = parsed_line["sequence"]
                            file_info["line"] = parsed_line["line"]
                            del(parsed_line["sequence"])
                            del(parsed_line["line"])
                            file_info["meta"] = {
                                "fileInfo": parsed_line
                            }
                        file_info = dict(zip(pattern["captures"], match.groups()))
                        file_info["meta"] = {}

                        if pending:
                            print("Skipping / removing final file because marked as PENDING", logical_filepath)
                            print("Skipping / removing final file because marked as PENDING", filepath)
                            db.del_sequence_final(file_info["sequence"])
                            add_pending_remark(db, file_info["sequence"])
                            continue
                        else:
                            del_pending_remark(db, file_info["sequence"])

                        p111_data = p111.from_file(physical_filepath)
                        p111_data = p111.from_file(filepath)

                        print("Saving")

                        p111_records = p111.p111_type("S", p111_data)
                        file_info["meta"]["lineName"] = p111.line_name(p111_data)

                        db.save_final_p111(p111_records, file_info, logical_filepath, survey["epsg"])
                        db.save_final_p111(p111_records, file_info, filepath, survey["epsg"])
                    else:
                        print("Already in DB")
                        if pending:
@@ -1,127 +0,0 @@
#!/usr/bin/python3

"""
Import SmartSource data.

For each survey in configuration.surveys(), check for new
or modified final gun header files and (re-)import them into the
database.
"""

import os
import sys
import pathlib
import re
import time
import json
import configuration
from datastore import Datastore

if __name__ == '__main__':
    """
    Imports map layers from the directories defined in the configuration object
    `import.map.layers`. The content of that key is an object with the following
    structure:

    {
        layer1Name: [
            format: "geojson",
            path: "…", // Logical path to a directory
            globs: [
                "**/*.geojson", // List of globs matching map data files
                …
            ]
        ],

        layer2Name: …
        …
    }
    """

    def process (layer_name, layer, physical_filepath):
        physical_filepath = str(physical_filepath)
        logical_filepath = configuration.untranslate_path(physical_filepath)
        print(f"Found {logical_filepath}")

        if not db.file_in_db(logical_filepath):

            age = time.time() - os.path.getmtime(physical_filepath)
            if age < file_min_age:
                print("Skipping file because too new", logical_filepath)
                return

            print("Importing")

            file_info = {
                "type": "map_layer",
                "format": layer["format"],
                "name": layer_name,
                "tooltip": layer.get("tooltip"),
                "popup": layer.get("popup")
            }

            db.save_file_data(logical_filepath, json.dumps(file_info))

        else:
            file_info = db.get_file_data(logical_filepath)
            dirty = False
            if file_info:
                if file_info["name"] != layer_name:
                    print("Renaming to", layer_name)
                    file_info["name"] = layer_name
                    dirty = True
                if file_info.get("tooltip") != layer.get("tooltip"):
                    print("Changing tooltip to", layer.get("tooltip") or "null")
                    file_info["tooltip"] = layer.get("tooltip")
                    dirty = True
                if file_info.get("popup") != layer.get("popup"):
                    print("Changing popup to", layer.get("popup") or "null")
                    file_info["popup"] = layer.get("popup")
                    dirty = True

                if dirty:
                    db.save_file_data(logical_filepath, json.dumps(file_info))
            else:
                print("Already in DB")


    print("Reading configuration")
    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)

    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()

    print("Reading surveys")
    for survey in surveys:
        print(f'Survey: {survey["id"]} ({survey["schema"]})')

        db.set_survey(survey["schema"])

        try:
            map_layers = survey["imports"]["map"]["layers"]
        except KeyError:
            print("No map layers defined")
            continue

        for layer_name, layer_items in map_layers.items():

            for layer in layer_items:
                fileprefix = layer["path"]
                realprefix = configuration.translate_path(fileprefix)

                if os.path.isfile(realprefix):

                    process(layer_name, layer, realprefix)

                elif os.path.isdir(realprefix):

                    if not "globs" in layer:
                        layer["globs"] = [ "**/*.geojson" ]

                    for globspec in layer["globs"]:
                        for physical_filepath in pathlib.Path(realprefix).glob(globspec):
                            process(layer_name, layer, physical_filepath)

    print("Done")
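An illustrative instance of the import.map.layers structure described in the docstring above, written as the Python object the process() function would receive (the layer name and path are invented):

map_layers = {
    "Obstructions": [                          # hypothetical layer name
        {
            "format": "geojson",
            "path": "/data/gis/obstructions",  # logical path, see translate_path()
            "globs": [ "**/*.geojson" ],
            "tooltip": "name",
        }
    ]
}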
@@ -15,52 +15,38 @@ import configuration
import preplots
from datastore import Datastore

def preplots_sorter (preplot):
    rank = {
        "x-sl+csv": 10
    }
    return rank.get(preplot.get("type"), 0)

if __name__ == '__main__':

    print("Reading configuration")
    surveys = configuration.surveys()
    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)

    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()

    print("Reading configuration")
    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)

    print("Reading surveys")
    for survey in surveys:
        print(f'Survey: {survey["id"]} ({survey["schema"]})')
        db.set_survey(survey["schema"])

        # We sort the preplots so that ancillary line info always comes last,
        # after the actual line + point data has been imported
        for file in sorted(survey["preplots"], key=preplots_sorter):
            realpath = configuration.translate_path(file["path"])

        for file in survey["preplots"]:
            print(f"Preplot: {file['path']}")
            if not db.file_in_db(file["path"]):

                age = time.time() - os.path.getmtime(realpath)
                age = time.time() - os.path.getmtime(file["path"])
                if age < file_min_age:
                    print("Skipping file because too new", file["path"])
                    continue

                print("Importing")
                try:
                    preplot = preplots.from_file(file, realpath)
                    preplot = preplots.from_file(file)
                except FileNotFoundError:
                    print(f"File does not exist: {file['path']}", file=sys.stderr)
                    continue

                if type(preplot) is list:
                    print("Saving to DB")
                    if file.get("type") == "x-sl+csv":
                        db.save_preplot_line_info(preplot, file["path"], file)
                    else:
                        db.save_preplots(preplot, file["path"], file["class"], survey["epsg"], file)
                    db.save_preplots(preplot, file["path"], file["class"], survey["epsg"], file)
                elif type(preplot) is str:
                    print(preplot)
                else:
@@ -15,17 +15,17 @@ import re
import time
import configuration
import p111
import fwr
from datastore import Datastore

if __name__ == '__main__':

    print("Reading configuration")
    surveys = configuration.surveys()
    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)

    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()
    db.connect()

    print("Reading surveys")
    for survey in surveys:

@@ -39,79 +39,45 @@ if __name__ == '__main__':
        print("No raw P1/11 configuration")
        exit(0)

        lineNameInfo = raw_p111.get("lineNameInfo")
        pattern = raw_p111.get("pattern")
        if not lineNameInfo:
            if not pattern:
                print("ERROR! Missing raw.p111.lineNameInfo in project configuration. Cannot import raw P111")
                raise Exception("Missing raw.p111.lineNameInfo")
            else:
                print("WARNING! No `lineNameInfo` in project configuration (raw.p111). You should add it to the settings.")
        rx = None
        if pattern and pattern.get("regex"):
            rx = re.compile(pattern["regex"])
        pattern = raw_p111["pattern"]
        rx = re.compile(pattern["regex"])

        if "ntbp" in survey["raw"]:
            ntbpRx = re.compile(survey["raw"]["ntbp"]["pattern"]["regex"])

        for fileprefix in raw_p111["paths"]:
            realprefix = configuration.translate_path(fileprefix)
            print(f"Path prefix: {fileprefix} → {realprefix}")
            print(f"Path prefix: {fileprefix}")

            for globspec in raw_p111["globs"]:
                for physical_filepath in pathlib.Path(realprefix).glob(globspec):
                    physical_filepath = str(physical_filepath)
                    logical_filepath = configuration.untranslate_path(physical_filepath)
                    print(f"Found {logical_filepath}")
                for filepath in pathlib.Path(fileprefix).glob(globspec):
                    filepath = str(filepath)
                    print(f"Found {filepath}")

                    if ntbpRx:
                        ntbp = ntbpRx.search(physical_filepath) is not None
                        ntbp = ntbpRx.search(filepath) is not None
                    else:
                        ntbp = False

                    if not db.file_in_db(logical_filepath):
                    if not db.file_in_db(filepath):

                        age = time.time() - os.path.getmtime(physical_filepath)
                        age = time.time() - os.path.getmtime(filepath)
                        if age < file_min_age:
                            print("Skipping file because too new", logical_filepath)
                            print("Skipping file because too new", filepath)
                            continue

                        print("Importing")

                        if rx:
                            match = rx.match(os.path.basename(logical_filepath))
                            if not match:
                                error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})"
                                print(error_message, file=sys.stderr)
                                print("This file will be ignored!")
                                continue
                        match = rx.match(os.path.basename(filepath))
                        if not match:
                            error_message = f"File path not matching the expected format! ({filepath} ~ {pattern['regex']})"
                            print(error_message, file=sys.stderr)
                            print("This file will be ignored!")
                            continue

                        file_info = dict(zip(pattern["captures"], match.groups()))
                        file_info["meta"] = {}
                        file_info = dict(zip(pattern["captures"], match.groups()))
                        file_info["meta"] = {}

                        if lineNameInfo:
                            basename = os.path.basename(physical_filepath)
                            fields = lineNameInfo.get("fields", {})
                            fixed = lineNameInfo.get("fixed")
                            try:
                                parsed_line = fwr.parse_line(basename, fields, fixed)
                            except ValueError as err:
                                parsed_line = "Line format error: " + str(err)
                            if type(parsed_line) == str:
                                print(parsed_line, file=sys.stderr)
                                print("This file will be ignored!")
                                continue

                            file_info = {}
                            file_info["sequence"] = parsed_line["sequence"]
                            file_info["line"] = parsed_line["line"]
                            del(parsed_line["sequence"])
                            del(parsed_line["line"])
                            file_info["meta"] = {
                                "fileInfo": parsed_line
                            }

                        p111_data = p111.from_file(physical_filepath)
                        p111_data = p111.from_file(filepath)

                        print("Saving")

@@ -119,7 +85,7 @@ if __name__ == '__main__':
                        if len(p111_records):
                            file_info["meta"]["lineName"] = p111.line_name(p111_data)

                            db.save_raw_p111(p111_records, file_info, logical_filepath, survey["epsg"], ntbp=ntbp)
                            db.save_raw_p111(p111_records, file_info, filepath, survey["epsg"], ntbp=ntbp)
                        else:
                            print("No source records found in file")
                    else:

@@ -127,7 +93,7 @@ if __name__ == '__main__':

                        # Update the NTBP status to whatever the latest is,
                        # as it might have changed.
                        db.set_ntbp(logical_filepath, ntbp)
                        db.set_ntbp(filepath, ntbp)
                        if ntbp:
                            print("Sequence is NTBP")
@@ -15,17 +15,17 @@ import re
import time
import configuration
import smsrc
import fwr
from datastore import Datastore

if __name__ == '__main__':

    print("Reading configuration")
    surveys = configuration.surveys()
    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)

    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()
    db.connect()

    print("Reading surveys")
    for survey in surveys:

@@ -34,80 +34,49 @@ if __name__ == '__main__':
        db.set_survey(survey["schema"])

        try:
            raw_smsrc = survey["raw"]["source"]["smsrc"]["header"]
            raw_smsrc = survey["raw"]["smsrc"]
        except KeyError:
            print("No SmartSource data configuration")
            continue

        # NOTE I've no idea what this is 🤔
        # flags = 0
        # if "flags" in raw_smsrc:
        #     configuration.rxflags(raw_smsrc["flags"])
        flags = 0
        if "flags" in raw_smsrc:
            configuration.rxflags(raw_smsrc["flags"])

        lineNameInfo = raw_smsrc.get("lineNameInfo")
        pattern = raw_smsrc.get("pattern")
        rx = None
        if pattern and pattern.get("regex"):
            rx = re.compile(pattern["regex"])
        pattern = raw_smsrc["pattern"]
        rx = re.compile(pattern["regex"], flags)

        for fileprefix in raw_smsrc["paths"]:
            realprefix = configuration.translate_path(fileprefix)
            print(f"Path prefix: {fileprefix} → {realprefix}")
            print(f"Path prefix: {fileprefix}")

            for globspec in raw_smsrc["globs"]:
                for physical_filepath in pathlib.Path(realprefix).glob(globspec):
                    physical_filepath = str(physical_filepath)
                    logical_filepath = configuration.untranslate_path(physical_filepath)
                    print(f"Found {logical_filepath}")
                for filepath in pathlib.Path(fileprefix).glob(globspec):
                    filepath = str(filepath)
                    print(f"Found {filepath}")

                    if not db.file_in_db(logical_filepath):
                    if not db.file_in_db(filepath):

                        age = time.time() - os.path.getmtime(physical_filepath)
                        age = time.time() - os.path.getmtime(filepath)
                        if age < file_min_age:
                            print("Skipping file because too new", logical_filepath)
                            print("Skipping file because too new", filepath)
                            continue

                        print("Importing")

                        if rx:
                            match = rx.match(os.path.basename(logical_filepath))
                            if not match:
                                error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})"
                                print(error_message, file=sys.stderr)
                                print("This file will be ignored!")
                                continue
                        match = rx.match(os.path.basename(filepath))
                        if not match:
                            error_message = f"File path not matching the expected format! ({filepath} ~ {pattern['regex']})"
                            print(error_message, file=sys.stderr)
                            print("This file will be ignored!")
                            continue

                        file_info = dict(zip(pattern["captures"], match.groups()))
                        file_info["meta"] = {}
                        file_info = dict(zip(pattern["captures"], match.groups()))


                        if lineNameInfo:
                            basename = os.path.basename(physical_filepath)
                            fields = lineNameInfo.get("fields", {})
                            fixed = lineNameInfo.get("fixed")
                            try:
                                parsed_line = fwr.parse_line(basename, fields, fixed)
                            except ValueError as err:
                                parsed_line = "Line format error: " + str(err)
                            if type(parsed_line) == str:
                                print(parsed_line, file=sys.stderr)
                                print("This file will be ignored!")
                                continue

                            file_info = {}
                            file_info["sequence"] = parsed_line["sequence"]
                            file_info["line"] = parsed_line["line"]
                            del(parsed_line["sequence"])
                            del(parsed_line["line"])
                            file_info["meta"] = {
                                "fileInfo": parsed_line
                            }

                        smsrc_records = smsrc.from_file(physical_filepath)
                        smsrc_records = smsrc.from_file(filepath)

                        print("Saving")

                        db.save_raw_smsrc(smsrc_records, file_info, logical_filepath)
                        db.save_raw_smsrc(smsrc_records, file_info, filepath)
                    else:
                        print("Already in DB")
@@ -15,4 +15,25 @@ from datastore import Datastore

if __name__ == '__main__':

    print("This function is obsolete. Returning with no action")
    print("Reading configuration")
    configs = configuration.files(include_archived = True)

    print("Connecting to database")
    db = Datastore()
    #db.connect()

    print("Reading surveys")
    for config in configs:
        filepath = config[0]
        survey = config[1]
        print(f'Survey: {survey["id"]} ({filepath})')
        db.set_survey(survey["schema"])
        if not db.file_in_db(filepath):
            print("Saving to DB")
            db.save_file_data(filepath, json.dumps(survey))
            print("Applying survey configuration")
            db.apply_survey_configuration()
        else:
            print("Already in DB")

    print("Done")
@@ -38,11 +38,11 @@ if __name__ == '__main__':

    message = " ".join(args["remarks"])

    print("new event:", schema, tstamp, message, args["label"])
    print("new event:", schema, tstamp, message)

    if schema and tstamp and message:
        db.set_survey(schema)
        with db.conn.cursor() as cursor:
            qry = "INSERT INTO event_log (tstamp, remarks, labels) VALUES (%s, replace_placeholders(%s, %s, NULL, NULL), %s);"
            cursor.execute(qry, (tstamp, message, tstamp, args["label"]))
            qry = "INSERT INTO events_timed (tstamp, remarks) VALUES (%s, %s);"
            cursor.execute(qry, (tstamp, message))
        db.maybe_commit()
@@ -7,6 +7,7 @@ P1/11 parsing functions.
import math
import re
from datetime import datetime, timedelta, timezone
from parse_fwr import parse_fwr

def _int (string):
    return int(float(string))
21 bin/parse_fwr.py Normal file
@@ -0,0 +1,21 @@
#!/usr/bin/python3

def parse_fwr (string, widths, start=0):
    """Parse a fixed-width record.

    string: the string to parse.
    widths: a list of record widths. A negative width denotes a field to be skipped.
    start: optional start index.

    Returns a list of strings.
    """
    results = []
    current_index = start
    for width in widths:
        if width > 0:
            results.append(string[current_index : current_index + width])
            current_index += width
        else:
            current_index -= width

    return results
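A quick usage sketch of parse_fwr() with a fabricated record:

parse_fwr("ABCDE12345", [5, -2, 3])
# -> ["ABCDE", "345"]: keep 5 characters, skip 2, keep 3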
@@ -1,51 +1,14 @@
import fwr
import delimited
import sps

"""
Preplot importing functions.
"""


def is_fixed_width (file):
    fixed_width_types = [ "sps1", "sps21", "p190", "fixed-width" ]
    return type(file) == dict and "type" in file and file["type"] in fixed_width_types

def is_delimited (file):
    delimited_types = [ "csv", "p111", "x-sl+csv" ]
    return type(file) == dict and "type" in file and file["type"] in delimited_types

def from_file (file, realpath = None):
    """
    Return a list of dicts, where each dict has the structure:
    {
        "line_name": <int>,
        "points": [
            {
                "line_name": <int>,
                "point_number": <int>,
                "easting": <float>,
                "northing": <float>
            },
            …
        ]
    }
    On error, return a string describing the error condition.
    """

    filepath = realpath or file["path"]
    if is_fixed_width(file):
        records = fwr.from_file(filepath, file)
    elif is_delimited(file):
        records = delimited.from_file(filepath, file)
def from_file (file):
    if not "type" in file or file["type"] == "sps":
        records = sps.from_file(file["path"], file["format"] if "format" in file else None )
    else:
        return "Unrecognised file format"

    if type(records) == str:
        # This is an error message
        return records

    if file.get("type") == "x-sl+csv":
        return records
        return "Not an SPS file"

    lines = []
    line_names = set([r["line_name"] for r in records])
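The rest of from_file() falls outside this hunk; a hedged sketch of how the flat records are presumably grouped into the documented structure, given the line_names set above:

# Illustrative continuation only; the actual code is not part of this diff.
for name in sorted(line_names):
    points = [ r for r in records if r["line_name"] == name ]
    lines.append({ "line_name": name, "points": points })
return lines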
@@ -13,27 +13,21 @@ from datastore import Datastore

if __name__ == '__main__':

    print("Reading configuration")
    surveys = configuration.surveys()

    print("Connecting to database")
    db = Datastore()

    print("Reading configuration")
    surveys = db.surveys()

    print("Reading surveys")
    for survey in surveys:
        print(f'Survey: {survey["id"]} ({survey["schema"]})')
        db.set_survey(survey["schema"])

        for file in db.list_files():
            try:
                path = configuration.translate_path(file[0])
                if not os.path.exists(path):
                    print(path, "NOT FOUND")
                    db.del_file(file[0])
            except TypeError:
                # In case the logical path no longer matches
                # the Dougal configuration.
                print(file[0], "COULD NOT BE TRANSLATED TO A PHYSICAL PATH. DELETING")
                db.del_file(file[0])
            path = file[0]
            if not os.path.exists(path):
                print(path, "NOT FOUND")
                db.del_file(path)

    print("Done")
@@ -90,12 +90,6 @@ function run () {
  rm $STDOUTLOG $STDERRLOG
}

function cleanup () {
  if [[ -f $LOCKFILE ]]; then
    rm "$LOCKFILE"
  fi
}

if [[ -f $LOCKFILE ]]; then
  PID=$(cat "$LOCKFILE")
  if pgrep -F "$LOCKFILE"; then

@@ -113,13 +107,6 @@ echo "$$" > "$LOCKFILE" || {
}
print_info "Start run"

print_log "Check if data is accessible"
$BINDIR/check_mounts_present.py || {
  print_warning "Import mounts not accessible. Inhibiting all tasks!"
  cleanup
  exit 253
}

print_log "Purge deleted files"
run $BINDIR/purge_deleted_files.py

@@ -132,21 +119,18 @@ run $BINDIR/import_preplots.py
print_log "Import raw P1/11"
run $BINDIR/import_raw_p111.py

#print_log "Import raw P1/90"
#run $BINDIR/import_raw_p190.py
print_log "Import raw P1/90"
run $BINDIR/import_raw_p190.py

print_log "Import final P1/11"
run $BINDIR/import_final_p111.py

#print_log "Import final P1/90"
#run $BINDIR/import_final_p190.py
print_log "Import final P1/90"
run $BINDIR/import_final_p190.py

print_log "Import SmartSource data"
run $BINDIR/import_smsrc.py

print_log "Import map user layers"
run $BINDIR/import_map_layers.py

# if [[ -z "$RUNNER_NOEXPORT" ]]; then
#   print_log "Export system data"
#   run $BINDIR/system_exports.py
51 bin/sps.py Normal file
@@ -0,0 +1,51 @@
#!/usr/bin/python3

"""
SPS importing functions.

And by SPS, we mean more or less any line-delimited, fixed-width record format.
"""

import builtins
from parse_fwr import parse_fwr

def int (v):
    return builtins.int(float(v))

def parse_line (string, spec):
    """Parse a line from an SPS file."""
    names = spec["names"]
    widths = spec["widths"]
    normalisers = spec["normalisers"]
    record = [ t[0](t[1]) for t in zip(normalisers, parse_fwr(string, widths)) ]
    return dict(zip(names, record))

def from_file(path, spec = None):
    if spec is None:
        spec = {
            "names": [ "line_name", "point_number", "easting", "northing" ],
            "widths": [ -1, 10, 10, -25, 10, 10 ],
            "normalisers": [ int, int, float, float ]
        }
    else:
        normaliser_tokens = [ "int", "float", "str", "bool" ]
        spec["normalisers"] = [ eval(t) for t in spec["types"] if t in normaliser_tokens ]

    records = []
    with open(path) as fd:
        cnt = 0
        line = fd.readline()
        while line:
            cnt = cnt + 1

            if line == "EOF":
                break

            record = parse_line(line, spec)
            if record is not None:
                records.append(record)

            line = fd.readline()

    del spec["normalisers"]
    return records
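A usage sketch of the default spec above; the record is fabricated to match widths [-1, 10, 10, -25, 10, 10]:

spec = {
    "names": [ "line_name", "point_number", "easting", "northing" ],
    "widths": [ -1, 10, 10, -25, 10, 10 ],
    "normalisers": [ int, int, float, float ],
}
line = "S" + "      1001" + "       101" + " " * 25 + "  123456.7" + " 7654321.0"
parse_line(line, spec)
# -> {"line_name": 1001, "point_number": 101,
#     "easting": 123456.7, "northing": 7654321.0}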
@@ -32,25 +32,6 @@ imports:
  # least this many seconds ago.
  file_min_age: 60

  # These paths refer to remote mounts which must be present in order
  # for imports to work. If any of these paths are empty, import actions
  # (including data deletion) will be inhibited. This is to cope with
  # things like transient network failures.
  mounts:
    - /srv/mnt/Data

  # These paths can be exposed to end users via the API. They should
  # contain the locations where project data, or any other user data
  # that needs to be accessible by Dougal, is located.
  #
  # This key can be either a string or an object:
  # - If a string, it points to the root path for Dougal-accessible data.
  # - If an object, there is an implicit root and the first-level
  #   paths are denoted by the keys, with the values being their
  #   respective physical paths.
  # Non-absolute paths are relative to $DOUGAL_ROOT.
  paths: /srv/mnt/Data

queues:
  asaqc:
    request:
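The paths key above uses the string form; for contrast, a hypothetical object form (keys become the logical first-level directories, values their physical locations), shown here as commented YAML:

# paths:
#   data: /srv/mnt/Data
#   archive: /srv/mnt/Archive   # invented; non-absolute values resolve under $DOUGAL_ROOT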
@@ -144,107 +144,6 @@ CREATE TYPE public.queue_item_status AS ENUM (

ALTER TYPE public.queue_item_status OWNER TO postgres;

--
-- Name: event_meta(timestamp with time zone); Type: FUNCTION; Schema: public; Owner: postgres
--

CREATE FUNCTION public.event_meta(tstamp timestamp with time zone) RETURNS jsonb
    LANGUAGE plpgsql
    AS $$
BEGIN
    RETURN event_meta(tstamp, NULL, NULL);
END;
$$;


ALTER FUNCTION public.event_meta(tstamp timestamp with time zone) OWNER TO postgres;

--
-- Name: FUNCTION event_meta(tstamp timestamp with time zone); Type: COMMENT; Schema: public; Owner: postgres
--

COMMENT ON FUNCTION public.event_meta(tstamp timestamp with time zone) IS 'Overload of event_meta (timestamptz, integer, integer) for use when searching by timestamp.';


--
-- Name: event_meta(integer, integer); Type: FUNCTION; Schema: public; Owner: postgres
--

CREATE FUNCTION public.event_meta(sequence integer, point integer) RETURNS jsonb
    LANGUAGE plpgsql
    AS $$
BEGIN
    RETURN event_meta(NULL, sequence, point);
END;
$$;


ALTER FUNCTION public.event_meta(sequence integer, point integer) OWNER TO postgres;

--
-- Name: FUNCTION event_meta(sequence integer, point integer); Type: COMMENT; Schema: public; Owner: postgres
--

COMMENT ON FUNCTION public.event_meta(sequence integer, point integer) IS 'Overload of event_meta (timestamptz, integer, integer) for use when searching by sequence / point.';


--
-- Name: event_meta(timestamp with time zone, integer, integer); Type: FUNCTION; Schema: public; Owner: postgres
--

CREATE FUNCTION public.event_meta(tstamp timestamp with time zone, sequence integer, point integer) RETURNS jsonb
    LANGUAGE plpgsql
    AS $$
DECLARE
    result jsonb;
    -- Tolerance is hard-coded, at least until a need to expose arises.
    tolerance numeric;
BEGIN
    tolerance := 3; -- seconds

    -- We search by timestamp if we can, as that's a lot quicker
    IF tstamp IS NOT NULL THEN

        SELECT meta
        INTO result
        FROM real_time_inputs rti
        WHERE
            rti.tstamp BETWEEN (event_meta.tstamp - tolerance * interval '1 second') AND (event_meta.tstamp + tolerance * interval '1 second')
        ORDER BY abs(extract('epoch' FROM rti.tstamp - event_meta.tstamp))
        LIMIT 1;

    ELSE

        SELECT meta
        INTO result
        FROM real_time_inputs rti
        WHERE
            (meta->>'_sequence')::integer = event_meta.sequence AND
            (meta->>'_point')::integer = event_meta.point
        ORDER BY rti.tstamp DESC
        LIMIT 1;

    END IF;

    RETURN result;

END;
$$;


ALTER FUNCTION public.event_meta(tstamp timestamp with time zone, sequence integer, point integer) OWNER TO postgres;

--
-- Name: FUNCTION event_meta(tstamp timestamp with time zone, sequence integer, point integer); Type: COMMENT; Schema: public; Owner: postgres
--

COMMENT ON FUNCTION public.event_meta(tstamp timestamp with time zone, sequence integer, point integer) IS 'Return the real-time event metadata associated with a sequence / point in the current project or
with a given timestamp. Timestamp that is first searched for in the shot tables
of the current prospect or, if not found, in the real-time data.

Returns a JSONB object.';
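A hedged sketch of calling the overloads above from Python via psycopg2, reusing the Datastore connection shown earlier (sequence and point values are invented; the explicit casts pick the intended overload):

cur = db.conn.cursor()
cur.execute("SELECT public.event_meta(%s::timestamptz);", ("2025-01-01T00:00:00Z",))
meta_by_time = cur.fetchone()[0]   # jsonb arrives as a Python dict
cur.execute("SELECT public.event_meta(%s::integer, %s::integer);", (1042, 101))
meta_by_shot = cur.fetchone()[0]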
|
||||
--
-- Name: geometry_from_tstamp(timestamp with time zone, numeric); Type: FUNCTION; Schema: public; Owner: postgres
--

@@ -273,78 +172,6 @@ ALTER FUNCTION public.geometry_from_tstamp(ts timestamp with time zone, toleranc

COMMENT ON FUNCTION public.geometry_from_tstamp(ts timestamp with time zone, tolerance numeric, OUT geometry public.geometry, OUT delta numeric) IS 'Get geometry from timestamp';


--
-- Name: interpolate_geometry_from_tstamp(timestamp with time zone, numeric); Type: FUNCTION; Schema: public; Owner: postgres
--

CREATE FUNCTION public.interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric) RETURNS public.geometry
    LANGUAGE plpgsql
    AS $$
DECLARE
  ts0 timestamptz;
  ts1 timestamptz;
  geom0 geometry;
  geom1 geometry;
  span numeric;
  fraction numeric;
BEGIN

  SELECT tstamp, geometry
  INTO ts0, geom0
  FROM real_time_inputs
  WHERE tstamp <= ts
  ORDER BY tstamp DESC
  LIMIT 1;

  SELECT tstamp, geometry
  INTO ts1, geom1
  FROM real_time_inputs
  WHERE tstamp >= ts
  ORDER BY tstamp ASC
  LIMIT 1;

  IF geom0 IS NULL OR geom1 IS NULL THEN
    RAISE NOTICE 'Interpolation failed (no straddling data)';
    RETURN NULL;
  END IF;

  -- See if we got an exact match
  IF ts0 = ts THEN
    RETURN geom0;
  ELSIF ts1 = ts THEN
    RETURN geom1;
  END IF;

  span := extract('epoch' FROM ts1 - ts0);

  IF span > maxspan THEN
    RAISE NOTICE 'Interpolation timespan % outside maximum requested (%)', span, maxspan;
    RETURN NULL;
  END IF;

  fraction := extract('epoch' FROM ts - ts0) / span;

  IF fraction < 0 OR fraction > 1 THEN
    RAISE NOTICE 'Requested timestamp % outside of interpolation span (fraction: %)', ts, fraction;
    RETURN NULL;
  END IF;

  RETURN ST_LineInterpolatePoint(ST_MakeLine(geom0, geom1), fraction);

END;
$$;


ALTER FUNCTION public.interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric) OWNER TO postgres;

--
-- Name: FUNCTION interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric); Type: COMMENT; Schema: public; Owner: postgres
--

COMMENT ON FUNCTION public.interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric) IS 'Interpolate a position over a given maximum timespan (in seconds)
based on real-time inputs. Returns a POINT geometry.';

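-- Usage sketch: interpolate the position at an arbitrary instant, allowing
-- at most 60 seconds between the straddling fixes (values are illustrative):
--
--   SELECT public.interpolate_geometry_from_tstamp('2022-06-01T12:00:00Z', 60);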
--
-- Name: notify(); Type: FUNCTION; Schema: public; Owner: postgres
--

@@ -1,5 +1,3 @@
\connect dougal

INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.5"}')
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.7"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.5"}' WHERE public.info.key = 'version';
SET value = public.info.value || '{"db_schema": "0.3.7"}' WHERE public.info.key = 'version';
File diff suppressed because it is too large
@@ -1,267 +0,0 @@
-- Fix not being able to edit a time-based event.
--
-- New schema version: 0.3.8
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This adds event_position() and event_meta() functions which are used
-- to retrieve position or metadata, respectively, given either a timestamp
-- or a sequence / point pair. Intended to be used in the context of #229.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  --
  -- event_position(): Fetch event position
  --

  CREATE OR REPLACE FUNCTION event_position (
    tstamp timestamptz, sequence integer, point integer, tolerance numeric
  )
  RETURNS geometry
  AS $$
  DECLARE
    position geometry;
  BEGIN

    -- Try and get position by sequence / point first
    IF sequence IS NOT NULL AND point IS NOT NULL THEN
      -- Try and get the position from final_shots or raw_shots
      SELECT COALESCE(f.geometry, r.geometry) geometry
      INTO position
      FROM raw_shots r LEFT JOIN final_shots f USING (sequence, point)
      WHERE r.sequence = event_position.sequence AND r.point = event_position.point;

      IF position IS NOT NULL THEN
        RETURN position;
      ELSIF tstamp IS NULL THEN
        -- Get the timestamp for the sequence / point, if we can.
        -- It will be used later in the function as we fall back
        -- to timestamp based search.
        -- We also adjust the tolerance as we're now dealing with
        -- an exact timestamp.
        SELECT COALESCE(f.tstamp, r.tstamp) tstamp, 0.002 tolerance
        INTO tstamp, tolerance
        FROM raw_shots r LEFT JOIN final_shots f USING (sequence, point)
        WHERE r.sequence = event_position.sequence AND r.point = event_position.point;
      END IF;
    END IF;

    -- If we got here, we'd better have a timestamp.
    -- First attempt: get a position from final_shots, raw_shots. This may
    -- be redundant if we got here from the position of having a sequence /
    -- point without a position, but never mind.
    SELECT COALESCE(f.geometry, r.geometry) geometry
    INTO position
    FROM raw_shots r LEFT JOIN final_shots f USING (sequence, point)
    WHERE r.tstamp = event_position.tstamp OR f.tstamp = event_position.tstamp
    LIMIT 1; -- Just to be sure

    IF position IS NULL THEN
      -- Ok, so everything else so far has failed, let's try and get this
      -- from real time data. We skip the search via sequence / point and
      -- go directly for timestamp.
      SELECT geometry
      INTO position
      FROM geometry_from_tstamp(tstamp, tolerance);
    END IF;

    RETURN position;

  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_position (timestamptz, integer, integer, numeric) IS
  'Return the position associated with a sequence / point in the current project or
with a given timestamp. The timestamp is first searched for in the shot tables
of the current prospect or, if not found, in the real-time data.

Returns a geometry.';

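  -- Usage sketch (illustrative values): look up a position by
  -- sequence / point with a 5 second fallback tolerance:
  --
  --   SELECT event_position(NULL, 11, 2600, 5);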
  CREATE OR REPLACE FUNCTION event_position (
    tstamp timestamptz, sequence integer, point integer
  )
  RETURNS geometry
  AS $$
  BEGIN
    RETURN event_position(tstamp, sequence, point, 3);
  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_position (timestamptz, integer, integer) IS
  'Overload of event_position with a default tolerance of three seconds.';


  CREATE OR REPLACE FUNCTION event_position (
    tstamp timestamptz
  )
  RETURNS geometry
  AS $$
  BEGIN
    RETURN event_position(tstamp, NULL, NULL);
  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_position (timestamptz) IS
  'Overload of event_position (timestamptz, integer, integer) for use when searching by timestamp.';

  CREATE OR REPLACE FUNCTION event_position (
    sequence integer, point integer
  )
  RETURNS geometry
  AS $$
  BEGIN
    RETURN event_position(NULL, sequence, point);
  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_position (integer, integer) IS
  'Overload of event_position (timestamptz, integer, integer) for use when searching by sequence / point.';


END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
BEGIN

  --
  -- event_meta(): Fetch event metadata
  --

  CREATE OR REPLACE FUNCTION event_meta (
    tstamp timestamptz, sequence integer, point integer
  )
  RETURNS jsonb
  AS $$
  DECLARE
    result jsonb;
    -- Tolerance is hard-coded, at least until a need to expose it arises.
    tolerance numeric;
  BEGIN
    tolerance := 3; -- seconds

    -- We search by timestamp if we can, as that's a lot quicker
    IF tstamp IS NOT NULL THEN

      SELECT meta
      INTO result
      FROM real_time_inputs rti
      WHERE
        rti.tstamp BETWEEN (event_meta.tstamp - tolerance * interval '1 second') AND (event_meta.tstamp + tolerance * interval '1 second')
      ORDER BY abs(extract('epoch' FROM rti.tstamp - event_meta.tstamp))
      LIMIT 1;

    ELSE

      SELECT meta
      INTO result
      FROM real_time_inputs rti
      WHERE
        (meta->>'_sequence')::integer = event_meta.sequence AND
        (meta->>'_point')::integer = event_meta.point
      ORDER BY rti.tstamp DESC
      LIMIT 1;

    END IF;

    RETURN result;

  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_meta (timestamptz, integer, integer) IS
  'Return the real-time event metadata associated with a sequence / point in the current project or
with a given timestamp. The timestamp is first searched for in the shot tables
of the current prospect or, if not found, in the real-time data.

Returns a JSONB object.';


  CREATE OR REPLACE FUNCTION event_meta (
    tstamp timestamptz
  )
  RETURNS jsonb
  AS $$
  BEGIN
    RETURN event_meta(tstamp, NULL, NULL);
  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_meta (timestamptz) IS
  'Overload of event_meta (timestamptz, integer, integer) for use when searching by timestamp.';

  CREATE OR REPLACE FUNCTION event_meta (
    sequence integer, point integer
  )
  RETURNS jsonb
  AS $$
  BEGIN
    RETURN event_meta(NULL, sequence, point);
  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_meta (integer, integer) IS
  'Overload of event_meta (timestamptz, integer, integer) for use when searching by sequence / point.';

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.8"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.8"}' WHERE public.info.key = 'version';


CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -1,229 +0,0 @@
-- Add placeholder replacement for event log entries.
--
-- New schema version: 0.3.9
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This defines a replace_placeholders() function, taking as arguments
-- a text string and either a timestamp or a sequence / point pair. It
-- uses the latter arguments to find metadata from which it can extract
-- relevant information and replace it into the text string wherever the
-- appropriate placeholders appear. For instance, given a call such as
-- replace_placeholders('The position is @POS@', NULL, 11, 2600) it will
-- replace '@POS@' with the position of point 2600 in sequence 11, if it
-- exists (or leave the placeholder untouched otherwise).
--
-- A scan_placeholders() procedure is also defined, which calls the above
-- function on the entire event log.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION replace_placeholders (
    text_in text, tstamp timestamptz, sequence integer, point integer
  )
  RETURNS text
  AS $$
  DECLARE
    position geometry;
    metadata jsonb;
    text_out text;

    json_query text;
    json_result jsonb;
    expect_recursion boolean := false;
  BEGIN

    text_out := text_in;

    -- We only get a position if we are going to need it…
    IF regexp_match(text_out, '@DMS@|@POS@|@DEG@') IS NOT NULL THEN
      position := ST_Transform(event_position(tstamp, sequence, point), 4326);
    END IF;

    -- …and likewise with the metadata.
    IF regexp_match(text_out, '@BSP@|@WD@|@CMG@|@EN@|@GRID@|@(\$\..*?)@@') IS NOT NULL THEN
      metadata := event_meta(tstamp, sequence, point);
    END IF;

    -- We shortcut the evaluation if neither of the above regexps matched
    IF position IS NULL AND metadata IS NULL THEN
      RETURN text_out;
    END IF;

    IF position('@DMS@' IN text_out) != 0 THEN
      text_out := replace(text_out, '@DMS@', ST_AsLatLonText(position));
    END IF;

    IF position('@POS@' IN text_out) != 0 THEN
      text_out := replace(text_out, '@POS@', replace(ST_AsLatLonText(position, 'D.DDDDDD'), ' ', ', '));
    END IF;

    IF position('@DEG@' IN text_out) != 0 THEN
      text_out := replace(text_out, '@DEG@', replace(ST_AsLatLonText(position, 'D.DDDDDD'), ' ', ', '));
    END IF;

    IF position('@EN@' IN text_out) != 0 THEN
      IF metadata ? 'easting' AND metadata ? 'northing' THEN
        text_out := replace(text_out, '@EN@', (metadata->>'easting') || ', ' || (metadata->>'northing'));
      END IF;
    END IF;

    IF position('@GRID@' IN text_out) != 0 THEN
      IF metadata ? 'easting' AND metadata ? 'northing' THEN
        text_out := replace(text_out, '@GRID@', (metadata->>'easting') || ', ' || (metadata->>'northing'));
      END IF;
    END IF;

    IF position('@CMG@' IN text_out) != 0 THEN
      IF metadata ? 'bearing' THEN
        text_out := replace(text_out, '@CMG@', metadata->>'bearing');
      END IF;
    END IF;

    IF position('@BSP@' IN text_out) != 0 THEN
      IF metadata ? 'speed' THEN
        text_out := replace(text_out, '@BSP@', round((metadata->>'speed')::numeric * 3600 / 1852, 1)::text);
      END IF;
    END IF;

    IF position('@WD@' IN text_out) != 0 THEN
      IF metadata ? 'waterDepth' THEN
        text_out := replace(text_out, '@WD@', metadata->>'waterDepth');
      END IF;
    END IF;

    json_query := (regexp_match(text_out, '@(\$\..*?)@@'))[1];
    IF json_query IS NOT NULL THEN
      json_result := jsonb_path_query_array(metadata, json_query::jsonpath);
      IF jsonb_array_length(json_result) = 1 THEN
        text_out := replace(text_out, '@'||json_query||'@@', json_result->>0);
      ELSE
        text_out := replace(text_out, '@'||json_query||'@@', json_result::text);
      END IF;
      -- There might be multiple JSONPath queries, so we may have to recurse
      expect_recursion := true;
    END IF;

    IF expect_recursion IS TRUE AND text_in != text_out THEN
      --RAISE NOTICE 'Recursing %', text_out;
      -- We don't know if we have found all the JSONPath expressions,
      -- so we do another pass.
      RETURN replace_placeholders(text_out, tstamp, sequence, point);
    ELSE
      RETURN text_out;
    END IF;

  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION replace_placeholders (text, timestamptz, integer, integer) IS
  'Replace certain placeholder strings in the input text with data obtained from shot or real-time data.';

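  -- Usage sketch, mirroring the example in the header comment (values are
  -- illustrative):
  --
  --   SELECT replace_placeholders('The position is @POS@', NULL, 11, 2600);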
  CREATE OR REPLACE PROCEDURE scan_placeholders ()
  LANGUAGE sql
  AS $$
    -- We update non read-only events via the event_log view to leave a trace
    -- of the fact that placeholders were replaced (and when).
    -- Note that this will not replace placeholders of old edits.
    UPDATE event_log
    SET remarks = replace_placeholders(remarks, tstamp, sequence, point)
    FROM (
      SELECT id
      FROM event_log e
      WHERE
        (meta->'readonly')::boolean IS NOT TRUE AND (
          regexp_match(remarks, '@DMS@|@POS@|@DEG@') IS NOT NULL OR
          regexp_match(remarks, '@BSP@|@WD@|@CMG@|@EN@|@GRID@|@(\$\..*?)@@') IS NOT NULL
        )
    ) t
    WHERE event_log.id = t.id;

    -- And then we update read-only events directly on the event_log_full table
    -- (as of this version of the schema we're prevented from updating read-only
    -- events via event_log anyway).
    UPDATE event_log_full
    SET remarks = replace_placeholders(remarks, tstamp, sequence, point)
    FROM (
      SELECT uid
      FROM event_log_full e
      WHERE
        (meta->'readonly')::boolean IS TRUE AND (
          regexp_match(remarks, '@DMS@|@POS@|@DEG@') IS NOT NULL OR
          regexp_match(remarks, '@BSP@|@WD@|@CMG@|@EN@|@GRID@|@(\$\..*?)@@') IS NOT NULL
        )
    ) t
    WHERE event_log_full.uid = t.uid;
  $$;

  COMMENT ON PROCEDURE scan_placeholders () IS
  'Run replace_placeholders() on the entire event log.';

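  -- Usage sketch:
  --
  --   CALL scan_placeholders();   -- rewrite placeholders across the event log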
END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
BEGIN

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.9"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.9"}' WHERE public.info.key = 'version';


CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -1,127 +0,0 @@
-- Add position interpolation from real-time inputs.
--
-- New schema version: 0.3.10
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects only the public schema.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This defines an interpolate_geometry_from_tstamp() function, taking a
-- timestamp and a maximum timespan in seconds. It will then interpolate
-- a position at the exact timestamp based on data from real_time_inputs,
-- provided that the effective interpolation timespan does not exceed the
-- maximum requested.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
BEGIN

  CALL pg_temp.show_notice('Defining interpolate_geometry_from_tstamp()');

  CREATE OR REPLACE FUNCTION public.interpolate_geometry_from_tstamp(
    IN ts timestamptz,
    IN maxspan numeric
  )
  RETURNS geometry
  AS $$
  DECLARE
    ts0 timestamptz;
    ts1 timestamptz;
    geom0 geometry;
    geom1 geometry;
    span numeric;
    fraction numeric;
  BEGIN

    SELECT tstamp, geometry
    INTO ts0, geom0
    FROM real_time_inputs
    WHERE tstamp <= ts
    ORDER BY tstamp DESC
    LIMIT 1;

    SELECT tstamp, geometry
    INTO ts1, geom1
    FROM real_time_inputs
    WHERE tstamp >= ts
    ORDER BY tstamp ASC
    LIMIT 1;

    IF geom0 IS NULL OR geom1 IS NULL THEN
      RAISE NOTICE 'Interpolation failed (no straddling data)';
      RETURN NULL;
    END IF;

    -- See if we got an exact match
    IF ts0 = ts THEN
      RETURN geom0;
    ELSIF ts1 = ts THEN
      RETURN geom1;
    END IF;

    span := extract('epoch' FROM ts1 - ts0);

    IF span > maxspan THEN
      RAISE NOTICE 'Interpolation timespan % outside maximum requested (%)', span, maxspan;
      RETURN NULL;
    END IF;

    fraction := extract('epoch' FROM ts - ts0) / span;

    IF fraction < 0 OR fraction > 1 THEN
      RAISE NOTICE 'Requested timestamp % outside of interpolation span (fraction: %)', ts, fraction;
      RETURN NULL;
    END IF;

    RETURN ST_LineInterpolatePoint(ST_MakeLine(geom0, geom1), fraction);

  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION public.interpolate_geometry_from_tstamp(timestamptz, numeric) IS
  'Interpolate a position over a given maximum timespan (in seconds)
based on real-time inputs. Returns a POINT geometry.';


END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.10"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.10"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -1,149 +0,0 @@
-- Use interpolation when augmenting event data.
--
-- New schema version: 0.3.11
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This redefines augment_event_data() to use interpolation rather than
-- nearest neighbour. It now takes an argument indicating the maximum
-- allowed interpolation timespan. An overload with a default of ten
-- minutes is also provided, as an in situ replacement for the previous
-- version.
--
-- The ten minute default is based on Triggerfish headers behaviour seen
-- on crew 248 during soft starts.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE PROCEDURE augment_event_data (maxspan numeric)
  LANGUAGE sql
  AS $$
    -- Populate the timestamp of sequence / point events
    UPDATE event_log_full
    SET tstamp = tstamp_from_sequence_shot(sequence, point)
    WHERE
      tstamp IS NULL AND sequence IS NOT NULL AND point IS NOT NULL;

    -- Populate the geometry of sequence / point events for which
    -- there is raw_shots data.
    UPDATE event_log_full
    SET meta = meta ||
      jsonb_build_object(
        'geometry',
        (
          SELECT st_transform(geometry, 4326)::jsonb
          FROM raw_shots rs
          WHERE rs.sequence = event_log_full.sequence AND rs.point = event_log_full.point
        )
      )
    WHERE
      sequence IS NOT NULL AND point IS NOT NULL AND
      NOT meta ? 'geometry';

    -- Populate the geometry of time-based events
    UPDATE event_log_full e
    SET
      meta = meta || jsonb_build_object('geometry',
        (SELECT st_transform(g.geometry, 4326)::jsonb
         FROM interpolate_geometry_from_tstamp(e.tstamp, maxspan) g))
    WHERE
      tstamp IS NOT NULL AND
      sequence IS NULL AND point IS NULL AND
      NOT meta ? 'geometry';

    -- Get rid of null geometries
    UPDATE event_log_full
    SET
      meta = meta - 'geometry'
    WHERE
      jsonb_typeof(meta->'geometry') = 'null';

    -- Simplify the GeoJSON when the CRS is EPSG:4326
    UPDATE event_log_full
    SET
      meta = meta #- '{geometry, crs}'
    WHERE
      meta->'geometry'->'crs'->'properties'->>'name' = 'EPSG:4326';

  $$;

  COMMENT ON PROCEDURE augment_event_data(numeric)
  IS 'Populate missing timestamps and geometries in event_log_full';

  CREATE OR REPLACE PROCEDURE augment_event_data ()
  LANGUAGE sql
  AS $$
    CALL augment_event_data(600);
  $$;

  COMMENT ON PROCEDURE augment_event_data()
  IS 'Overload of augment_event_data(maxspan numeric) with a maxspan value of 600 seconds.';

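  -- Usage sketch: allow a 5 minute interpolation window instead of the
  -- 10 minute default:
  --
  --   CALL augment_event_data(300);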
END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
BEGIN

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.11"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.11"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -1,193 +0,0 @@
-- Add midnight shot events to the event log.
--
-- New schema version: 0.3.12
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This defines a midnight_shots view and a log_midnight_shots() procedure
-- (with some overloads). The view returns all points straddling midnight
-- UTC and belonging to the same sequence (so last shot of the day and
-- first shot of the next day).
--
-- The procedure inserts the corresponding events (optionally constrained
-- by an earliest and a latest date) in the event log, unless the events
-- already exist.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE VIEW midnight_shots AS
  WITH straddlers AS (
    -- Get sequence numbers straddling midnight UTC
    SELECT sequence
    FROM final_shots
    GROUP BY sequence
    HAVING min(date(tstamp)) != max(date(tstamp))
  ),
  ts AS (
    -- Get earliest and latest timestamps for each day
    -- for each of the above sequences.
    -- This will return the timestamps for:
    -- FSP, LDSP, FDSP, LSP.
    SELECT
      fs.sequence,
      min(fs.tstamp) AS ts0,
      max(fs.tstamp) AS ts1
    FROM final_shots fs INNER JOIN straddlers USING (sequence)
    GROUP BY fs.sequence, (date(fs.tstamp))
    ORDER BY fs.sequence, date(fs.tstamp)
  ),
  spts AS (
    -- Filter out FSP, LSP from the above.
    -- NOTE: This *should* in theory be able to cope with
    -- a sequence longer than 24 hours (so with more than
    -- one LDSP, FDSP) but that hasn't been tested.
    SELECT DISTINCT
      sequence,
      min(ts1) OVER (PARTITION BY sequence) ldsp,
      max(ts0) OVER (PARTITION BY sequence) fdsp
    FROM ts
    ORDER BY sequence
  ), evt AS (
    SELECT
      fs.tstamp,
      fs.sequence,
      point,
      'Last shotpoint of the day' remarks,
      '{LDSP}'::text[] labels
    FROM final_shots fs
    INNER JOIN spts ON fs.sequence = spts.sequence AND fs.tstamp = spts.ldsp
    UNION SELECT
      fs.tstamp,
      fs.sequence,
      point,
      'First shotpoint of the day' remarks,
      '{FDSP}'::text[] labels
    FROM final_shots fs
    INNER JOIN spts ON fs.sequence = spts.sequence AND fs.tstamp = spts.fdsp
    ORDER BY tstamp
  )
  SELECT * FROM evt;


  CREATE OR REPLACE PROCEDURE log_midnight_shots (dt0 date, dt1 date)
  LANGUAGE sql
  AS $$
    INSERT INTO event_log (sequence, point, remarks, labels, meta)
    SELECT
      sequence, point, remarks, labels,
      '{"auto": true, "insertedBy": "log_midnight_shots"}'::jsonb
    FROM midnight_shots ms
    WHERE
      (dt0 IS NULL OR ms.tstamp >= dt0) AND
      (dt1 IS NULL OR ms.tstamp <= dt1) AND
      NOT EXISTS (
        SELECT 1
        FROM event_log el
        WHERE ms.sequence = el.sequence AND ms.point = el.point AND el.labels @> ms.labels
      );

    -- Delete any midnight shots that might have been inserted in the log
    -- but are no longer relevant according to the final_shots data.
    -- We operate on event_log, so the deletion is traceable.
    DELETE
    FROM event_log
    WHERE id IN (
      SELECT id
      FROM event_log el
      LEFT JOIN midnight_shots ms USING (sequence, point)
      WHERE
        '{LDSP,FDSP}'::text[] && el.labels -- &&: Do the arrays overlap?
        AND ms.sequence IS NULL
    );
  $$;

  COMMENT ON PROCEDURE log_midnight_shots (date, date)
  IS 'Add midnight shots between two dates dt0 and dt1 to the event_log, unless the events already exist.';


  CREATE OR REPLACE PROCEDURE log_midnight_shots (dt0 date)
  LANGUAGE sql
  AS $$
    CALL log_midnight_shots(dt0, NULL);
  $$;

  COMMENT ON PROCEDURE log_midnight_shots (date)
  IS 'Overload taking only a dt0 (adds events on that date or after).';

  CREATE OR REPLACE PROCEDURE log_midnight_shots ()
  LANGUAGE sql
  AS $$
    CALL log_midnight_shots(NULL, NULL);
  $$;

  COMMENT ON PROCEDURE log_midnight_shots ()
  IS 'Overload taking no arguments (adds all missing events).';

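  -- Usage sketch (dates are illustrative):
  --
  --   SELECT * FROM midnight_shots;                        -- inspect candidates
  --   CALL log_midnight_shots('2022-01-01', '2022-01-31'); -- log January only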
END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
BEGIN

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.12"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.12"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -1,162 +0,0 @@
-- Fix wrong number of missing shots in summary views
--
-- New schema version: 0.3.13
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- Fixes a bug in the `final_lines_summary` and `raw_lines_summary` views
-- which results in the number of missing shots being miscounted on jobs
-- using three sources.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);


  CREATE OR REPLACE VIEW raw_lines_summary AS
  WITH summary AS (
    SELECT DISTINCT rs.sequence,
      first_value(rs.point) OVER w AS fsp,
      last_value(rs.point) OVER w AS lsp,
      first_value(rs.tstamp) OVER w AS ts0,
      last_value(rs.tstamp) OVER w AS ts1,
      count(rs.point) OVER w AS num_points,
      count(pp.point) OVER w AS num_preplots,
      public.st_distance(first_value(rs.geometry) OVER w, last_value(rs.geometry) OVER w) AS length,
      ((public.st_azimuth(first_value(rs.geometry) OVER w, last_value(rs.geometry) OVER w) * (180)::double precision) / pi()) AS azimuth
    FROM (raw_shots rs
      LEFT JOIN preplot_points pp USING (line, point))
    WINDOW w AS (PARTITION BY rs.sequence ORDER BY rs.tstamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
  )
  SELECT rl.sequence,
    rl.line,
    s.fsp,
    s.lsp,
    s.ts0,
    s.ts1,
    (s.ts1 - s.ts0) AS duration,
    s.num_points,
    s.num_preplots,
    (SELECT count(*) AS count
     FROM missing_sequence_raw_points
     WHERE missing_sequence_raw_points.sequence = s.sequence) AS missing_shots,
    s.length,
    s.azimuth,
    rl.remarks,
    rl.ntbp,
    rl.meta
  FROM (summary s
    JOIN raw_lines rl USING (sequence));


  CREATE OR REPLACE VIEW final_lines_summary AS
  WITH summary AS (
    SELECT DISTINCT fs.sequence,
      first_value(fs.point) OVER w AS fsp,
      last_value(fs.point) OVER w AS lsp,
      first_value(fs.tstamp) OVER w AS ts0,
      last_value(fs.tstamp) OVER w AS ts1,
      count(fs.point) OVER w AS num_points,
      public.st_distance(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) AS length,
      ((public.st_azimuth(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) * (180)::double precision) / pi()) AS azimuth
    FROM final_shots fs
    WINDOW w AS (PARTITION BY fs.sequence ORDER BY fs.tstamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
  )
  SELECT fl.sequence,
    fl.line,
    s.fsp,
    s.lsp,
    s.ts0,
    s.ts1,
    (s.ts1 - s.ts0) AS duration,
    s.num_points,
    (SELECT count(*) AS count
     FROM missing_sequence_final_points
     WHERE missing_sequence_final_points.sequence = s.sequence) AS missing_shots,
    s.length,
    s.azimuth,
    fl.remarks,
    fl.meta
  FROM (summary s
    JOIN final_lines fl USING (sequence));

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.3.13' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.3.12' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
          HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.13"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.13"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -1,122 +0,0 @@
-- Adapt the schema to the new project configuration handling.
--
-- New schema version: 0.4.0
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This adapts the schema to the change in how project configurations are
-- handled (https://gitlab.com/wgp/dougal/software/-/merge_requests/29)
-- by creating a project_configuration() function which returns the
-- current project's configuration data.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION project_configuration()
  RETURNS jsonb
  LANGUAGE plpgsql
  AS $$
  DECLARE
    schema_name text;
    configuration jsonb;
  BEGIN

    SELECT nspname
    INTO schema_name
    FROM pg_namespace
    WHERE oid = (
      SELECT pronamespace
      FROM pg_proc
      WHERE oid = 'project_configuration'::regproc::oid
    );

    SELECT meta
    INTO configuration
    FROM public.projects
    WHERE schema = schema_name;

    RETURN configuration;
  END
  $$;

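  -- Usage sketch: fetch one section of the current project's configuration
  -- (the 'planner' key is the one used by later patches):
  --
  --   SELECT project_configuration()->'planner';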
END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.0' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.3.12' AND current_db_version != '0.3.13' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
          HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.0"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.0"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -1,264 +0,0 @@
-- Make adjust_planner() use project_configuration().
--
-- New schema version: 0.4.1
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This modifies adjust_planner() to use project_configuration()
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);


  CREATE OR REPLACE PROCEDURE adjust_planner()
  LANGUAGE plpgsql
  AS $$
  DECLARE
    _planner_config jsonb;
    _planned_line planned_lines%ROWTYPE;
    _lag interval;
    _last_sequence sequences_summary%ROWTYPE;
    _deltatime interval;
    _shotinterval interval;
    _tstamp timestamptz;
    _incr integer;
  BEGIN

    SET CONSTRAINTS planned_lines_pkey DEFERRED;

    SELECT project_configuration()->'planner'
    INTO _planner_config;

    SELECT *
    INTO _last_sequence
    FROM sequences_summary
    ORDER BY sequence DESC
    LIMIT 1;

    SELECT *
    INTO _planned_line
    FROM planned_lines
    WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;

    SELECT
      COALESCE(
        ((lead(ts0) OVER (ORDER BY sequence)) - ts1),
        make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer)
      )
    INTO _lag
    FROM planned_lines
    WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;

    _incr = sign(_last_sequence.lsp - _last_sequence.fsp);

    RAISE NOTICE '_planner_config: %', _planner_config;
    RAISE NOTICE '_last_sequence: %', _last_sequence;
    RAISE NOTICE '_planned_line: %', _planned_line;
    RAISE NOTICE '_incr: %', _incr;

    -- Does the latest sequence match a planned sequence?
    IF _planned_line IS NULL THEN -- No it doesn't
      RAISE NOTICE 'Latest sequence shot does not match a planned sequence';
      SELECT * INTO _planned_line FROM planned_lines ORDER BY sequence ASC LIMIT 1;
      RAISE NOTICE '_planned_line: %', _planned_line;

      IF _planned_line.sequence <= _last_sequence.sequence THEN
        RAISE NOTICE 'Renumbering the planned sequences starting from %', _planned_line.sequence + 1;
        -- Renumber the planned sequences starting from last shot sequence number + 1
        UPDATE planned_lines
        SET sequence = sequence + _last_sequence.sequence - _planned_line.sequence + 1;
      END IF;

      -- The correction to make to the first planned line's ts0 will be based on either the last
      -- sequence's EOL + default line change time or the current time, whichever is later.
      _deltatime := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1) + make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer), current_timestamp) - _planned_line.ts0;

      -- Is the first planned line's start time in the past? (±5 mins)
      IF _planned_line.ts0 < (current_timestamp - make_interval(mins => 5)) THEN
        RAISE NOTICE 'First planned line is in the past. Adjusting times by %', _deltatime;
        -- Adjust the start / end time of the planned lines by assuming that we are at
        -- `defaultLineChangeDuration` minutes away from SOL of the first planned line.
        UPDATE planned_lines
        SET
          ts0 = ts0 + _deltatime,
          ts1 = ts1 + _deltatime;
      END IF;

    ELSE -- Yes it does
      RAISE NOTICE 'Latest sequence does match a planned sequence: %, %', _planned_line.sequence, _planned_line.line;

      -- Is it online?
      IF EXISTS(SELECT 1 FROM raw_lines_files WHERE sequence = _last_sequence.sequence AND hash = '*online*') THEN
        -- Yes it is
        RAISE NOTICE 'Sequence % is online', _last_sequence.sequence;

        -- Let us get the SOL from the events log if we can
        RAISE NOTICE 'Trying to set fsp, ts0 from events log FSP, FGSP';
        WITH e AS (
          SELECT * FROM event_log
          WHERE
            sequence = _last_sequence.sequence
            AND ('FSP' = ANY(labels) OR 'FGSP' = ANY(labels))
          ORDER BY tstamp LIMIT 1
        )
        UPDATE planned_lines
        SET
          fsp = COALESCE(e.point, fsp),
          ts0 = COALESCE(e.tstamp, ts0)
        FROM e
        WHERE planned_lines.sequence = _last_sequence.sequence;

        -- Shot interval
        _shotinterval := (_last_sequence.ts1 - _last_sequence.ts0) / abs(_last_sequence.lsp - _last_sequence.fsp);

        RAISE NOTICE 'Estimating EOL from current shot interval: %', _shotinterval;

        SELECT (abs(lsp-fsp) * _shotinterval + ts0) - ts1
        INTO _deltatime
        FROM planned_lines
        WHERE sequence = _last_sequence.sequence;

        ---- Set ts1 for the current sequence
        --UPDATE planned_lines
        --SET
        --ts1 = (abs(lsp-fsp) * _shotinterval) + ts0
        --WHERE sequence = _last_sequence.sequence;

        RAISE NOTICE 'Adjustment is %', _deltatime;

        IF abs(EXTRACT(EPOCH FROM _deltatime)) < 8 THEN
          RAISE NOTICE 'Adjustment too small (< 8 s), so not applying it';
          RETURN;
        END IF;

        -- Adjust ts1 for the current sequence
        UPDATE planned_lines
        SET ts1 = ts1 + _deltatime
        WHERE sequence = _last_sequence.sequence;

        -- Now shift all sequences after
        UPDATE planned_lines
        SET ts0 = ts0 + _deltatime, ts1 = ts1 + _deltatime
        WHERE sequence > _last_sequence.sequence;

        RAISE NOTICE 'Deleting planned sequences before %', _planned_line.sequence;
        -- Remove all previous planner entries.
        DELETE
        FROM planned_lines
        WHERE sequence < _last_sequence.sequence;

      ELSE
        -- No it isn't
        RAISE NOTICE 'Sequence % is offline', _last_sequence.sequence;

        -- We were supposed to finish at _planned_line.ts1 but we finished at:
        _tstamp := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1), current_timestamp);
        -- WARNING Next line is for testing only
        --_tstamp := COALESCE(_last_sequence.ts1_final, _last_sequence.ts1);
        -- So we need to adjust timestamps by:
        _deltatime := _tstamp - _planned_line.ts1;

        RAISE NOTICE 'Planned end: %, actual end: % (%, %)', _planned_line.ts1, _tstamp, _planned_line.sequence, _last_sequence.sequence;
        RAISE NOTICE 'Shifting times by % for sequences > %', _deltatime, _planned_line.sequence;
        -- NOTE: This won't work if sequences are not, err… sequential.
        -- NOTE: This has been known to happen in 2020.
        UPDATE planned_lines
        SET
          ts0 = ts0 + _deltatime,
          ts1 = ts1 + _deltatime
        WHERE sequence > _planned_line.sequence;

        RAISE NOTICE 'Deleting planned sequences up to %', _planned_line.sequence;
        -- Remove all previous planner entries.
        DELETE
        FROM planned_lines
        WHERE sequence <= _last_sequence.sequence;

      END IF;

    END IF;
  END;
  $$;

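  -- Usage sketch: the procedure takes no arguments and operates on the
  -- current survey schema's planner tables:
  --
  --   CALL adjust_planner();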

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.1' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.0' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
          HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.1"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.1"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -1,98 +0,0 @@
-- Make binning_parameters() use project_configuration().
--
-- New schema version: 0.4.2
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This modifies binning_parameters() to use project_configuration()
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION binning_parameters() RETURNS jsonb
  LANGUAGE sql STABLE LEAKPROOF PARALLEL SAFE
  AS $$
    SELECT project_configuration()->'binning' binning;
  $$;

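  -- Usage sketch:
  --
  --   SELECT binning_parameters();   -- the project's 'binning' configuration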
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
current_db_version TEXT;
|
||||
BEGIN
|
||||
|
||||
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
|
||||
|
||||
IF current_db_version >= '0.4.2' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Patch already applied';
|
||||
END IF;
|
||||
|
||||
IF current_db_version != '0.4.1' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Invalid database version: ' || current_db_version,
|
||||
HINT='Ensure all previous patches have been applied.';
|
||||
END IF;
|
||||
|
||||
FOR row IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'survey_%'
|
||||
ORDER BY schema_name
|
||||
LOOP
|
||||
CALL pg_temp.upgrade_survey_schema(row.schema_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.2"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.4.2"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
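--
-- Illustration only (editor's sketch, not part of the original migration):
-- after this patch, binning parameters are read straight from the project
-- configuration, so within a survey schema one could verify with:
--
--   SELECT binning_parameters();
--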
@@ -1,164 +0,0 @@
-- Support notification payloads larger than Postgres' NOTIFY limit.
--
-- New schema version: 0.4.3
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This creates a new table where large notification payloads are stored
-- temporarily and from which they might be recalled by the notification
-- listeners. It also creates a purge_notifications() procedure used to
-- clean up old notifications from the notifications log and finally,
-- modifies notify() to support these changes. When a large payload is
-- encountered, the payload is stored in the notify_payloads table and
-- a trimmed down version containing a notification_id is sent to listeners
-- instead. Listeners can then query notify_payloads to retrieve the full
-- payloads. It is the application layer's responsibility to delete old
-- notifications.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_schema () AS $outer$
BEGIN

  RAISE NOTICE 'Updating public schema';
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO public');

  CREATE TABLE IF NOT EXISTS public.notify_payloads (
    id SERIAL,
    tstamp timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP,
    payload text NOT NULL DEFAULT '',
    PRIMARY KEY (id)
  );

  CREATE INDEX IF NOT EXISTS notify_payload_tstamp ON notify_payloads (tstamp);

  CREATE OR REPLACE FUNCTION public.notify() RETURNS trigger
  LANGUAGE plpgsql
  AS $$
  DECLARE
    channel text := TG_ARGV[0];
    pid text;
    payload text;
    notification text;
    payload_id integer;
  BEGIN

    SELECT projects.pid INTO pid FROM projects WHERE schema = TG_TABLE_SCHEMA;

    payload := json_build_object(
      'tstamp', CURRENT_TIMESTAMP,
      'operation', TG_OP,
      'schema', TG_TABLE_SCHEMA,
      'table', TG_TABLE_NAME,
      'old', row_to_json(OLD),
      'new', row_to_json(NEW),
      'pid', pid
    )::text;

    IF octet_length(payload) < 1000 THEN
      PERFORM pg_notify(channel, payload);
    ELSE
      -- We need to find another solution
      -- FIXME Consider storing the payload in a temporary memory table,
      -- referenced by some form of autogenerated ID. Then send the ID
      -- as the payload and then it's up to the user to fetch the original
      -- payload if interested. This needs a mechanism to expire older payloads
      -- in the interest of conserving memory.

      INSERT INTO notify_payloads (payload) VALUES (payload) RETURNING id INTO payload_id;

      notification := json_build_object(
        'tstamp', CURRENT_TIMESTAMP,
        'operation', TG_OP,
        'schema', TG_TABLE_SCHEMA,
        'table', TG_TABLE_NAME,
        'pid', pid,
        'payload_id', payload_id
      )::text;

      PERFORM pg_notify(channel, notification);
      RAISE INFO 'Payload over limit';
    END IF;
    RETURN NULL;
  END;
  $$;

  CREATE PROCEDURE public.purge_notifications (age_seconds numeric DEFAULT 120) AS $$
    DELETE FROM notify_payloads WHERE EXTRACT(epoch FROM CURRENT_TIMESTAMP - tstamp) > age_seconds;
  $$ LANGUAGE sql;



END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.3' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.2' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
            HINT='Ensure all previous patches have been applied.';
  END IF;

  -- This upgrade modifies the `public` schema only, not individual
  -- project schemas.
  CALL pg_temp.upgrade_schema();

END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_schema ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.3"}')
ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.4.3"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
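--
-- Illustration only (editor's sketch, not part of the original migration):
-- a listener that receives a trimmed-down notification can recall the full
-- payload by its id and then clean up. The channel name and the id 42 are
-- made-up example values.
--
--   LISTEN dougal;
--   SELECT payload FROM public.notify_payloads WHERE id = 42;
--   CALL public.purge_notifications(300);
--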
@@ -1,104 +0,0 @@
-- Add event_log_changes function
--
-- New schema version: 0.4.4
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This adds a function event_log_changes which returns the subset of
-- events from event_log_full which have been modified on or after a
-- given timestamp.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION event_log_changes(ts0 timestamptz)
  RETURNS SETOF event_log_full
  LANGUAGE sql
  AS $$
    SELECT *
    FROM event_log_full
    WHERE lower(validity) > ts0 OR upper(validity) IS NOT NULL AND upper(validity) > ts0
    ORDER BY lower(validity);
  $$;

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.4' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.3' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
            HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.4"}')
ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.4.4"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
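--
-- Illustration only (editor's sketch, not part of the original migration):
-- fetch the events modified in the last 24 hours; the interval is an
-- arbitrary example value.
--
--   SELECT * FROM event_log_changes(CURRENT_TIMESTAMP - interval '24 hours');
--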
@@ -1,147 +0,0 @@
-- Turn project_summary into a materialised view
--
-- New schema version: 0.4.5
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- The project_summary view is quite a bottleneck. While it itself is
-- not the real culprit (rather the underlying views are), this is one
-- relatively cheap way of improving responsiveness from the client's
-- point of view.
-- We leave the details of how / when to refresh the view to the non-
-- database code.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  DROP VIEW project_summary;

  CREATE MATERIALIZED VIEW project_summary AS
  WITH fls AS (
    SELECT
      avg((final_lines_summary.duration / ((final_lines_summary.num_points - 1))::double precision)) AS shooting_rate,
      avg((final_lines_summary.length / date_part('epoch'::text, final_lines_summary.duration))) AS speed,
      sum(final_lines_summary.duration) AS prod_duration,
      sum(final_lines_summary.length) AS prod_distance
    FROM final_lines_summary
  ), project AS (
    SELECT
      p.pid,
      p.name,
      p.schema
    FROM public.projects p
    WHERE (split_part(current_setting('search_path'::text), ','::text, 1) = p.schema)
  )
  SELECT
    project.pid,
    project.name,
    project.schema,
    ( SELECT count(*) AS count
      FROM preplot_lines
      WHERE (preplot_lines.class = 'V'::bpchar)) AS lines,
    ps.total,
    ps.virgin,
    ps.prime,
    ps.other,
    ps.ntba,
    ps.remaining,
    ( SELECT to_json(fs.*) AS to_json
      FROM final_shots fs
      ORDER BY fs.tstamp
      LIMIT 1) AS fsp,
    ( SELECT to_json(fs.*) AS to_json
      FROM final_shots fs
      ORDER BY fs.tstamp DESC
      LIMIT 1) AS lsp,
    ( SELECT count(*) AS count
      FROM raw_lines rl) AS seq_raw,
    ( SELECT count(*) AS count
      FROM final_lines rl) AS seq_final,
    fls.prod_duration,
    fls.prod_distance,
    fls.speed AS shooting_rate
  FROM preplot_summary ps,
    fls,
    project;

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.5' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.4' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
            HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.5"}')
ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.4.5"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
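--
-- Illustration only (editor's sketch, not part of the original migration):
-- since project_summary is now materialised, the application layer must
-- refresh it when the underlying data changes, e.g.:
--
--   REFRESH MATERIALIZED VIEW project_summary;
--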
@@ -1,164 +0,0 @@
-- Sailline ancillary data
--
-- New schema version: 0.5.0
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- Issue #264 calls for associating sail and acquisition lines as well
-- as indicating expected acquisition direction, and other data which
-- cannot be provided via standard import formats such as SPS or P1/90.
--
-- We support this via an additional table that holds most of the required
-- data. This data can simply be inferred from regular preplots, e.g., line
-- direction can be deduced from preplot point order, and sail / source
-- line offsets can be taken from P1/90 headers or from a configuration
-- parameter. Alternatively, and in preference, the data can be provided
-- explicitly, which is what issue #264 asks for.
--
-- In principle, this makes at least some of the attributes of `preplot_lines`
-- redundant (at least `incr` and `ntba`) but we will leave them there for
-- the time being as technical debt.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);


  CREATE TABLE IF NOT EXISTS preplot_saillines
  (
    sailline integer NOT NULL,
    line integer NOT NULL,
    sailline_class character(1) NOT NULL,
    line_class character(1) NOT NULL,
    incr boolean NOT NULL DEFAULT true,
    ntba boolean NOT NULL DEFAULT false,
    remarks text NOT NULL DEFAULT '',
    meta jsonb NOT NULL DEFAULT '{}'::jsonb,
    hash text NULL, -- Theoretically the info in this table could all be inferred.
    PRIMARY KEY (sailline, sailline_class, line, line_class, incr),
    CONSTRAINT fk_sailline FOREIGN KEY (sailline, sailline_class)
      REFERENCES preplot_lines (line, class)
      ON UPDATE CASCADE
      ON DELETE CASCADE,
    CONSTRAINT fk_line FOREIGN KEY (line, line_class)
      REFERENCES preplot_lines (line, class)
      ON UPDATE CASCADE
      ON DELETE CASCADE,
    CONSTRAINT fk_hash FOREIGN KEY (hash)
      REFERENCES files (hash) MATCH SIMPLE
      ON UPDATE CASCADE
      ON DELETE CASCADE,
    CHECK (sailline_class = 'V' AND sailline_class != line_class)
  );

  COMMENT ON TABLE preplot_saillines
    IS 'We explicitly associate each preplot sailline (aka vessel line) with zero or more source lines. This information can be inferred from preplot files, e.g., via a sailline offset value, or explicitly provided.';

  -- Let us copy whatever information we can from existing tables or views

  INSERT INTO preplot_saillines
    (sailline, line, sailline_class, line_class, incr, ntba, remarks, meta)
  SELECT DISTINCT
    sailline, psp.line, 'V' sailline_class, psp.class line_class, pl.incr, pl.ntba, pl.remarks, pl.meta
  FROM preplot_saillines_points psp
  INNER JOIN preplot_lines pl ON psp.sailline = pl.line AND pl.class = 'V'
  ORDER BY sailline
  ON CONFLICT DO NOTHING;

  -- We need to recreate the preplot_saillines_points view

  CREATE OR REPLACE VIEW preplot_saillines_points AS
  SELECT psl.sailline,
    psl.ntba AS sailline_ntba,
    psl.line,
    pps.point,
    pps.class,
    pps.ntba,
    pps.geometry,
    pps.meta
  FROM preplot_saillines psl
  INNER JOIN preplot_points pps
    ON psl.line = pps.line AND psl.line_class = pps.class;



END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.5.0' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.5' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
            HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.0"}')
ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.5.0"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
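--
-- Illustration only (editor's sketch, not part of the original migration):
-- explicitly associating a sailline with one source line. The line numbers
-- and the 'S' line class are made-up example values; the CHECK constraint
-- requires the sailline class to be 'V' and the line class to differ.
--
--   INSERT INTO preplot_saillines (sailline, line, sailline_class, line_class)
--   VALUES (1001, 2001, 'V', 'S')
--   ON CONFLICT DO NOTHING;
--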
@@ -1,119 +0,0 @@
-- Sailline ancillary data
--
-- New schema version: 0.5.1
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- The sequences_detail view wrongly associates source lines and shot
-- points when it should be associating saillines and shot points instead.
--
-- This update fixes that issue (#307).
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE VIEW sequences_detail
  AS
  SELECT rl.sequence,
    rl.line AS sailline,
    rs.line,
    rs.point,
    rs.tstamp,
    rs.objref AS objrefraw,
    fs.objref AS objreffinal,
    st_transform(pp.geometry, 4326) AS geometrypreplot,
    st_transform(rs.geometry, 4326) AS geometryraw,
    st_transform(fs.geometry, 4326) AS geometryfinal,
    ij_error(rs.line::double precision, rs.point::double precision, rs.geometry) AS errorraw,
    ij_error(rs.line::double precision, rs.point::double precision, fs.geometry) AS errorfinal,
    json_build_object('preplot', pp.meta, 'raw', rs.meta, 'final', fs.meta) AS meta
  FROM raw_lines rl
  INNER JOIN preplot_saillines psl ON rl.line = psl.sailline
  INNER JOIN raw_shots rs ON rs.sequence = rl.sequence AND rs.line = psl.line
  INNER JOIN preplot_points pp ON psl.line = pp.line AND psl.line_class = pp.class AND rs.point = pp.point
  LEFT JOIN final_shots fs ON rl.sequence = fs.sequence AND rs.point = fs.point;

  ALTER TABLE sequences_detail
    OWNER TO postgres;

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.5.1' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.5.0' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
            HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.1"}')
ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.5.1"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
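--
-- Illustration only (editor's sketch, not part of the original migration):
-- per-shot detail for a single sequence; the sequence number 123 is a
-- made-up example value.
--
--   SELECT sailline, line, point, errorraw, errorfinal
--   FROM sequences_detail
--   WHERE sequence = 123
--   ORDER BY point;
--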
@@ -1,145 +0,0 @@
-- Fix preplot_lines_summary view
--
-- New schema version: 0.5.2
--
-- WARNING: This update is buggy and does not give the desired
-- results. Schema version 0.5.4 fixes this.
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- Following introduction of `preplot_saillines` (0.5.0), the incr and
-- ntba statuses are stored in a separate table, not in `preplot_lines`
-- (TODO: a future upgrade should remove those columns from `preplot_lines`)
--
-- Now any views referencing `incr` and `ntba` must be updated to point to
-- the new location of those attributes.
--
-- This update fixes #312.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE VIEW preplot_lines_summary
  AS
  WITH summary AS (
    SELECT DISTINCT pp.line, pp.class,
      first_value(pp.point) OVER w AS p0,
      last_value(pp.point) OVER w AS p1,
      count(pp.point) OVER w AS num_points,
      st_distance(first_value(pp.geometry) OVER w, last_value(pp.geometry) OVER w) AS length,
      st_azimuth(first_value(pp.geometry) OVER w, last_value(pp.geometry) OVER w) * 180::double precision / pi() AS azimuth0,
      st_azimuth(last_value(pp.geometry) OVER w, first_value(pp.geometry) OVER w) * 180::double precision / pi() AS azimuth1
    FROM preplot_points pp
    WHERE pp.class = 'V'::bpchar
    WINDOW w AS (PARTITION BY pp.line ORDER BY pp.point ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
  )
  SELECT psl.line,
    CASE
      WHEN psl.incr THEN s.p0
      ELSE s.p1
    END AS fsp,
    CASE
      WHEN psl.incr THEN s.p1
      ELSE s.p0
    END AS lsp,
    s.num_points,
    s.length,
    CASE
      WHEN psl.incr THEN s.azimuth0
      ELSE s.azimuth1
    END AS azimuth,
    psl.incr,
    psl.remarks
  FROM summary s
  JOIN preplot_saillines psl ON psl.sailline_class = s.class AND s.line = psl.line
  ORDER BY psl.line, incr;


  ALTER TABLE preplot_lines_summary
    OWNER TO postgres;
  COMMENT ON VIEW preplot_lines_summary
    IS 'Summarises ''V'' (vessel sailline) preplot lines.';



END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.5.2' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.5.1' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
            HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.2"}')
ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.5.2"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -1,132 +0,0 @@
-- Fix final_lines_summary view
--
-- New schema version: 0.5.3
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This fixes a long-standing bug, where if the sail and source lines are
-- the same, the number of missing shots will be miscounted.
--
-- This update fixes #313.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE VIEW final_lines_summary
  AS
  WITH summary AS (
    SELECT DISTINCT fs.sequence,
      first_value(fs.point) OVER w AS fsp,
      last_value(fs.point) OVER w AS lsp,
      first_value(fs.tstamp) OVER w AS ts0,
      last_value(fs.tstamp) OVER w AS ts1,
      count(fs.point) OVER w AS num_points,
      count(pp.point) OVER w AS num_preplots,
      st_distance(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) AS length,
      st_azimuth(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) * 180::double precision / pi() AS azimuth
    FROM final_shots fs
    LEFT JOIN preplot_points pp USING (line, point)
    WINDOW w AS (PARTITION BY fs.sequence ORDER BY fs.tstamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
  )
  SELECT fl.sequence,
    fl.line,
    s.fsp,
    s.lsp,
    s.ts0,
    s.ts1,
    s.ts1 - s.ts0 AS duration,
    s.num_points,
    (( SELECT count(*) AS count
       FROM preplot_points
       WHERE preplot_points.line = fl.line AND (preplot_points.point >= s.fsp AND preplot_points.point <= s.lsp OR preplot_points.point >= s.lsp AND preplot_points.point <= s.fsp))) - s.num_preplots AS missing_shots,
    s.length,
    s.azimuth,
    fl.remarks,
    fl.meta
  FROM summary s
  JOIN final_lines fl USING (sequence);

  ALTER TABLE final_lines_summary
    OWNER TO postgres;

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.5.3' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.5.2' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
            HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.3"}')
ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.5.3"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
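--
-- Illustration only (editor's sketch, not part of the original migration):
-- checking the corrected missing-shot counts per sequence.
--
--   SELECT sequence, line, num_points, missing_shots
--   FROM final_lines_summary
--   ORDER BY sequence;
--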
@@ -1,145 +0,0 @@
-- Fix preplot_lines_summary view
--
-- New schema version: 0.5.4
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- Fixes upgrade 35 (0.5.2). The original description of 0.5.2 is included
-- below for ease of reference:
--
-- Following introduction of `preplot_saillines` (0.5.0), the incr and
-- ntba statuses are stored in a separate table, not in `preplot_lines`
-- (TODO: a future upgrade should remove those columns from `preplot_lines`)
--
-- Now any views referencing `incr` and `ntba` must be updated to point to
-- the new location of those attributes.
--
-- This update fixes #312.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE VIEW preplot_lines_summary
  AS
  WITH summary AS (
    SELECT DISTINCT pp.line,
      pp.class,
      first_value(pp.point) OVER w AS p0,
      last_value(pp.point) OVER w AS p1,
      count(pp.point) OVER w AS num_points,
      st_distance(first_value(pp.geometry) OVER w, last_value(pp.geometry) OVER w) AS length,
      st_azimuth(first_value(pp.geometry) OVER w, last_value(pp.geometry) OVER w) * 180::double precision / pi() AS azimuth0,
      st_azimuth(last_value(pp.geometry) OVER w, first_value(pp.geometry) OVER w) * 180::double precision / pi() AS azimuth1
    FROM preplot_points pp
    WHERE pp.class = 'V'::bpchar
    WINDOW w AS (PARTITION BY pp.line ORDER BY pp.point ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
  )
  SELECT DISTINCT psl.sailline AS line,
    CASE
      WHEN psl.incr THEN s.p0
      ELSE s.p1
    END AS fsp,
    CASE
      WHEN psl.incr THEN s.p1
      ELSE s.p0
    END AS lsp,
    s.num_points,
    s.length,
    CASE
      WHEN psl.incr THEN s.azimuth0
      ELSE s.azimuth1
    END AS azimuth,
    psl.incr,
    psl.remarks
  FROM summary s
  JOIN preplot_saillines psl ON psl.sailline_class = s.class AND s.line = psl.sailline
  ORDER BY psl.sailline, psl.incr;

  ALTER TABLE preplot_lines_summary
    OWNER TO postgres;
  COMMENT ON VIEW preplot_lines_summary
    IS 'Summarises ''V'' (vessel sailline) preplot lines.';



END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.5.4' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.5.3' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
            HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.4"}')
ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.5.4"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
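--
-- Illustration only (editor's sketch, not part of the original migration):
-- the view is now keyed by sailline, one row per acquisition direction.
--
--   SELECT line, fsp, lsp, azimuth, incr
--   FROM preplot_lines_summary
--   ORDER BY line, incr;
--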
@@ -1,110 +0,0 @@
-- Add keystore table
--
-- New schema version: 0.6.0
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade only affects the `public` schema.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update adds a `keystore` table, intended for storing arbitrary
-- key / value pairs which, unlike the `info` tables, is not meant to
-- be directly accessible via the API. Its main purpose as of this writing
-- is to store user definitions (see #176, #177, #180).
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  CREATE TABLE IF NOT EXISTS keystore (
    type TEXT NOT NULL, -- A class of data to be stored
    key TEXT NOT NULL, -- A key that is unique for the class and access type
    last_modified TIMESTAMP -- To detect update conflicts
      DEFAULT CURRENT_TIMESTAMP,
    data jsonb,
    PRIMARY KEY (type, key) -- Composite primary key
  );

  -- Create a function to update the last_modified timestamp
  CREATE OR REPLACE FUNCTION update_last_modified()
  RETURNS TRIGGER AS $$
  BEGIN
    NEW.last_modified = CURRENT_TIMESTAMP;
    RETURN NEW;
  END;
  $$ LANGUAGE plpgsql;

  -- Create a trigger that calls the function before each update
  CREATE OR REPLACE TRIGGER update_keystore_last_modified
  BEFORE UPDATE ON keystore
  FOR EACH ROW
  EXECUTE FUNCTION update_last_modified();

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.0' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.5.4' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
            HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.0"}')
ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.6.0"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
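--
-- Illustration only (editor's sketch, not part of the original migration):
-- an upsert against the new keystore; the BEFORE UPDATE trigger bumps
-- last_modified automatically. The type, key and data are made-up example
-- values.
--
--   INSERT INTO public.keystore (type, key, data)
--   VALUES ('user', 'example-key', '{"name": "example"}'::jsonb)
--   ON CONFLICT (type, key) DO UPDATE SET data = EXCLUDED.data;
--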
@@ -1,108 +0,0 @@
-- Add default bootstrap user
--
-- New schema version: 0.6.1
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade only affects the `public` schema.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update adds a default user to the system (see #176, #177, #180).
-- The default user can only be invoked by connecting from localhost.
--
-- This user has full access to every project via the organisations
-- permissions wildcard: `{"*": {read: true, write: true, edit: true}}`
-- and can be used to bootstrap the system by creating other users
-- and assigning organisational permissions.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  INSERT INTO keystore (type, key, data)
  VALUES ('user', '6f1e7159-4ca0-4ae4-ab4e-89078166cc10', '
  {
    "id": "6f1e7159-4ca0-4ae4-ab4e-89078166cc10",
    "ip": "127.0.0.0/24",
    "name": "☠️",
    "colour": "red",
    "active": true,
    "organisations": {
      "*": {
        "read": true,
        "write": true,
        "edit": true
      }
    }
  }
  '::jsonb)
  ON CONFLICT (type, key) DO NOTHING;

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.1' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.0' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
            HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.1"}')
ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.6.1"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
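--
-- Illustration only (editor's sketch, not part of the original migration):
-- verifying that the bootstrap user was created.
--
--   SELECT key, data->>'name' AS name, data->>'ip' AS ip
--   FROM public.keystore
--   WHERE type = 'user';
--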
@@ -1,106 +0,0 @@
-- Add organisations permissions to project configurations
--
-- New schema version: 0.6.2
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade only affects the `public` schema.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update adds an "organisations" section to the configuration,
-- with a default configured organisation of "WGP" with full access.
-- This is so that projects can be made accessible after migrating
-- to the new permissions architecture.
--
-- In addition, projects with an id starting with "eq" are assumed to
-- be Equinor projects, and an additional organisation is added with
-- read-only access. This is intended for clients, which should be
-- assigned to the "Equinor" organisation.
--
-- Finally, we assign the vessel to the "WGP" organisation (full access)
-- so that we can actually use administrative endpoints.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- Add "organisations" section to configurations, if not already present
  UPDATE projects
  SET
    meta = jsonb_set(meta, '{organisations}', '{"WGP": {"read": true, "write": true, "edit": true}}'::jsonb, true)
  WHERE meta->'organisations' IS NULL;

  -- Add (or overwrite!) "organisations.Equinor" giving read-only access (can be changed later via API)
  UPDATE projects
  SET
    meta = jsonb_set(meta, '{organisations, Equinor}', '{"read": true, "write": false, "edit": false}'::jsonb, true)
  WHERE pid LIKE 'eq%';

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.2' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.1' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
            HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.2"}')
ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.6.2"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
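--
-- Illustration only (editor's sketch, not part of the original migration):
-- inspecting the resulting permissions for the assumed Equinor projects.
--
--   SELECT pid, meta->'organisations' AS organisations
--   FROM public.projects
--   WHERE pid LIKE 'eq%';
--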
@@ -1,109 +0,0 @@
-- Add comparisons schema for 4D prospect comparison data
--
-- New schema version: 0.6.3
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade creates a new schema called `comparisons`.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update adds a `comparisons` table to a `comparisons` schema.
-- The `comparisons.comparisons` table holds 4D prospect comparison data.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- BEGIN

  CREATE SCHEMA IF NOT EXISTS comparisons
    AUTHORIZATION postgres;

  COMMENT ON SCHEMA comparisons
    IS 'Holds 4D comparison data and logic';

  CREATE TABLE IF NOT EXISTS comparisons.comparisons
  (
    type text COLLATE pg_catalog."default" NOT NULL,
    baseline_pid text COLLATE pg_catalog."default" NOT NULL,
    monitor_pid text COLLATE pg_catalog."default" NOT NULL,
    data bytea,
    meta jsonb NOT NULL DEFAULT '{}'::jsonb,
    CONSTRAINT comparisons_pkey PRIMARY KEY (baseline_pid, monitor_pid, type)
  )

  TABLESPACE pg_default;

  ALTER TABLE IF EXISTS comparisons.comparisons
    OWNER to postgres;

  -- END

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.3' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.2' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
            HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.3"}')
ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.6.3"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
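--
-- Illustration only (editor's sketch, not part of the original migration):
-- storing a comparison record; the type and pids are made-up example values.
--
--   INSERT INTO comparisons.comparisons (type, baseline_pid, monitor_pid, meta)
--   VALUES ('timeshift', 'eq2101', 'eq2301', '{}'::jsonb)
--   ON CONFLICT (baseline_pid, monitor_pid, type) DO NOTHING;
--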
@@ -1,169 +0,0 @@
-- Allow notify() to exclude columns from notifications
--
-- New schema version: 0.6.4
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update modifies notify() to accept, as optional arguments, the
-- names of columns that are to be *excluded* from the notification.
-- It is intended for tables with large columns which are however of
-- no particular interest in a notification.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- BEGIN

  CREATE OR REPLACE FUNCTION public.notify()
  RETURNS trigger
  LANGUAGE 'plpgsql'
  COST 100
  VOLATILE NOT LEAKPROOF
  AS $BODY$
  DECLARE
    channel text := TG_ARGV[0];
    pid text;
    payload text;
    notification text;
    payload_id integer;
    old_json jsonb;
    new_json jsonb;
    excluded_col text;
    i integer;
  BEGIN

    -- Fetch pid
    SELECT projects.pid INTO pid FROM projects WHERE schema = TG_TABLE_SCHEMA;

    -- Build old and new as jsonb, excluding specified columns if provided
    IF OLD IS NOT NULL THEN
      old_json := row_to_json(OLD)::jsonb;
      FOR i IN 1 .. TG_NARGS - 1 LOOP
        excluded_col := TG_ARGV[i];
        old_json := old_json - excluded_col;
      END LOOP;
    ELSE
      old_json := NULL;
    END IF;

    IF NEW IS NOT NULL THEN
      new_json := row_to_json(NEW)::jsonb;
      FOR i IN 1 .. TG_NARGS - 1 LOOP
        excluded_col := TG_ARGV[i];
        new_json := new_json - excluded_col;
      END LOOP;
    ELSE
      new_json := NULL;
    END IF;

    -- Build payload
    payload := json_build_object(
      'tstamp', CURRENT_TIMESTAMP,
      'operation', TG_OP,
      'schema', TG_TABLE_SCHEMA,
      'table', TG_TABLE_NAME,
      'old', old_json,
      'new', new_json,
      'pid', pid
    )::text;

    -- Handle large payloads
    IF octet_length(payload) < 1000 THEN
      PERFORM pg_notify(channel, payload);
    ELSE
      -- Store large payload and notify with ID (as before)
      INSERT INTO notify_payloads (payload) VALUES (payload) RETURNING id INTO payload_id;

      notification := json_build_object(
        'tstamp', CURRENT_TIMESTAMP,
        'operation', TG_OP,
        'schema', TG_TABLE_SCHEMA,
        'table', TG_TABLE_NAME,
        'pid', pid,
        'payload_id', payload_id
      )::text;

      PERFORM pg_notify(channel, notification);
      RAISE INFO 'Payload over limit';
    END IF;
    RETURN NULL;
  END;
  $BODY$;

  ALTER FUNCTION public.notify()
    OWNER TO postgres;

  -- END

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.4' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.3' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
            HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.4"}')
ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.6.4"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -1,96 +0,0 @@
-- Recreate the comparisons trigger to use notify() column exclusion
--
-- New schema version: 0.6.5
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This update recreates the trigger on comparisons.comparisons so that
-- it calls notify() with the large "data" column excluded from the
-- notification payload, using the optional exclusion arguments
-- introduced in 0.6.4.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- BEGIN

  CREATE OR REPLACE TRIGGER comparisons_tg
    AFTER INSERT OR DELETE OR UPDATE
    ON comparisons.comparisons
    FOR EACH ROW
    EXECUTE FUNCTION public.notify('comparisons', 'data');
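  -- The second argument names a column to drop from the notification
  -- payload: row images sent for comparisons.comparisons omit "data".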

  -- END

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.5' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.4' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
            HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.5"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.5"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -45,13 +45,11 @@
   name: "No fire"
   id: no_fire
   check: |
-    // const currentShot = currentItem;
-    // const gunData = currentItem._("raw_meta.smsrc");
-    // (gunData && gunData.guns && gunData.guns.length != gunData.num_active)
-    //   ? `Source ${gunData.src_number}: No fire (${gunData.guns.length - gunData.num_active} guns)`
-    //   : true;
-    // Disabled due to changes in Smartsource software. It now returns all guns on every shot, not just active ones.
-    true
+    const currentShot = currentItem;
+    const gunData = currentItem._("raw_meta.smsrc");
+    (gunData && gunData.guns && gunData.guns.length != gunData.num_active)
+      ? `Source ${gunData.src_number}: No fire (${gunData.guns.length - gunData.num_active} guns)`
+      : true;
 
 -
   name: "Pressure errors"

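# Note on check semantics (inferred from the expression above): a check body is
# a JS expression evaluated per shot with `currentItem` in scope; it yields
# true to pass, or a message string to raise the named error.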
@@ -1,968 +0,0 @@
const codeToType = {
  0: Int8Array,
  1: Uint8Array,
  2: Int16Array,
  3: Uint16Array,
  4: Int32Array,
  5: Uint32Array,
  7: Float32Array,
  8: Float64Array,
  9: BigInt64Array,
  10: BigUint64Array
};

const typeToBytes = {
  Int8Array: 1,
  Uint8Array: 1,
  Int16Array: 2,
  Uint16Array: 2,
  Int32Array: 4,
  Uint32Array: 4,
  Float32Array: 4,
  Float64Array: 8,
  BigInt64Array: 8,
  BigUint64Array: 8
};

function readTypedValue(view, offset, type) {
  switch (type) {
    case Int8Array: return view.getInt8(offset);
    case Uint8Array: return view.getUint8(offset);
    case Int16Array: return view.getInt16(offset, true);
    case Uint16Array: return view.getUint16(offset, true);
    case Int32Array: return view.getInt32(offset, true);
    case Uint32Array: return view.getUint32(offset, true);
    case Float32Array: return view.getFloat32(offset, true);
    case Float64Array: return view.getFloat64(offset, true);
    case BigInt64Array: return view.getBigInt64(offset, true);
    case BigUint64Array: return view.getBigUint64(offset, true);
    default: throw new Error(`Unsupported type: ${type.name}`);
  }
}

function writeTypedValue(view, offset, value, type) {
  switch (type) {
    case Int8Array: view.setInt8(offset, value); break;
    case Uint8Array: view.setUint8(offset, value); break;
    case Int16Array: view.setInt16(offset, value, true); break;
    case Uint16Array: view.setUint16(offset, value, true); break;
    case Int32Array: view.setInt32(offset, value, true); break;
    case Uint32Array: view.setUint32(offset, value, true); break;
    case Float32Array: view.setFloat32(offset, value, true); break;
    case Float64Array: view.setFloat64(offset, value, true); break;
    case BigInt64Array: view.setBigInt64(offset, BigInt(value), true); break;
    case BigUint64Array: view.setBigUint64(offset, BigInt(value), true); break;
    default: throw new Error(`Unsupported type: ${type.name}`);
  }
}

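// Example round-trip with the two helpers above, assuming a scratch buffer:
//   const dv = new DataView(new ArrayBuffer(8));
//   writeTypedValue(dv, 0, 3.5, Float32Array);
//   readTypedValue(dv, 0, Float32Array); // → 3.5 (little-endian throughout)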
class DougalBinaryBundle extends ArrayBuffer {

  static HEADER_LENGTH = 4; // Length of a bundle header

  /** Clone an existing ArrayBuffer into a DougalBinaryBundle
   */
  static clone (buffer) {
    const clone = new DougalBinaryBundle(buffer.byteLength);
    const uint8Array = new Uint8Array(buffer);
    const uint8ArrayClone = new Uint8Array(clone);
    uint8ArrayClone.set(uint8Array);
    return clone;
  }

  constructor (length, options) {
    super (length, options);
  }

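  // Wire format, as inferred from the parsing code below (all integers
  // little-endian):
  //   bundle := u32 header (low byte 0x1c, upper 24 bits payload length),
  //             followed by that many bytes of chunks
  //   chunk  := 12-byte header: [0] type (0x11 sequential, 0x12 interleaved),
  //             [1] udv, [2..3] count, [4..5] i, [6..7] j0, [8..9] Δj (signed),
  //             [10] Δelem_count, [11] elem_count; then one type byte per
  //             Δelem/elem, the Δelem initial values, padding to a 4-byte
  //             boundary, the record data, and more padding to 4 bytes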
  /** Get the count of bundles in this ArrayBuffer.
   *
   * Stops at the first offset that does not look like a bundle.
   */
  get bundleCount () {
    let count = 0;
    let currentBundleOffset = 0;
    const view = new DataView(this);

    while (currentBundleOffset < this.byteLength) {

      const currentBundleHeader = view.getUint32(currentBundleOffset, true);
      if ((currentBundleHeader & 0xff) !== 0x1c) {
        // This is not a bundle
        return count;
      }
      let currentBundleLength = currentBundleHeader >>> 8;

      currentBundleOffset += currentBundleLength + DougalBinaryBundle.HEADER_LENGTH;
      count++;
    }

    return count;
  }


  /** Get the number of chunks in the bundles of this ArrayBuffer
   */
  get chunkCount () {
    let count = 0;
    let bundleOffset = 0;
    const view = new DataView(this);

    while (bundleOffset < this.byteLength) {
      const header = view.getUint32(bundleOffset, true);
      if ((header & 0xFF) !== 0x1C) break;
      const length = header >>> 8;
      if (bundleOffset + 4 + length > this.byteLength) break;

      let chunkOffset = bundleOffset + 4; // relative to buffer start

      while (chunkOffset < bundleOffset + 4 + length) {
        const chunkType = view.getUint8(chunkOffset);
        if (chunkType !== 0x11 && chunkType !== 0x12) break;

        const cCount = view.getUint16(chunkOffset + 2, true);
        const ΔelemC = view.getUint8(chunkOffset + 10);
        const elemC = view.getUint8(chunkOffset + 11);

        let localOffset = 12; // header size

        localOffset += ΔelemC + elemC; // preface

        // initial values
        for (let k = 0; k < ΔelemC; k++) {
          const typeByte = view.getUint8(chunkOffset + 12 + k);
          const baseCode = typeByte & 0xF;
          const baseType = codeToType[baseCode];
          if (!baseType) throw new Error('Invalid base type code');
          localOffset += typeToBytes[baseType.name];
        }

        // pad after initial
        while (localOffset % 4 !== 0) localOffset++;

        if (chunkType === 0x11) { // Sequential
          // record data: Δelems incrs
          for (let k = 0; k < ΔelemC; k++) {
            const typeByte = view.getUint8(chunkOffset + 12 + k);
            const incrCode = typeByte >> 4;
            const incrType = codeToType[incrCode];
            if (!incrType) throw new Error('Invalid incr type code');
            localOffset += cCount * typeToBytes[incrType.name];
          }

          // elems
          for (let k = 0; k < elemC; k++) {
            const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
            const type = codeToType[typeCode];
            if (!type) throw new Error('Invalid elem type code');
            localOffset += cCount * typeToBytes[type.name];
          }
        } else { // Interleaved
          // Compute exact stride for interleaved record data
          let ΔelemStride = 0;
          for (let k = 0; k < ΔelemC; k++) {
            const typeByte = view.getUint8(chunkOffset + 12 + k);
            const incrCode = typeByte >> 4;
            const incrType = codeToType[incrCode];
            if (!incrType) throw new Error('Invalid incr type code');
            ΔelemStride += typeToBytes[incrType.name];
          }
          let elemStride = 0;
          for (let k = 0; k < elemC; k++) {
            const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
            const type = codeToType[typeCode];
            if (!type) throw new Error('Invalid elem type code');
            elemStride += typeToBytes[type.name];
          }
          const recordStride = ΔelemStride + elemStride;
          localOffset += cCount * recordStride;
        }

        // pad after record
        while (localOffset % 4 !== 0) localOffset++;

        chunkOffset += localOffset;
        count++;
      }

      bundleOffset += 4 + length;
    }

    return count;
  }

  /** Return an array of DougalBinaryChunkSequential or DougalBinaryChunkInterleaved instances
   */
  chunks () {
    const chunks = [];
    let bundleOffset = 0;
    const view = new DataView(this);

    while (bundleOffset < this.byteLength) {
      const header = view.getUint32(bundleOffset, true);
      if ((header & 0xFF) !== 0x1C) break;
      const length = header >>> 8;
      if (bundleOffset + 4 + length > this.byteLength) break;

      let chunkOffset = bundleOffset + 4;

      while (chunkOffset < bundleOffset + 4 + length) {
        const chunkType = view.getUint8(chunkOffset);
        if (chunkType !== 0x11 && chunkType !== 0x12) break;

        const cCount = view.getUint16(chunkOffset + 2, true);
        const ΔelemC = view.getUint8(chunkOffset + 10);
        const elemC = view.getUint8(chunkOffset + 11);

        let localOffset = 12;

        localOffset += ΔelemC + elemC;

        // initial values
        for (let k = 0; k < ΔelemC; k++) {
          const typeByte = view.getUint8(chunkOffset + 12 + k);
          const baseCode = typeByte & 0xF;
          const baseType = codeToType[baseCode];
          if (!baseType) throw new Error('Invalid base type code');
          localOffset += typeToBytes[baseType.name];
        }

        // pad after initial
        while (localOffset % 4 !== 0) localOffset++;

        if (chunkType === 0x11) { // Sequential
          // record data: Δelems incrs
          for (let k = 0; k < ΔelemC; k++) {
            const typeByte = view.getUint8(chunkOffset + 12 + k);
            const incrCode = typeByte >> 4;
            const incrType = codeToType[incrCode];
            if (!incrType) throw new Error('Invalid incr type code');
            localOffset += cCount * typeToBytes[incrType.name];
          }

          // elems
          for (let k = 0; k < elemC; k++) {
            const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
            const type = codeToType[typeCode];
            if (!type) throw new Error('Invalid elem type code');
            localOffset += cCount * typeToBytes[type.name];
          }
        } else { // Interleaved
          // Compute exact stride for interleaved record data
          let ΔelemStride = 0;
          for (let k = 0; k < ΔelemC; k++) {
            const typeByte = view.getUint8(chunkOffset + 12 + k);
            const incrCode = typeByte >> 4;
            const incrType = codeToType[incrCode];
            if (!incrType) throw new Error('Invalid incr type code');
            ΔelemStride += typeToBytes[incrType.name];
          }
          let elemStride = 0;
          for (let k = 0; k < elemC; k++) {
            const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
            const type = codeToType[typeCode];
            if (!type) throw new Error('Invalid elem type code');
            elemStride += typeToBytes[type.name];
          }
          const recordStride = ΔelemStride + elemStride;
          localOffset += cCount * recordStride;
        }

        // pad after record
        while (localOffset % 4 !== 0) localOffset++;

        switch (chunkType) {
          case 0x11:
            chunks.push(new DougalBinaryChunkSequential(this, chunkOffset, localOffset));
            break;
          case 0x12:
            chunks.push(new DougalBinaryChunkInterleaved(this, chunkOffset, localOffset));
            break;
          default:
            throw new Error('Invalid chunk type');
        }

        chunkOffset += localOffset;
      }

      bundleOffset += 4 + length;
    }

    return chunks;
  }

  /** Return an ArrayBuffer containing all data from all
   * chunks including reconstructed i, j and incremental
   * values as follows:
   *
   *   <i_0> <i_1> … <i_x>                      // i values (constant)
   *   <j_0> <j_1> … <j_x>                      // j values (j0 + Δj·idx)
   *   <Δelem_0_0> <Δelem_0_1> … <Δelem_0_x>    // reconstructed Δelem0 (uses baseType)
   *   <Δelem_1_0> <Δelem_1_1> … <Δelem_1_x>    // reconstructed Δelem1
   *   …
   *   <Δelem_y_0> <Δelem_y_1> … <Δelem_y_x>    // reconstructed Δelem_y (last)
   *   <elem_0_0> <elem_0_1> … <elem_0_x>       // First elem
   *   <elem_1_0> <elem_1_1> … <elem_1_x>       // Second elem
   *   …
   *   <elem_z_0> <elem_z_1> … <elem_z_x>       // Last elem
   *
   * It does not matter whether the underlying chunks are
   * sequential or interleaved. This function will transform
   * as necessary.
   *
   */
  getDataSequentially () {
    const chunks = this.chunks();
    if (chunks.length === 0) return new ArrayBuffer(0);

    const firstChunk = chunks[0];
    const ΔelemC = firstChunk.ΔelemCount;
    const elemC = firstChunk.elemCount;

    // Check consistency across chunks
    for (const chunk of chunks) {
      if (chunk.ΔelemCount !== ΔelemC || chunk.elemCount !== elemC) {
        throw new Error('Inconsistent chunk structures');
      }
    }

    // Get types from first chunk
    const view = new DataView(firstChunk);
    const ΔelemBaseTypes = [];
    for (let k = 0; k < ΔelemC; k++) {
      const typeByte = view.getUint8(12 + k);
      const baseCode = typeByte & 0xF;
      const baseType = codeToType[baseCode];
      if (!baseType) throw new Error('Invalid base type code');
      ΔelemBaseTypes.push(baseType);
    }
    const elemTypes = [];
    for (let k = 0; k < elemC; k++) {
      const typeCode = view.getUint8(12 + ΔelemC + k);
      const type = codeToType[typeCode];
      if (!type) throw new Error('Invalid elem type code');
      elemTypes.push(type);
    }

    // Compute total records
    const totalN = chunks.reduce((sum, c) => sum + c.jCount, 0);

    // Compute sizes
    const size_i = totalN * 2; // Uint16 for i
    const size_j = totalN * 4; // Int32 for j
    let size_Δelems = 0;
    for (const t of ΔelemBaseTypes) {
      size_Δelems += totalN * typeToBytes[t.name];
    }
    let size_elems = 0;
    for (const t of elemTypes) {
      size_elems += totalN * typeToBytes[t.name];
    }
    const totalSize = size_i + size_j + size_Δelems + size_elems;

    const ab = new ArrayBuffer(totalSize);
    const dv = new DataView(ab);

    // Write i's
    let off = 0;
    for (const chunk of chunks) {
      const i = chunk.i;
      for (let idx = 0; idx < chunk.jCount; idx++) {
        dv.setUint16(off, i, true);
        off += 2;
      }
    }

    // Write j's
    off = size_i;
    for (const chunk of chunks) {
      const j0 = chunk.j0;
      const Δj = chunk.Δj;
      for (let idx = 0; idx < chunk.jCount; idx++) {
        const j = j0 + idx * Δj;
        dv.setInt32(off, j, true);
        off += 4;
      }
    }

    // Write Δelems
    off = size_i + size_j;
    for (let m = 0; m < ΔelemC; m++) {
      const type = ΔelemBaseTypes[m];
      const bytes = typeToBytes[type.name];
      for (const chunk of chunks) {
        const arr = chunk.Δelem(m);
        for (let idx = 0; idx < chunk.jCount; idx++) {
          writeTypedValue(dv, off, arr[idx], type);
          off += bytes;
        }
      }
    }

    // Write elems
    for (let m = 0; m < elemC; m++) {
      const type = elemTypes[m];
      const bytes = typeToBytes[type.name];
      for (const chunk of chunks) {
        const arr = chunk.elem(m);
        for (let idx = 0; idx < chunk.jCount; idx++) {
          writeTypedValue(dv, off, arr[idx], type);
          off += bytes;
        }
      }
    }

    return ab;
  }

  /** Return an ArrayBuffer containing all data from all
   * chunks including reconstructed i, j and incremental
   * values, interleaved as follows:
   *
   *   <i_0> <j_0> <Δelem_0_0> <Δelem_1_0> … <Δelem_y_0> <elem_0_0> <elem_1_0> … <elem_z_0>
   *   <i_1> <j_1> <Δelem_0_1> <Δelem_1_1> … <Δelem_y_1> <elem_0_1> <elem_1_1> … <elem_z_1>
   *   <i_x> <j_x> <Δelem_0_x> <Δelem_1_x> … <Δelem_y_x> <elem_0_x> <elem_1_x> … <elem_z_x>
   *
   * It does not matter whether the underlying chunks are
   * sequential or interleaved. This function will transform
   * as necessary.
   *
   */
  getDataInterleaved () {
    const chunks = this.chunks();
    if (chunks.length === 0) return new ArrayBuffer(0);

    const firstChunk = chunks[0];
    const ΔelemC = firstChunk.ΔelemCount;
    const elemC = firstChunk.elemCount;

    // Check consistency across chunks
    for (const chunk of chunks) {
      if (chunk.ΔelemCount !== ΔelemC || chunk.elemCount !== elemC) {
        throw new Error('Inconsistent chunk structures');
      }
    }

    // Get types from first chunk
    const view = new DataView(firstChunk);
    const ΔelemBaseTypes = [];
    for (let k = 0; k < ΔelemC; k++) {
      const typeByte = view.getUint8(12 + k);
      const baseCode = typeByte & 0xF;
      const baseType = codeToType[baseCode];
      if (!baseType) throw new Error('Invalid base type code');
      ΔelemBaseTypes.push(baseType);
    }
    const elemTypes = [];
    for (let k = 0; k < elemC; k++) {
      const typeCode = view.getUint8(12 + ΔelemC + k);
      const type = codeToType[typeCode];
      if (!type) throw new Error('Invalid elem type code');
      elemTypes.push(type);
    }

    // Compute total records
    const totalN = chunks.reduce((sum, c) => sum + c.jCount, 0);

    // Compute record size
    const recordSize = 2 + 4 + // i (Uint16) + j (Int32)
      ΔelemBaseTypes.reduce((sum, t) => sum + typeToBytes[t.name], 0) +
      elemTypes.reduce((sum, t) => sum + typeToBytes[t.name], 0);
    const totalSize = totalN * recordSize;

    const ab = new ArrayBuffer(totalSize);
    const dv = new DataView(ab);

    let off = 0;
    for (const chunk of chunks) {
      const i = chunk.i;
      const j0 = chunk.j0;
      const Δj = chunk.Δj;
      for (let idx = 0; idx < chunk.jCount; idx++) {
        dv.setUint16(off, i, true);
        off += 2;
        const j = j0 + idx * Δj;
        dv.setInt32(off, j, true);
        off += 4;
        for (let m = 0; m < ΔelemC; m++) {
          const type = ΔelemBaseTypes[m];
          const bytes = typeToBytes[type.name];
          const arr = chunk.Δelem(m);
          writeTypedValue(dv, off, arr[idx], type);
          off += bytes;
        }
        for (let m = 0; m < elemC; m++) {
          const type = elemTypes[m];
          const bytes = typeToBytes[type.name];
          const arr = chunk.elem(m);
          writeTypedValue(dv, off, arr[idx], type);
          off += bytes;
        }
      }
    }

    return ab;
  }

  get records () {
    const data = [];
    for (const record of this) {
      data.push(record.slice(1));
    }
    return data;
  }

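  // Each record yielded by the iterator below is
  //   [udv, i, j, Δelem_0 … Δelem_y, elem_0 … elem_z]
  // so `records` returns the same rows with the leading udv dropped.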
  [Symbol.iterator]() {
    const chunks = this.chunks();
    let chunkIndex = 0;
    let chunkIterator = chunks.length > 0 ? chunks[0][Symbol.iterator]() : null;

    return {
      next() {
        if (!chunkIterator) {
          return { done: true };
        }

        let result = chunkIterator.next();
        while (result.done && chunkIndex < chunks.length - 1) {
          chunkIndex++;
          chunkIterator = chunks[chunkIndex][Symbol.iterator]();
          result = chunkIterator.next();
        }

        return result;
      }
    };
  }

}

class DougalBinaryChunkSequential extends ArrayBuffer {

  constructor (buffer, offset, length) {
    super(length);
    new Uint8Array(this).set(new Uint8Array(buffer, offset, length));
    this._ΔelemCaches = new Array(this.ΔelemCount);
    this._elemCaches = new Array(this.elemCount);
    this._ΔelemBlockOffsets = null;
    this._elemBlockOffsets = null;
    this._recordOffset = null;
  }

  _getRecordOffset() {
    if (this._recordOffset !== null) return this._recordOffset;
    const view = new DataView(this);
    const ΔelemC = this.ΔelemCount;
    const elemC = this.elemCount;

    let recordOffset = 12 + ΔelemC + elemC;
    for (let k = 0; k < ΔelemC; k++) {
      const tb = view.getUint8(12 + k);
      const bc = tb & 0xF;
      const bt = codeToType[bc];
      recordOffset += typeToBytes[bt.name];
    }
    while (recordOffset % 4 !== 0) recordOffset++;
    this._recordOffset = recordOffset;
    return recordOffset;
  }

  _initBlockOffsets() {
    if (this._ΔelemBlockOffsets !== null) return;
    const view = new DataView(this);
    const count = this.jCount;
    const ΔelemC = this.ΔelemCount;
    const elemC = this.elemCount;

    const recordOffset = this._getRecordOffset();

    this._ΔelemBlockOffsets = [];
    let o = recordOffset;
    for (let k = 0; k < ΔelemC; k++) {
      this._ΔelemBlockOffsets[k] = o;
      const tb = view.getUint8(12 + k);
      const ic = tb >> 4;
      const it = codeToType[ic];
      o += count * typeToBytes[it.name];
    }

    this._elemBlockOffsets = [];
    for (let k = 0; k < elemC; k++) {
      this._elemBlockOffsets[k] = o;
      const tc = view.getUint8(12 + ΔelemC + k);
      const t = codeToType[tc];
      o += count * typeToBytes[t.name];
    }
  }

  /** Return the user-defined value
   */
  get udv () {
    return new DataView(this).getUint8(1);
  }

  /** Return the number of j elements in this chunk
   */
  get jCount () {
    return new DataView(this).getUint16(2, true);
  }

  /** Return the i value in this chunk
   */
  get i () {
    return new DataView(this).getUint16(4, true);
  }

  /** Return the j0 value in this chunk
   */
  get j0 () {
    return new DataView(this).getUint16(6, true);
  }

  /** Return the Δj value in this chunk
   */
  get Δj () {
    return new DataView(this).getInt16(8, true);
  }

  /** Return the Δelem_count value in this chunk
   */
  get ΔelemCount () {
    return new DataView(this).getUint8(10);
  }

  /** Return the elem_count value in this chunk
   */
  get elemCount () {
    return new DataView(this).getUint8(11);
  }

  /** Return a TypedArray (e.g., Uint16Array, …) for the n-th Δelem in the chunk
   */
  Δelem (n) {
    if (this._ΔelemCaches[n]) return this._ΔelemCaches[n];

    if (n < 0 || n >= this.ΔelemCount) throw new Error(`Invalid Δelem index: ${n}`);
    const view = new DataView(this);
    const count = this.jCount;
    const ΔelemC = this.ΔelemCount;

    const typeByte = view.getUint8(12 + n);
    const baseCode = typeByte & 0xF;
    const incrCode = typeByte >> 4;
    const baseType = codeToType[baseCode];
    const incrType = codeToType[incrCode];
    if (!baseType || !incrType) throw new Error('Invalid type codes for Δelem');

    // Find offset for initial value of this Δelem
    let initialOffset = 12 + ΔelemC + this.elemCount;
    for (let k = 0; k < n; k++) {
      const tb = view.getUint8(12 + k);
      const bc = tb & 0xF;
      const bt = codeToType[bc];
      initialOffset += typeToBytes[bt.name];
    }

    let current = readTypedValue(view, initialOffset, baseType);

    // Advance to start of record data (after all initials and pad)
    const recordOffset = this._getRecordOffset();

    // Find offset for deltas of this Δelem (skip previous Δelems' delta blocks)
    this._initBlockOffsets();
    const deltaOffset = this._ΔelemBlockOffsets[n];

    // Reconstruct the array
    const arr = new baseType(count);
    const isBigInt = baseType === BigInt64Array || baseType === BigUint64Array;
    arr[0] = current;
    for (let idx = 1; idx < count; idx++) {
      let delta = readTypedValue(view, deltaOffset + idx * typeToBytes[incrType.name], incrType);
      if (isBigInt) {
        delta = BigInt(delta);
        current += delta;
      } else {
        current += delta;
      }
      arr[idx] = current;
    }

    this._ΔelemCaches[n] = arr;
    return arr;
  }

  /** Return a TypedArray (e.g., Uint16Array, …) for the n-th elem in the chunk
   */
  elem (n) {
    if (this._elemCaches[n]) return this._elemCaches[n];

    if (n < 0 || n >= this.elemCount) throw new Error(`Invalid elem index: ${n}`);
    const view = new DataView(this);
    const count = this.jCount;
    const ΔelemC = this.ΔelemCount;
    const elemC = this.elemCount;

    const typeCode = view.getUint8(12 + ΔelemC + n);
    const type = codeToType[typeCode];
    if (!type) throw new Error('Invalid type code for elem');

    // Find offset for this elem's data block
    this._initBlockOffsets();
    const elemOffset = this._elemBlockOffsets[n];

    // Create and populate the array
    const arr = new type(count);
    const bytes = typeToBytes[type.name];
    for (let idx = 0; idx < count; idx++) {
      arr[idx] = readTypedValue(view, elemOffset + idx * bytes, type);
    }

    this._elemCaches[n] = arr;
    return arr;
  }

  getRecord (index) {
    if (index < 0 || index >= this.jCount) throw new Error(`Invalid record index: ${index}`);

    const arr = [this.udv, this.i, this.j0 + index * this.Δj];

    for (let m = 0; m < this.ΔelemCount; m++) {
      const values = this.Δelem(m);
      arr.push(values[index]);
    }

    for (let m = 0; m < this.elemCount; m++) {
      const values = this.elem(m);
      arr.push(values[index]);
    }

    return arr;
  }

  [Symbol.iterator]() {
    let index = 0;
    const chunk = this;
    return {
      next() {
        if (index < chunk.jCount) {
          return { value: chunk.getRecord(index++), done: false };
        } else {
          return { done: true };
        }
      }
    };
  }

}

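// Delta reconstruction example: a Δelem column [100, 101, 103] is stored as
// the initial value 100 plus increments [_, 1, 2]; Δelem(n) re-accumulates
// them into the full column. The interleaved variant below does the same,
// except that increments are strided per record rather than stored in a block.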
class DougalBinaryChunkInterleaved extends ArrayBuffer {
  constructor(buffer, offset, length) {
    super(length);
    new Uint8Array(this).set(new Uint8Array(buffer, offset, length));
    this._incrStrides = [];
    this._elemStrides = [];
    this._incrOffsets = [];
    this._elemOffsets = [];
    this._recordStride = 0;
    this._recordOffset = null;
    this._initStrides();
    this._ΔelemCaches = new Array(this.ΔelemCount);
    this._elemCaches = new Array(this.elemCount);
  }

  _getRecordOffset() {
    if (this._recordOffset !== null) return this._recordOffset;
    const view = new DataView(this);
    const ΔelemC = this.ΔelemCount;
    const elemC = this.elemCount;

    let recordOffset = 12 + ΔelemC + elemC;
    for (let k = 0; k < ΔelemC; k++) {
      const tb = view.getUint8(12 + k);
      const bc = tb & 0xF;
      const bt = codeToType[bc];
      recordOffset += typeToBytes[bt.name];
    }
    while (recordOffset % 4 !== 0) recordOffset++;
    this._recordOffset = recordOffset;
    return recordOffset;
  }

  _initStrides() {
    const view = new DataView(this);
    const ΔelemC = this.ΔelemCount;
    const elemC = this.elemCount;

    // Compute incr strides and offsets
    let incrOffset = 0;
    for (let k = 0; k < ΔelemC; k++) {
      const typeByte = view.getUint8(12 + k);
      const incrCode = typeByte >> 4;
      const incrType = codeToType[incrCode];
      if (!incrType) throw new Error('Invalid incr type code');
      this._incrOffsets.push(incrOffset);
      const bytes = typeToBytes[incrType.name];
      this._incrStrides.push(bytes);
      incrOffset += bytes;
      this._recordStride += bytes;
    }

    // Compute elem strides and offsets
    let elemOffset = incrOffset;
    for (let k = 0; k < elemC; k++) {
      const typeCode = view.getUint8(12 + ΔelemC + k);
      const type = codeToType[typeCode];
      if (!type) throw new Error('Invalid elem type code');
      this._elemOffsets.push(elemOffset);
      const bytes = typeToBytes[type.name];
      this._elemStrides.push(bytes);
      elemOffset += bytes;
      this._recordStride += bytes;
    }
  }

  get udv() {
    return new DataView(this).getUint8(1);
  }

  get jCount() {
    return new DataView(this).getUint16(2, true);
  }

  get i() {
    return new DataView(this).getUint16(4, true);
  }

  get j0() {
    return new DataView(this).getUint16(6, true);
  }

  get Δj() {
    return new DataView(this).getInt16(8, true);
  }

  get ΔelemCount() {
    return new DataView(this).getUint8(10);
  }

  get elemCount() {
    return new DataView(this).getUint8(11);
  }

  Δelem(n) {
    if (this._ΔelemCaches[n]) return this._ΔelemCaches[n];

    if (n < 0 || n >= this.ΔelemCount) throw new Error(`Invalid Δelem index: ${n}`);
    const view = new DataView(this);
    const count = this.jCount;
    const ΔelemC = this.ΔelemCount;

    const typeByte = view.getUint8(12 + n);
    const baseCode = typeByte & 0xF;
    const incrCode = typeByte >> 4;
    const baseType = codeToType[baseCode];
    const incrType = codeToType[incrCode];
    if (!baseType || !incrType) throw new Error('Invalid type codes for Δelem');

    // Find offset for initial value of this Δelem
    let initialOffset = 12 + ΔelemC + this.elemCount;
    for (let k = 0; k < n; k++) {
      const tb = view.getUint8(12 + k);
      const bc = tb & 0xF;
      const bt = codeToType[bc];
      initialOffset += typeToBytes[bt.name];
    }

    let current = readTypedValue(view, initialOffset, baseType);

    // Find offset to start of record data
    const recordOffset = this._getRecordOffset();

    // Use precomputed offset for this Δelem
    const deltaOffset = recordOffset + this._incrOffsets[n];

    // Reconstruct the array
    const arr = new baseType(count);
    const isBigInt = baseType === BigInt64Array || baseType === BigUint64Array;
    arr[0] = current;
    for (let idx = 1; idx < count; idx++) {
      let delta = readTypedValue(view, deltaOffset + idx * this._recordStride, incrType);
      if (isBigInt) {
        delta = BigInt(delta);
        current += delta;
      } else {
        current += delta;
      }
      arr[idx] = current;
    }

    this._ΔelemCaches[n] = arr;
    return arr;
  }

  elem(n) {
    if (this._elemCaches[n]) return this._elemCaches[n];

    if (n < 0 || n >= this.elemCount) throw new Error(`Invalid elem index: ${n}`);
    const view = new DataView(this);
    const count = this.jCount;
    const ΔelemC = this.ΔelemCount;

    const typeCode = view.getUint8(12 + ΔelemC + n);
    const type = codeToType[typeCode];
    if (!type) throw new Error('Invalid type code for elem');

    // Find offset to start of record data
    const recordOffset = this._getRecordOffset();

    // Use precomputed offset for this elem (relative to start of record data)
    const elemOffset = recordOffset + this._elemOffsets[n];

    // Create and populate the array
    const arr = new type(count);
    const bytes = typeToBytes[type.name];
    for (let idx = 0; idx < count; idx++) {
      arr[idx] = readTypedValue(view, elemOffset + idx * this._recordStride, type);
    }

    this._elemCaches[n] = arr;
    return arr;
  }

  getRecord (index) {
    if (index < 0 || index >= this.jCount) throw new Error(`Invalid record index: ${index}`);

    const arr = [this.udv, this.i, this.j0 + index * this.Δj];

    for (let m = 0; m < this.ΔelemCount; m++) {
      const values = this.Δelem(m);
      arr.push(values[index]);
    }

    for (let m = 0; m < this.elemCount; m++) {
      const values = this.elem(m);
      arr.push(values[index]);
    }

    return arr;
  }

  [Symbol.iterator]() {
    let index = 0;
    const chunk = this;
    return {
      next() {
        if (index < chunk.jCount) {
          return { value: chunk.getRecord(index++), done: false };
        } else {
          return { done: true };
        }
      }
    };
  }

}


module.exports = { DougalBinaryBundle, DougalBinaryChunkSequential, DougalBinaryChunkInterleaved };
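// Usage sketch (hypothetical: `buf` is an ArrayBuffer of bundles received
// from the server, e.g. over a WebSocket):
//   const bundle = DougalBinaryBundle.clone(buf);
//   bundle.bundleCount;        // how many 0x1c-framed bundles
//   bundle.chunkCount;         // chunks across all bundles
//   for (const record of bundle) { /* [udv, i, j, …] */ }
//   const flat = bundle.getDataSequentially(); // column-major ArrayBuffer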
@@ -1,327 +0,0 @@
const codeToType = {
  0: Int8Array,
  1: Uint8Array,
  2: Int16Array,
  3: Uint16Array,
  4: Int32Array,
  5: Uint32Array,
  7: Float32Array,
  8: Float64Array,
  9: BigInt64Array,
  10: BigUint64Array
};

const typeToBytes = {
  Int8Array: 1,
  Uint8Array: 1,
  Int16Array: 2,
  Uint16Array: 2,
  Int32Array: 4,
  Uint32Array: 4,
  Float32Array: 4,
  Float64Array: 8,
  BigInt64Array: 8,
  BigUint64Array: 8
};

function sequential(binary) {
  if (!(binary instanceof Uint8Array) || binary.length < 4) {
    throw new Error('Invalid binary input');
  }

  const view = new DataView(binary.buffer, binary.byteOffset, binary.byteLength);
  let offset = 0;

  // Initialize result (assuming single i value for simplicity; extend for multiple i values if needed)
  const result = { i: null, j: [], Δelems: [], elems: [] };

  // Process bundles
  while (offset < binary.length) {
    // Read bundle header
    if (offset + 4 > binary.length) throw new Error('Incomplete bundle header');

    const bundleHeader = view.getUint32(offset, true);
    if ((bundleHeader & 0xFF) !== 0x1C) throw new Error('Invalid bundle marker');
    const bundleLength = bundleHeader >>> 8; // unsigned shift keeps 24-bit lengths positive
    offset += 4;
    const bundleEnd = offset + bundleLength;

    if (bundleEnd > binary.length) throw new Error('Bundle length exceeds input size');

    // Process chunks in bundle
    while (offset < bundleEnd) {
      // Read chunk header
      if (offset + 12 > bundleEnd) throw new Error('Incomplete chunk header');
      const chunkType = view.getUint8(offset);
      if (chunkType !== 0x11) throw new Error(`Unsupported chunk type: ${chunkType}`);
      offset += 1; // Skip chunk type
      offset += 1; // Skip udv
      const count = view.getUint16(offset, true); offset += 2;
      if (count > 65535) throw new Error('Chunk count exceeds 65535'); // defensive; getUint16 cannot exceed this
      const iValue = view.getUint16(offset, true); offset += 2;
      const j0 = view.getUint16(offset, true); offset += 2;
      const Δj = view.getInt16(offset, true); offset += 2;
      const ΔelemCount = view.getUint8(offset++); // Δelem_count
      const elemCount = view.getUint8(offset++); // elem_count

      // Set i value (assuming all chunks share the same i)
      if (result.i === null) result.i = iValue;
      else if (result.i !== iValue) throw new Error('Multiple i values not supported');

      // Read preface (element types)
      const ΔelemTypes = [];
      for (let i = 0; i < ΔelemCount; i++) {
        if (offset >= bundleEnd) throw new Error('Incomplete Δelem types');
        const typeByte = view.getUint8(offset++);
        const baseCode = typeByte & 0x0F;
        const incrCode = typeByte >> 4;
        if (!codeToType[baseCode] || !codeToType[incrCode]) {
          throw new Error(`Invalid type code in Δelem: ${typeByte}`);
        }
        ΔelemTypes.push({ baseType: codeToType[baseCode], incrType: codeToType[incrCode] });
      }
      const elemTypes = [];
      for (let i = 0; i < elemCount; i++) {
        if (offset >= bundleEnd) throw new Error('Incomplete elem types');
        const typeCode = view.getUint8(offset++);
        if (!codeToType[typeCode]) throw new Error(`Invalid type code in elem: ${typeCode}`);
        elemTypes.push(codeToType[typeCode]);
      }

      // Initialize Δelems and elems arrays if first chunk
      if (!result.Δelems.length && ΔelemCount > 0) {
        result.Δelems = Array(ΔelemCount).fill().map(() => []);
      }
      if (!result.elems.length && elemCount > 0) {
        result.elems = Array(elemCount).fill().map(() => []);
      }

      // Read initial values for Δelems
      const initialValues = [];
      for (const { baseType } of ΔelemTypes) {
        if (offset + typeToBytes[baseType.name] > bundleEnd) {
          throw new Error('Incomplete initial values');
        }
        initialValues.push(readTypedValue(view, offset, baseType));
        offset += typeToBytes[baseType.name];
      }
      // Skip padding
      while (offset % 4 !== 0) {
        if (offset >= bundleEnd) throw new Error('Incomplete padding after initial values');
        offset++;
      }

      // Reconstruct j values
      for (let idx = 0; idx < count; idx++) {
        result.j.push(j0 + idx * Δj);
      }

      // Read record data (non-interleaved)
      for (let i = 0; i < ΔelemCount; i++) {
        let current = initialValues[i];
        const values = result.Δelems[i];
        const incrType = ΔelemTypes[i].incrType;
        const isBigInt = typeof current === 'bigint';
        for (let idx = 0; idx < count; idx++) {
          if (offset + typeToBytes[incrType.name] > bundleEnd) {
            throw new Error('Incomplete Δelem data');
          }
          let delta = readTypedValue(view, offset, incrType);
          if (idx === 0) {
            values.push(isBigInt ? Number(current) : current);
          } else {
            if (isBigInt) {
              delta = BigInt(delta);
              current += delta;
              values.push(Number(current));
            } else {
              current += delta;
              values.push(current);
            }
          }
          offset += typeToBytes[incrType.name];
        }
      }
      for (let i = 0; i < elemCount; i++) {
        const values = result.elems[i];
        const type = elemTypes[i];
        const isBigInt = type === BigInt64Array || type === BigUint64Array;
        for (let idx = 0; idx < count; idx++) {
          if (offset + typeToBytes[type.name] > bundleEnd) {
            throw new Error('Incomplete elem data');
          }
          let value = readTypedValue(view, offset, type);
          values.push(isBigInt ? Number(value) : value);
          offset += typeToBytes[type.name];
        }
      }
      // Skip padding
      while (offset % 4 !== 0) {
        if (offset >= bundleEnd) throw new Error('Incomplete padding after record data');
        offset++;
      }
    }
  }

  return result;
}


function interleaved(binary) {
  if (!(binary instanceof Uint8Array) || binary.length < 4) {
    throw new Error('Invalid binary input');
  }

  const view = new DataView(binary.buffer, binary.byteOffset, binary.byteLength);
  let offset = 0;

  // Initialize result (assuming single i value for simplicity; extend for multiple i values if needed)
  const result = { i: null, j: [], Δelems: [], elems: [] };

  // Process bundles
  while (offset < binary.length) {
    // Read bundle header
    if (offset + 4 > binary.length) throw new Error('Incomplete bundle header');

    const bundleHeader = view.getUint32(offset, true);
    if ((bundleHeader & 0xFF) !== 0x1C) throw new Error('Invalid bundle marker');
    const bundleLength = bundleHeader >>> 8; // unsigned shift keeps 24-bit lengths positive
    offset += 4;
    const bundleEnd = offset + bundleLength;

    if (bundleEnd > binary.length) throw new Error('Bundle length exceeds input size');

    // Process chunks in bundle
    while (offset < bundleEnd) {
      // Read chunk header
      if (offset + 12 > bundleEnd) throw new Error('Incomplete chunk header');
      const chunkType = view.getUint8(offset);
      if (chunkType !== 0x12) throw new Error(`Unsupported chunk type: ${chunkType}`);
      offset += 1; // Skip chunk type
      offset += 1; // Skip udv
      const count = view.getUint16(offset, true); offset += 2;
      if (count > 65535) throw new Error('Chunk count exceeds 65535'); // defensive; getUint16 cannot exceed this
      const iValue = view.getUint16(offset, true); offset += 2;
      const j0 = view.getUint16(offset, true); offset += 2;
      const Δj = view.getInt16(offset, true); offset += 2;
      const ΔelemCount = view.getUint8(offset++); // Δelem_count
      const elemCount = view.getUint8(offset++); // elem_count

      // Set i value (assuming all chunks share the same i)
      if (result.i === null) result.i = iValue;
      else if (result.i !== iValue) throw new Error('Multiple i values not supported');

      // Read preface (element types)
      const ΔelemTypes = [];
      for (let i = 0; i < ΔelemCount; i++) {
        if (offset >= bundleEnd) throw new Error('Incomplete Δelem types');
        const typeByte = view.getUint8(offset++);
        const baseCode = typeByte & 0x0F;
        const incrCode = typeByte >> 4;
        if (!codeToType[baseCode] || !codeToType[incrCode]) {
          throw new Error(`Invalid type code in Δelem: ${typeByte}`);
        }
        ΔelemTypes.push({ baseType: codeToType[baseCode], incrType: codeToType[incrCode] });
      }
      const elemTypes = [];
      for (let i = 0; i < elemCount; i++) {
        if (offset >= bundleEnd) throw new Error('Incomplete elem types');
        const typeCode = view.getUint8(offset++);
        if (!codeToType[typeCode]) throw new Error(`Invalid type code in elem: ${typeCode}`);
        elemTypes.push(codeToType[typeCode]);
      }

      // Initialize Δelems and elems arrays if first chunk
      if (!result.Δelems.length && ΔelemCount > 0) {
        result.Δelems = Array(ΔelemCount).fill().map(() => []);
      }
      if (!result.elems.length && elemCount > 0) {
        result.elems = Array(elemCount).fill().map(() => []);
      }

      // Read initial values for Δelems
      const initialValues = [];
      for (const { baseType } of ΔelemTypes) {
        if (offset + typeToBytes[baseType.name] > bundleEnd) {
          throw new Error('Incomplete initial values');
        }
        initialValues.push(readTypedValue(view, offset, baseType));
        offset += typeToBytes[baseType.name];
      }
      // Skip padding
      while (offset % 4 !== 0) {
        if (offset >= bundleEnd) throw new Error('Incomplete padding after initial values');
        offset++;
      }

      // Reconstruct j values
      for (let idx = 0; idx < count; idx++) {
        result.j.push(j0 + idx * Δj);
      }

      // Read interleaved record data
      for (let idx = 0; idx < count; idx++) {
        // Read Δelems
        for (let i = 0; i < ΔelemCount; i++) {
          const values = result.Δelems[i];
          const incrType = ΔelemTypes[i].incrType;
          const isBigInt = typeof initialValues[i] === 'bigint';
          if (offset + typeToBytes[incrType.name] > bundleEnd) {
            throw new Error('Incomplete Δelem data');
          }
          let delta = readTypedValue(view, offset, incrType);
          offset += typeToBytes[incrType.name];
          if (idx === 0) {
            values.push(isBigInt ? Number(initialValues[i]) : initialValues[i]);
          } else {
            if (isBigInt) {
              delta = BigInt(delta);
              initialValues[i] += delta;
              values.push(Number(initialValues[i]));
            } else {
              initialValues[i] += delta;
              values.push(initialValues[i]);
            }
          }
        }
        // Read elems
        for (let i = 0; i < elemCount; i++) {
          const values = result.elems[i];
          const type = elemTypes[i];
          const isBigInt = type === BigInt64Array || type === BigUint64Array;
          if (offset + typeToBytes[type.name] > bundleEnd) {
            throw new Error('Incomplete elem data');
          }
          let value = readTypedValue(view, offset, type);
          values.push(isBigInt ? Number(value) : value);
          offset += typeToBytes[type.name];
        }
      }
      // Skip padding
      while (offset % 4 !== 0) {
        if (offset >= bundleEnd) throw new Error('Incomplete padding after record data');
        offset++;
      }
    }
  }

  return result;
}

function readTypedValue(view, offset, type) {
  switch (type) {
    case Int8Array: return view.getInt8(offset);
    case Uint8Array: return view.getUint8(offset);
    case Int16Array: return view.getInt16(offset, true);
    case Uint16Array: return view.getUint16(offset, true);
    case Int32Array: return view.getInt32(offset, true);
    case Uint32Array: return view.getUint32(offset, true);
    case Float32Array: return view.getFloat32(offset, true);
    case Float64Array: return view.getFloat64(offset, true);
    case BigInt64Array: return view.getBigInt64(offset, true);
    case BigUint64Array: return view.getBigUint64(offset, true);
    default: throw new Error(`Unsupported type: ${type.name}`);
  }
}

module.exports = { sequential, interleaved };
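// Usage sketch (hypothetical: `bytes` is a Uint8Array of sequential-chunk
// bundles, e.g. read from a file; the module path is illustrative):
//   const { sequential } = require('./decode');
//   const { i, j, Δelems, elems } = sequential(bytes);
//   // i: shared line value, j: reconstructed shot numbers,
//   // Δelems/elems: one array of decoded values per declared column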
@@ -1,380 +0,0 @@
const typeToCode = {
  Int8Array: 0,
  Uint8Array: 1,
  Int16Array: 2,
  Uint16Array: 3,
  Int32Array: 4,
  Uint32Array: 5,
  Float32Array: 7, // Float16 not natively supported in JS, use Float32
  Float64Array: 8,
  BigInt64Array: 9,
  BigUint64Array: 10
};

const typeToBytes = {
  Int8Array: 1,
  Uint8Array: 1,
  Int16Array: 2,
  Uint16Array: 2,
  Int32Array: 4,
  Uint32Array: 4,
  Float32Array: 4,
  Float64Array: 8,
  BigInt64Array: 8,
  BigUint64Array: 8
};

function sequential(json, iGetter, jGetter, Δelems = [], elems = [], udv = 0) {
  if (!Array.isArray(json) || !json.length) return new Uint8Array(0);
  if (typeof iGetter !== 'function' || typeof jGetter !== 'function') throw new Error('i and j must be getter functions');
  Δelems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`Δelems[${idx}].key must be a getter function`);
  });
  elems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`elems[${idx}].key must be a getter function`);
  });

  // Group records by i value
  const groups = new Map();
  for (const record of json) {
    const iValue = iGetter(record);
    if (iValue == null) throw new Error('Missing i value from getter');
    if (!groups.has(iValue)) groups.set(iValue, []);
    groups.get(iValue).push(record);
  }

  const maxBundleSize = 0xFFFFFF; // Max bundle length (24 bits)
  const buffers = [];

  // Process each group (i value)
  for (const [iValue, records] of groups) {
    // Sort records by j to ensure consistent order
    records.sort((a, b) => jGetter(a) - jGetter(b));
    const jValues = records.map(jGetter);
    if (jValues.some(v => v == null)) throw new Error('Missing j value from getter');

    // Split records into chunks based on Δj continuity
    const chunks = [];
    let currentChunk = [records[0]];
    let currentJ0 = jValues[0];
    let currentΔj = records.length > 1 ? jValues[1] - jValues[0] : 0;

    for (let idx = 1; idx < records.length; idx++) {
      const chunkIndex = chunks.reduce((sum, c) => sum + c.records.length, 0);
      const expectedJ = currentJ0 + (idx - chunkIndex) * currentΔj;
      if (jValues[idx] !== expectedJ || idx - chunkIndex >= 65536) {
        chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
        currentChunk = [records[idx]];
        currentJ0 = jValues[idx];
        currentΔj = idx + 1 < records.length ? jValues[idx + 1] - jValues[idx] : 0;
      } else {
        currentChunk.push(records[idx]);
      }
    }
    if (currentChunk.length > 0) {
      chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
    }

    // Calculate total size for all chunks in this group by simulating offsets
    const chunkSizes = chunks.map(({ records: chunkRecords }) => {
      if (chunkRecords.length > 65535) throw new Error(`Chunk size exceeds 65535 for i=${iValue}`);
      let simulatedOffset = 0; // Relative to chunk start
      simulatedOffset += 12; // Header
      simulatedOffset += Δelems.length + elems.length; // Preface
      simulatedOffset += Δelems.reduce((sum, e) => sum + typeToBytes[e.baseType.name], 0); // Initial values
      while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after initial
      simulatedOffset += chunkRecords.length * (
        Δelems.reduce((sum, e) => sum + typeToBytes[e.incrType.name], 0) +
        elems.reduce((sum, e) => sum + typeToBytes[e.type.name], 0)
      ); // Record data
      while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after record
      return simulatedOffset;
    });
    const totalChunkSize = chunkSizes.reduce((sum, size) => sum + size, 0);

    // Start a new bundle if needed
    const lastBundle = buffers[buffers.length - 1];
    if (!lastBundle || lastBundle.offset + totalChunkSize > maxBundleSize) {
      buffers.push({ offset: 4, buffer: null, view: null });
    }

    // Initialize DataView for current bundle
    const currentBundle = buffers[buffers.length - 1];
    if (!currentBundle.buffer) {
      const requiredSize = totalChunkSize + 4;
      currentBundle.buffer = new ArrayBuffer(requiredSize);
      currentBundle.view = new DataView(currentBundle.buffer);
    }

    // Process each chunk
    for (const { records: chunkRecords, j0, Δj } of chunks) {
      const chunkSize = chunkSizes.shift();

      // Ensure buffer is large enough
      if (currentBundle.offset + chunkSize > currentBundle.buffer.byteLength) {
        const newSize = currentBundle.offset + chunkSize;
        const newBuffer = new ArrayBuffer(newSize);
        new Uint8Array(newBuffer).set(new Uint8Array(currentBundle.buffer));
        currentBundle.buffer = newBuffer;
        currentBundle.view = new DataView(newBuffer);
      }

      // Write chunk header
      let offset = currentBundle.offset;
      currentBundle.view.setUint8(offset++, 0x11); // Chunk type
      currentBundle.view.setUint8(offset++, udv); // udv
      currentBundle.view.setUint16(offset, chunkRecords.length, true); offset += 2; // count
      currentBundle.view.setUint16(offset, iValue, true); offset += 2; // i
      currentBundle.view.setUint16(offset, j0, true); offset += 2; // j0
      currentBundle.view.setInt16(offset, Δj, true); offset += 2; // Δj
      currentBundle.view.setUint8(offset++, Δelems.length); // Δelem_count
      currentBundle.view.setUint8(offset++, elems.length); // elem_count

      // Write chunk preface (element types)
      for (const elem of Δelems) {
        const baseCode = typeToCode[elem.baseType.name];
        const incrCode = typeToCode[elem.incrType.name];
        currentBundle.view.setUint8(offset++, (incrCode << 4) | baseCode);
      }
      for (const elem of elems) {
        currentBundle.view.setUint8(offset++, typeToCode[elem.type.name]);
      }

      // Write initial values for Δelems
      for (const elem of Δelems) {
        const value = elem.key(chunkRecords[0]);
        if (value == null) throw new Error('Missing Δelem value from getter');
        writeTypedValue(currentBundle.view, offset, value, elem.baseType);
        offset += typeToBytes[elem.baseType.name];
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Write record data (non-interleaved)
      for (const elem of Δelems) {
        let prev = elem.key(chunkRecords[0]);
        for (let idx = 0; idx < chunkRecords.length; idx++) {
          const value = idx === 0 ? 0 : elem.key(chunkRecords[idx]) - prev;
          writeTypedValue(currentBundle.view, offset, value, elem.incrType);
          offset += typeToBytes[elem.incrType.name];
          prev = elem.key(chunkRecords[idx]);
        }
      }
      for (const elem of elems) {
        for (const record of chunkRecords) {
          const value = elem.key(record);
          if (value == null) throw new Error('Missing elem value from getter');
          writeTypedValue(currentBundle.view, offset, value, elem.type);
          offset += typeToBytes[elem.type.name];
        }
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Update bundle offset
      currentBundle.offset = offset;
    }

    // Update bundle header
    currentBundle.view.setUint32(0, 0x1C | ((currentBundle.offset - 4) << 8), true);
  }

  // Combine buffers into final Uint8Array
  const finalLength = buffers.reduce((sum, b) => sum + b.offset, 0);
  const result = new Uint8Array(finalLength);
  let offset = 0;
  for (const { buffer, offset: bundleOffset } of buffers) {
    result.set(new Uint8Array(buffer, 0, bundleOffset), offset);
    offset += bundleOffset;
  }

  return result;
}


function interleaved(json, iGetter, jGetter, Δelems = [], elems = [], udv = 0) {
  if (!Array.isArray(json) || !json.length) return new Uint8Array(0);
  if (typeof iGetter !== 'function' || typeof jGetter !== 'function') throw new Error('i and j must be getter functions');
  Δelems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`Δelems[${idx}].key must be a getter function`);
  });
  elems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`elems[${idx}].key must be a getter function`);
  });

  // Group records by i value
  const groups = new Map();
  for (const record of json) {
    const iValue = iGetter(record);
    if (iValue == null) throw new Error('Missing i value from getter');
    if (!groups.has(iValue)) groups.set(iValue, []);
    groups.get(iValue).push(record);
  }

  const maxBundleSize = 0xFFFFFF; // Max bundle length (24 bits)
  const buffers = [];

  // Process each group (i value)
  for (const [iValue, records] of groups) {
    // Sort records by j to ensure consistent order
    records.sort((a, b) => jGetter(a) - jGetter(b));
    const jValues = records.map(jGetter);
    if (jValues.some(v => v == null)) throw new Error('Missing j value from getter');

    // Split records into chunks based on Δj continuity
    const chunks = [];
    let currentChunk = [records[0]];
    let currentJ0 = jValues[0];
    let currentΔj = records.length > 1 ? jValues[1] - jValues[0] : 0;

    for (let idx = 1; idx < records.length; idx++) {
      const chunkIndex = chunks.reduce((sum, c) => sum + c.records.length, 0);
      const expectedJ = currentJ0 + (idx - chunkIndex) * currentΔj;
      if (jValues[idx] !== expectedJ || idx - chunkIndex >= 65536) {
        chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
        currentChunk = [records[idx]];
        currentJ0 = jValues[idx];
        currentΔj = idx + 1 < records.length ? jValues[idx + 1] - jValues[idx] : 0;
      } else {
        currentChunk.push(records[idx]);
      }
    }
    if (currentChunk.length > 0) {
      chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
    }

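    // Example: for j values [100, 102, 104, 110, 112] the loop above yields
    // two chunks, { j0: 100, Δj: 2 } with three records and { j0: 110, Δj: 2 }
    // with two, because 110 breaks the arithmetic progression started at 100.
    // The 65536 guard additionally caps each chunk at the 16-bit record count
    // that the chunk header can express.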
    // Calculate total size for all chunks in this group by simulating offsets
    const chunkSizes = chunks.map(({ records: chunkRecords }) => {
      if (chunkRecords.length > 65535) throw new Error(`Chunk size exceeds 65535 for i=${iValue}`);
      let simulatedOffset = 0; // Relative to chunk start
      simulatedOffset += 12; // Header
      simulatedOffset += Δelems.length + elems.length; // Preface
      simulatedOffset += Δelems.reduce((sum, e) => sum + typeToBytes[e.baseType.name], 0); // Initial values
      while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after initial
      simulatedOffset += chunkRecords.length * (
        Δelems.reduce((sum, e) => sum + typeToBytes[e.incrType.name], 0) +
        elems.reduce((sum, e) => sum + typeToBytes[e.type.name], 0)
      ); // Interleaved record data
      while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after record
      return simulatedOffset;
    });
    const totalChunkSize = chunkSizes.reduce((sum, size) => sum + size, 0);

    // Start a new bundle if needed
    const lastBundle = buffers[buffers.length - 1];
    if (!lastBundle || lastBundle.offset + totalChunkSize > maxBundleSize) {
      buffers.push({ offset: 4, buffer: null, view: null });
    }

    // Initialize DataView for current bundle
    const currentBundle = buffers[buffers.length - 1];
    if (!currentBundle.buffer) {
      const requiredSize = totalChunkSize + 4;
      currentBundle.buffer = new ArrayBuffer(requiredSize);
      currentBundle.view = new DataView(currentBundle.buffer);
    }

    // Process each chunk
    for (const { records: chunkRecords, j0, Δj } of chunks) {
      const chunkSize = chunkSizes.shift();

      // Ensure buffer is large enough
      if (currentBundle.offset + chunkSize > currentBundle.buffer.byteLength) {
        const newSize = currentBundle.offset + chunkSize;
        const newBuffer = new ArrayBuffer(newSize);
        new Uint8Array(newBuffer).set(new Uint8Array(currentBundle.buffer));
        currentBundle.buffer = newBuffer;
        currentBundle.view = new DataView(newBuffer);
      }

      // Write chunk header
      let offset = currentBundle.offset;
      currentBundle.view.setUint8(offset++, 0x12); // Chunk type
      currentBundle.view.setUint8(offset++, udv); // udv
      currentBundle.view.setUint16(offset, chunkRecords.length, true); offset += 2; // count
      currentBundle.view.setUint16(offset, iValue, true); offset += 2; // i
      currentBundle.view.setUint16(offset, j0, true); offset += 2; // j0
      currentBundle.view.setInt16(offset, Δj, true); offset += 2; // Δj
      currentBundle.view.setUint8(offset++, Δelems.length); // Δelem_count
      currentBundle.view.setUint8(offset++, elems.length); // elem_count

      // Write chunk preface (element types)
      for (const elem of Δelems) {
        const baseCode = typeToCode[elem.baseType.name];
        const incrCode = typeToCode[elem.incrType.name];
        currentBundle.view.setUint8(offset++, (incrCode << 4) | baseCode);
      }
      for (const elem of elems) {
        currentBundle.view.setUint8(offset++, typeToCode[elem.type.name]);
      }

      // Write initial values for Δelems
      for (const elem of Δelems) {
        const value = elem.key(chunkRecords[0]);
        if (value == null) throw new Error('Missing Δelem value from getter');
        writeTypedValue(currentBundle.view, offset, value, elem.baseType);
        offset += typeToBytes[elem.baseType.name];
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Write interleaved record data
      const prevValues = Δelems.map(elem => elem.key(chunkRecords[0]));
      for (let idx = 0; idx < chunkRecords.length; idx++) {
        // Write Δelems increments
        for (let i = 0; i < Δelems.length; i++) {
          const elem = Δelems[i];
          const value = idx === 0 ? 0 : elem.key(chunkRecords[idx]) - prevValues[i];
          writeTypedValue(currentBundle.view, offset, value, elem.incrType);
          offset += typeToBytes[elem.incrType.name];
          prevValues[i] = elem.key(chunkRecords[idx]);
        }
        // Write elems
        for (const elem of elems) {
          const value = elem.key(chunkRecords[idx]);
          if (value == null) throw new Error('Missing elem value from getter');
          writeTypedValue(currentBundle.view, offset, value, elem.type);
          offset += typeToBytes[elem.type.name];
        }
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Update bundle offset
      currentBundle.offset = offset;
    }

    // Update bundle header
    currentBundle.view.setUint32(0, 0x1C | ((currentBundle.offset - 4) << 8), true);
  }

  // Combine buffers into final Uint8Array
  const finalLength = buffers.reduce((sum, b) => sum + b.offset, 0);
  const result = new Uint8Array(finalLength);
  let offset = 0;
  for (const { buffer, offset: bundleOffset } of buffers) {
    result.set(new Uint8Array(buffer, 0, bundleOffset), offset);
    offset += bundleOffset;
  }

  return result;
}

function writeTypedValue(view, offset, value, type) {
  switch (type) {
    case Int8Array: view.setInt8(offset, value); break;
    case Uint8Array: view.setUint8(offset, value); break;
    case Int16Array: view.setInt16(offset, value, true); break;
    case Uint16Array: view.setUint16(offset, value, true); break;
    case Int32Array: view.setInt32(offset, value, true); break;
    case Uint32Array: view.setUint32(offset, value, true); break;
    case Float32Array: view.setFloat32(offset, value, true); break;
    case Float64Array: view.setFloat64(offset, value, true); break;
    case BigInt64Array: view.setBigInt64(offset, BigInt(value), true); break;
    case BigUint64Array: view.setBigUint64(offset, BigInt(value), true); break;
    default: throw new Error(`Unsupported type: ${type.name}`);
  }
}

module.exports = { sequential, interleaved };
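For orientation, a minimal usage sketch of the encoder above. It assumes sequential() takes the same arguments as interleaved() (records, i getter, j getter, Δelems, elems); the record field names are invented for illustration:

// Minimal sketch, assuming sequential(json, iGetter, jGetter, Δelems, elems)
// mirrors interleaved() above; the field names are assumptions.
const { sequential } = require('./encode');

const records = [
  { sequence: 7, point: 1000, tstamp: 1695448704372, qc: 3 },
  { sequence: 7, point: 1002, tstamp: 1695448704872, qc: 4 }
];

const bundle = sequential(
  records,
  r => r.sequence,                                                           // i
  r => r.point,                                                              // j
  [{ key: r => r.tstamp, baseType: BigUint64Array, incrType: Int16Array }],  // delta-encoded
  [{ key: r => r.qc, type: Uint8Array }]                                     // direct values
);
// bundle is a Uint8Array holding one or more bundles of 0x11 chunks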
@@ -1,139 +0,0 @@

/** Binary encoder
 *
 * This module encodes scalar data from a grid-like source
 * into a packed binary format for bandwidth efficiency and
 * speed of access.
 *
 * Data are indexed by i & j values, with "i" being constant
 * (e.g., a sequence or line number) and "j" expected to change
 * by a constant, linear amount (e.g., point numbers). All data
 * from consecutive "j" values will be encoded as a single array
 * (or series of arrays if multiple values are encoded).
 * If there is a jump in the "j" progression, a new "chunk" will
 * be started with a new array (or series of arrays).
 *
 * Multiple values may be encoded per (i, j) pair, using any of
 * the types supported by JavaScript's TypedArray except for
 * Float16 and Uint8Clamped. Each variable can be encoded with
 * a different size.
 *
 * Values may be encoded directly or as deltas from an initial
 * value. The latter is particularly efficient when dealing with
 * monotonically incrementing data, such as timestamps.
 *
 * The conceptual packet format for sequentially encoded data
 * looks like this:
 *
 * <msg-type> <count: x> <i> <j0> <Δj>
 *
 * <Δelement_count: y>
 * <element_count: z>
 *
 * <Δelement_1_type_base> … <Δelement_y_type_base>
 * <Δelement_1_type_incr> … <Δelement_y_type_incr>
 * <elem_1_type> … <elem_z_type>
 *
 * <Δelement_1_first> … <Δelement_y_first>
 *
 * <Δelem_1_0> … <Δelem_1_x>
 * …
 * <Δelem_y_0> … <Δelem_y_x>
 * <elem_1_0> … <elem_1_x>
 * …
 * <elem_z_0> … <elem_z_x>
 *
 *
 * The conceptual packet format for interleaved encoded data
 * looks like this:
 *
 *
 * <msg-type> <count: x> <i> <j0> <Δj>
 *
 * <Δelement_count: y>
 * <element_count: z>
 *
 * <Δelement_1_type_base> … <Δelement_y_type_base>
 * <Δelement_1_type_incr> … <Δelement_y_type_incr>
 * <elem_1_type> … <elem_z_type>
 *
 * <Δelement_1_first> … <Δelement_y_first>
 *
 * <Δelem_1_0> <Δelem_2_0> … <Δelem_y_0> <elem_1_0> <elem_2_0> … <elem_z_0>
 * <Δelem_1_1> <Δelem_2_1> … <Δelem_y_1> <elem_1_1> <elem_2_1> … <elem_z_1>
 * …
 * <Δelem_1_x> <Δelem_2_x> … <Δelem_y_x> <elem_1_x> <elem_2_x> … <elem_z_x>
 *
 *
 * Usage example:
 *
 * json = [
 *   {
 *     sequence: 7,
 *     sailline: 5354,
 *     line: 5356,
 *     point: 1068,
 *     tstamp: 1695448704372,
 *     objrefraw: 3,
 *     objreffinal: 4
 *   },
 *   {
 *     sequence: 7,
 *     sailline: 5354,
 *     line: 5352,
 *     point: 1070,
 *     tstamp: 1695448693612,
 *     objrefraw: 2,
 *     objreffinal: 3
 *   },
 *   {
 *     sequence: 7,
 *     sailline: 5354,
 *     line: 5356,
 *     point: 1072,
 *     tstamp: 1695448684624,
 *     objrefraw: 3,
 *     objreffinal: 4
 *   }
 * ];
 *
 * deltas = [
 *   { key: el => el.tstamp, baseType: BigUint64Array, incrType: Int16Array }
 * ];
 *
 * elems = [
 *   { key: el => el.objrefraw, type: Uint8Array },
 *   { key: el => el.objreffinal, type: Uint8Array }
 * ];
 *
 * i = el => el.sequence;
 *
 * j = el => el.point;
 *
 * bundle = encode(json, i, j, deltas, elems);
 *
 * // bundle:
 *
 * Uint8Array(40) [
 *   36, 0, 0, 28, 17, 0, 3, 0, 7, 0,
 *   44, 4, 2, 0, 1, 2, 42, 1, 1, 116,
 *   37, 158, 192, 138, 1, 0, 0, 0, 0, 0,
 *   248, 213, 228, 220, 3, 2, 3, 4, 3, 4
 * ]
 *
 * decode(bundle);
 *
 * {
 *   i: 7,
 *   j: [ 1068, 1070, 1072 ],
 *   'Δelems': [ [ 1695448704372, 1695448693612, 1695448684624 ] ],
 *   elems: [ [ 3, 2, 3 ], [ 4, 3, 4 ] ]
 * }
 *
 */

module.exports = {
  encode: {...require('./encode')},
  decode: {...require('./decode')},
  ...require('./classes')
};
@@ -1,12 +0,0 @@
{
  "name": "@dougal/binary",
  "version": "1.0.0",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "description": ""
}
@@ -1,25 +0,0 @@
class ConcurrencyLimiter {

  constructor(maxConcurrent) {
    this.maxConcurrent = maxConcurrent;
    this.active = 0;
    this.queue = [];
  }

  async enqueue(task) {
    if (this.active >= this.maxConcurrent) {
      await new Promise(resolve => this.queue.push(resolve));
    }
    this.active++;
    try {
      return await task();
    } finally {
      this.active--;
      if (this.queue.length > 0) {
        this.queue.shift()();
      }
    }
  }
}

module.exports = ConcurrencyLimiter;
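A minimal usage sketch of the limiter above; it assumes a runtime with a global fetch (Node 18+ or a browser) and invented URLs:

// Minimal sketch; the URL list is an assumption.
const ConcurrencyLimiter = require('@dougal/concurrency');

const limiter = new ConcurrencyLimiter(4); // at most 4 tasks in flight

async function fetchAll(urls) {
  // enqueue() parks excess tasks on the queue until a slot frees up
  return Promise.all(urls.map(url =>
    limiter.enqueue(() => fetch(url).then(res => res.json()))
  ));
}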
@@ -1,12 +0,0 @@
{
  "name": "@dougal/concurrency",
  "version": "1.0.0",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "description": ""
}
@@ -1,75 +0,0 @@

class Organisation {

  constructor (data) {

    this.read = !!data?.read;
    this.write = !!data?.write;
    this.edit = !!data?.edit;

    this.other = {};

    return new Proxy(this, {
      get (target, prop) {
        if (prop in target) {
          return target[prop];
        } else {
          return target.other[prop];
        }
      },

      set (target, prop, value) {
        const oldValue = target[prop] !== undefined ? target[prop] : target.other[prop];
        const newValue = Boolean(value);

        if (["read", "write", "edit"].includes(prop)) {
          target[prop] = newValue;
        } else {
          target.other[prop] = newValue;
        }

        return true;
      }
    });
  }

  toJSON () {
    return {
      read: this.read,
      write: this.write,
      edit: this.edit,
      ...this.other
    }
  }

  toString (replacer, space) {
    return JSON.stringify(this.toJSON(), replacer, space);
  }

  /** Limit the operations to only those allowed by `other`
   */
  filter (other) {
    const filteredOrganisation = new Organisation();

    filteredOrganisation.read = this.read && other.read;
    filteredOrganisation.write = this.write && other.write;
    filteredOrganisation.edit = this.edit && other.edit;

    return filteredOrganisation;
  }

  intersect (other) {
    return this.filter(other);
  }

}


if (typeof module !== 'undefined' && module.exports) {
  module.exports = Organisation; // CJS export
}

// ESM export
if (typeof exports !== 'undefined' && !exports.default) {
  exports.default = Organisation; // ESM export
}
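The Proxy above coerces every assignment to a boolean and routes unknown operation names into `other`; a small illustration follows (the "approve" operation is invented, not part of the API):

// Sketch of the Proxy behaviour; "approve" is an invented operation name.
const org = new Organisation({ read: true });

org.write = 1;        // known operation: coerced to true on the instance
org.approve = "yes";  // unknown operation: coerced and stored in `other`

org.read;             // true
org.approve;          // true (served from `other` by the get trap)
String(org);          // '{"read":true,"write":true,"edit":false,"approve":true}'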
@@ -1,225 +0,0 @@
const Organisation = require('./Organisation');

class Organisations {

  #values = {}

  #overlord

  static entries (orgs) {
    return orgs.names().map(name => [name, orgs.get(name)]);
  }

  constructor (data, overlord) {
    if (data instanceof Organisations) {
      for (const [name, value] of Organisations.entries(data)) {
        this.set(name, new Organisation(value));
      }
    } else if (data instanceof Object) {
      for (const [name, value] of Object.entries(data)) {
        this.set(name, new Organisation(value));
      }
    } else if (typeof data === "string" || data instanceof String) {
      this.set(data, new Organisation());
    } else if (typeof data !== "undefined") {
      throw new Error("Invalid constructor argument");
    }

    if (overlord) {
      this.#overlord = overlord;
    }
  }

  get values () {
    return this.#values;
  }

  get length () {
    return this.names().length;
  }

  get overlord () {
    return this.#overlord;
  }

  set overlord (v) {
    this.#overlord = new Organisations(v);
  }

  /** Get the operations for `name`
   */
  get (name) {
    const key = Object.keys(this.values).find( k => k.toLowerCase() == name.toLowerCase() ) ?? name;
    return this.values[key];
  }

  /** Set the operations for `name` to `value`
   *
   * If we have an overlord, ensure we cannot:
   *
   * 1. Add new organisations which the overlord
   *    is not a member of
   * 2. Access operations that the overlord is not
   *    allowed to access
   */
  set (name, value) {
    name = String(name).trim();
    const key = Object.keys(this.values).find( k => k.toLowerCase() == name.toLowerCase() ) ?? name;
    const org = new Organisation(value);

    if (this.overlord) {
      const parent = this.overlord.get(key) ?? this.overlord.get("*");
      if (parent) {
        this.values[key] = parent.filter(org);
      }
    } else {
      this.values[key] = new Organisation(value);
    }

    return this;
  }

  /** Enable the operation `op` in all organisations
   */
  enableOperation (op) {
    if (this.overlord) {
      Object.keys(this.#values)
        .filter( key => (this.overlord.get(key) ?? this.overlord.get("*"))?.[op] )
        .forEach( key => this.#values[key][op] = true );
    } else {
      Object.values(this.#values).forEach( org => org[op] = true );
    }

    return this;
  }

  /** Disable the operation `op` in all organisations
   */
  disableOperation (op) {
    Object.values(this.#values).forEach( org => org[op] = false );

    return this;
  }

  /** Create a new organisation object limited by the caller's rights
   *
   * The spawned Organisations instance will have the same organisations
   * and rights as the caller minus the applied `mask`. With the default
   * mask, the spawned object will inherit all rights except for `edit`
   * rights.
   *
   * The "*" organisation must be explicitly assigned. It is not inherited.
   */
  spawn (mask = {read: true, write: true, edit: false}) {

    const parent = new Organisations();
    const wildcard = this.get("*")?.edit; // If true, we can spawn everywhere

    this.entries().forEach( ([k, v]) => {
      // if (k != "*") { // This organisation is not inherited
      if (v.edit || wildcard) { // We have the right to spawn in this organisation
        const o = new Organisation({
          read: v.read && mask.read,
          write: v.write && mask.write,
          edit: v.edit && mask.edit
        });
        parent.set(k, o);
      }
      // }
    });

    return new Organisations({}, parent);
  }

  remove (name) {
    const key = Object.keys(this.values).find( k => k.toLowerCase() == name.toLowerCase() ) ?? name;
    delete this.values[key];
  }

  /** Return the list of organisation names
   */
  names () {
    return Object.keys(this.values);
  }

  /** Same as this.get(name)
   */
  value (name) {
    return this.values[name];
  }

  /** Same as Object.prototype.entries
   */
  entries () {
    return this.names().map( name => [ name, this.value(name) ] );
  }

  /** Return true if the named organisation is present
   */
  has (name) {
    return Boolean(this.value(name));
  }

  /** Return only those of our organisations
   * and operations present in `other`
   */
  filter (other) {
    const filteredOrganisations = new Organisations();

    const wildcard = other.value("*");

    for (const [name, org] of this.entries()) {
      const ownOrg = other.value(name) ?? wildcard;
      if (ownOrg) {
        filteredOrganisations.set(name, org.filter(ownOrg));
      }
    }

    return filteredOrganisations;
  }

  /** Return only those organisations
   * that have access to the required
   * operation
   */
  accessToOperation (op) {
    const filteredOrganisations = new Organisations();

    for (const [name, org] of this.entries()) {
      if (org[op]) {
        filteredOrganisations.set(name, org);
      }
    }

    return filteredOrganisations;
  }

  toJSON () {
    const obj = {};
    for (const key in this.values) {
      obj[key] = this.values[key].toJSON();
    }
    return obj;
  }

  toString (replacer, space) {
    return JSON.stringify(this.toJSON(), replacer, space);
  }

  *[Symbol.iterator] () {
    for (const [name, operations] of this.entries()) {
      yield {name, operations};
    }
  }

}


if (typeof module !== 'undefined' && module.exports) {
  module.exports = Organisations; // CJS export
}

// ESM export
if (typeof exports !== 'undefined' && !exports.default) {
  exports.default = Organisations; // ESM export
}
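A short sketch of the overlord capping performed by set(); the organisation names are invented:

// Sketch of overlord capping; the organisation names are assumptions.
const admin = new Organisations({ acme: { read: true, write: true, edit: true } });
const child = new Organisations({}, admin);

child.set("acme",   { read: true, write: true, edit: true }); // kept, capped by admin's rights
child.set("globex", { read: true }); // ignored: the overlord has no "globex" and no "*"

child.toJSON(); // { acme: { read: true, write: true, edit: true } }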
@@ -1,5 +0,0 @@

module.exports = {
  Organisation: require('./Organisation'),
  Organisations: require('./Organisations')
}
@@ -1,12 +0,0 @@
{
  "name": "@dougal/organisations",
  "version": "1.0.0",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "description": ""
}
@@ -1,364 +0,0 @@
const EventEmitter = require('events');
const { Organisations } = require('@dougal/organisations');

function randomUUID () {
  return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
    const r = Math.random() * 16 | 0;
    const v = c === 'x' ? r : (r & 0x3 | 0x8);
    return v.toString(16);
  });
}

class User extends EventEmitter {

  // Valid field names
  static fields = [ "ip", "host", "name", "email", "description", "colour", "active", "organisations", "meta" ]

  static validUUID (str) {
    const uuidv4Rx = /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
    return uuidv4Rx.test(str);
  }

  static validIPv4 (str) {
    const ipv4Rx = /^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\/([0-9]|[1-2][0-9]|3[0-2]))?$/;
    return ipv4Rx.test(str);
  }

  static validIPv6 (str) {
    const ipv6Rx = /^(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:){1,7}:|(?:[0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:){1,5}(?::[0-9a-fA-F]{1,4}){1,2}|(?:[0-9a-fA-F]{1,4}:){1,4}(?::[0-9a-fA-F]{1,4}){1,3}|(?:[0-9a-fA-F]{1,4}:){1,3}(?::[0-9a-fA-F]{1,4}){1,4}|(?:[0-9a-fA-F]{1,4}:){1,2}(?::[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:(?::[0-9a-fA-F]{1,4}){1,6}|:((?::[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(?::[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(?:ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?))|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?))))$/;
    return ipv6Rx.test(str);
  }

  static validHostname (str) {
    const hostnameRx = /^(?=.{1,253}$)(?:(?!-)[A-Za-z0-9-]{1,63}(?<!-)\.)+[A-Za-z]{2,}$/;
    return hostnameRx.test(str);
  }

  #setString (k, v) {
    if (typeof v === "undefined") {
      this.values[k] = v;
    } else {
      this.values[k] = String(v).trim();
    }
    this.emit("changed", k, v);
    this.#updateTimestamp();
  }

  #updateTimestamp (v) {
    if (typeof v === "undefined") {
      this.#timestamp = (new Date()).valueOf();
    } else {
      this.#timestamp = (new Date(v)).valueOf();
    }
    this.emit("last_modified", this.#timestamp);
  }

  // Create a new instance of `other`, where `other` is
  // an instance of User or of a derived class
  #clone (other = this) {
    const clone = new this.constructor();
    Object.assign(clone.values, other.values);
    clone.organisations = new Organisations(other.organisations);
    return clone;
  }

  values = {}

  #timestamp

  constructor (data) {
    super();

    User.fields.forEach( f => this[f] = data?.[f] );
    this.values.id = data?.id ?? randomUUID();
    this.values.active = !!this.active;
    this.values.hash = data?.hash;
    this.values.password = data?.password;
    this.values.organisations = new Organisations(data?.organisations);
    this.#updateTimestamp(data?.last_modified);
  }

  /*
   * Getters
   */

  get id () { return this.values.id }

  get ip () { return this.values.ip }

  get host () { return this.values.host }

  get name () { return this.values.name }

  get email () { return this.values.email }

  get description () { return this.values.description }

  get colour () { return this.values.colour }

  get active () { return this.values.active }

  get organisations () { return this.values.organisations }

  get password () { return this.values.password }

  get timestamp () { return new Date(this.#timestamp) }

  /*
   * Setters
   */

  set id (v) {
    if (typeof v === "undefined") {
      this.values.id = randomUUID();
    } else if (User.validUUID(v)) {
      this.values.id = v;
    } else {
      throw new Error("Invalid ID format (must be UUIDv4)");
    }
    this.emit("changed", "id", this.values.id);
    this.#updateTimestamp();
  }

  set ip (v) {
    if (User.validIPv4(v) || User.validIPv6(v) || typeof v === "undefined") {
      this.values.ip = v;
    } else {
      throw new Error("Invalid IP address or subnet");
    }
    this.emit("changed", "ip", this.values.ip);
    this.#updateTimestamp();
  }

  set host (v) {
    if (User.validHostname(v) || typeof v === "undefined") {
      this.values.host = v;
    } else {
      throw new Error("Invalid hostname");
    }
    this.emit("changed", "host", this.values.host);
    this.#updateTimestamp();
  }

  set name (v) {
    this.#setString("name", v);
  }

  set email (v) {
    // TODO should validate, but hey!
    this.#setString("email", v);
  }

  set description (v) {
    this.#setString("description", v);
  }

  set colour (v) {
    this.#setString("colour", v);
  }

  set active (v) {
    this.values.active = !!v;
    this.emit("changed", "active", this.values.active);
    this.#updateTimestamp();
  }

  set organisations (v) {
    this.values.organisations = new Organisations(v);
    this.emit("changed", "organisations", this.values.organisations);
    this.#updateTimestamp();
  }

  set password (v) {
    this.values.password = v;
    this.emit("changed", "password", this.values.password);
    this.#updateTimestamp();
  }


  /*
   * Validation methods
   */

  get errors () {
    let err = [];

    if (!this.id) err.push("ERR_NO_ID");
    if (!this.name) err.push("ERR_NO_NAME");
    if (!this.organisations.length) err.push("ERR_NO_ORG");

    return err;
  }

  get isValid () {
    return this.errors.length == 0;
  }

  /*
   * Filtering methods
   */

  filter (other) {
    // const filteredUser = new User(this);
    const filteredUser = this.#clone();
    filteredUser.organisations = this.organisations.filter(other.organisations);
    return filteredUser;
  }

  /** Return users that are visible to me.
   *
   * These are users with whom we share at least one common
   * organisation to which we have read, write or edit access.
   *
   * If we are wildcarded ("*"), we see everyone.
   *
   * If a peer is wildcarded, they can be seen by everyone.
   */
  peers (list) {
    if (this.organisations.value("*")) {
      return list;
    } else {
      return list.filter( user => this.canRead(user) );
      // return list.filter( user =>
      //   user.organisations.value("*") ||
      //   user.organisations.filter(this.organisations).length > 0
      //   this.organisations.filter(user.organisations).length > 0
      // );
    }
  }

  /** Return users that I can edit
   *
   * These users must belong to an organisation
   * over which I have edit rights.
   *
   * If we are edit wildcarded, we can edit everyone.
   */
  editablePeers (list) {
    const editableOrgs = this.organisations.accessToOperation("edit");
    if (editableOrgs.value("*")) {
      return list;
    } else {
      return list.filter( user => this.canEdit(user) );
      //   editableOrgs.filter(user.organisations).length > 0
      // );
    }
  }

  /*
   * General methods
   */

  /** Return `true` if we are `other`
   */
  is (other) {
    return this.id == other.id;
  }

  canDo (operation, other) {
    if (this.organisations.get('*')?.[operation])
      return true;

    if (other instanceof User) {
      return other.organisations.names().some(name => this.organisations.get(name)?.[operation]);
    } else if (other instanceof Organisations) {
      return other.accessToOperation(operation).names().some(name => this.organisations.get(name)?.[operation]);
    } else if (other?.organisations) {
      return this.canDo(operation, new Organisations(other.organisations));
    } else if (other instanceof Object) {
      return this.canDo(operation, new Organisations(other));
    }
    return false;
  }

  canRead (other) {
    return this.canDo("read", other);
  }

  canWrite (other) {
    return this.canDo("write", other);
  }

  canEdit (other) {
    return this.canDo("edit", other);
  }

  /** Perform an edit on another user
   *
   * Syntax: user.edit(other).to(another);
   *
   * Applies to `other` the changes described in `another`
   * that are permitted to `user`. The argument `another`
   * must be a plain object (not a `User` instance) with
   * only the properties that are to be changed.
   *
   * NOTE: Organisations are not merged, they are overwritten
   * and then filtered to ensure that the edited user does not
   * gain more privileges than those granted to the editing
   * user.
   *
   * Example:
   *
   *   // This causes user test77 to set user x23 to
   *   // inactive
   *   test77.edit(x23).to({active: false})
   */
  edit (other) {
    if (this.canEdit(other)) {
      return {
        to: (another) => {
          const newUser = Object.assign(this.#clone(other), another);
          return newUser.filter(this);
        }
      }
    }
    // Do not fail or throw but return undefined
  }

  /** Create a new user similar to us except it doesn't have `edit` rights
   * by default
   */
  spawn (init = {}, mask = {read: true, write: true, edit: false}) {
    // const user = new User(init);
    const user = this.#clone(init);
    user.organisations = this.organisations.accessToOperation("edit").disableOperation("edit");
    user.organisations.overlord = this.organisations;
    return user;
  }



  /*
   * Conversion and presentation methods
   */

  toJSON () {
    return {
      id: this.id,
      ip: this.ip,
      host: this.host,
      name: this.name,
      email: this.email,
      description: this.description,
      colour: this.colour,
      active: this.active,
      organisations: this.organisations.toJSON(),
      password: this.password
    }
  }

  toString (replacer, space) {
    return JSON.stringify(this.toJSON(), replacer, space);
  }

}


if (typeof module !== 'undefined' && module.exports) {
  module.exports = User; // CJS export
}

// ESM export
if (typeof exports !== 'undefined' && !exports.default) {
  exports.default = User; // ESM export
}
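A small sketch of the visibility rules implemented by canDo() and friends; the users and the organisation name are invented:

// Sketch of canDo()/canRead()/canEdit(); names are assumptions.
const alice = new User({ name: "Alice", organisations: { acme: { read: true, write: true, edit: true } } });
const bob   = new User({ name: "Bob",   organisations: { acme: { read: true } } });

alice.canRead(bob); // true: they share "acme" and Alice has read access there
alice.canEdit(bob); // true: Alice has edit rights over "acme"
bob.canEdit(alice); // false: Bob has no edit rights anywhere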
@@ -1,4 +0,0 @@

module.exports = {
  User: require('./User')
}
@@ -1,15 +0,0 @@
{
  "name": "@dougal/user",
  "version": "1.0.0",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "description": "",
  "dependencies": {
    "@dougal/organisations": "file:../organisations"
  }
}
@@ -3,7 +3,6 @@ module.exports = {
    '@vue/cli-plugin-babel/preset'
  ],
  plugins: [
    '@babel/plugin-proposal-logical-assignment-operators',
    '@babel/plugin-transform-private-methods'
    '@babel/plugin-proposal-logical-assignment-operators'
  ]
}

27287 lib/www/client/source/package-lock.json generated Normal file
File diff suppressed because it is too large
@@ -3,48 +3,34 @@
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "serve": "vue-cli-service serve --host=0.0.0.0",
    "serve": "vue-cli-service serve",
    "build": "vue-cli-service build"
  },
  "dependencies": {
    "@deck.gl/aggregation-layers": "^9.1.13",
    "@deck.gl/geo-layers": "^9.1.13",
    "@deck.gl/mesh-layers": "^9.1.14",
    "@dougal/binary": "file:../../../modules/@dougal/binary",
    "@dougal/concurrency": "file:../../../modules/@dougal/concurrency",
    "@dougal/organisations": "file:../../../modules/@dougal/organisations",
    "@dougal/user": "file:../../../modules/@dougal/user",
    "@loaders.gl/obj": "^4.3.4",
    "@mdi/font": "^7.2.96",
    "buffer": "^6.0.3",
    "@mdi/font": "^5.6.55",
    "core-js": "^3.6.5",
    "csv-parse": "^5.5.2",
    "d3": "^7.0.1",
    "jwt-decode": "^3.0.0",
    "leaflet": "^1.7.1",
    "leaflet-arrowheads": "^1.2.2",
    "leaflet-realtime": "^2.2.0",
    "leaflet.markercluster": "^1.4.1",
    "lodash.debounce": "^4.0.8",
    "marked": "^9.1.4",
    "path-browserify": "^1.0.1",
    "plotly.js-dist": "^2.27.0",
    "marked": "^2.0.3",
    "plotly.js-dist": "^2.5.0",
    "suncalc": "^1.8.0",
    "typeface-roboto": "0.0.75",
    "vue": "^2.6.12",
    "vue-debounce": "^2.6.0",
    "vue-router": "^3.5.1",
    "vuetify": "^2.5.0",
    "vuex": "^3.6.2",
    "yaml": "^2.3.4"
    "vuex": "^3.6.2"
  },
  "devDependencies": {
    "@babel/plugin-proposal-logical-assignment-operators": "^7.14.5",
    "@babel/plugin-transform-private-methods": "^7.27.1",
    "@vue/cli-plugin-babel": "^5.0.8",
    "@vue/cli-plugin-router": "^5.0.8",
    "@vue/cli-plugin-vuex": "^5.0.8",
    "@vue/cli-service": "^5.0.8",
    "@vue/cli-plugin-babel": "~4.4.0",
    "@vue/cli-plugin-router": "~4.4.0",
    "@vue/cli-plugin-vuex": "~4.4.0",
    "@vue/cli-service": "^4.5.13",
    "sass": "~1.32",
    "sass-loader": "^8.0.0",
    "stylus": "^0.54.8",

File diff suppressed because it is too large
@@ -9,7 +9,7 @@
      :color="snackColour"
      :timeout="6000"
    >
      <div v-html="snackText"></div>
      {{ snackText }}
      <template v-slot:action="{ attrs }">
        <v-btn
          text
@@ -35,7 +35,7 @@
</style>

<script>
import { mapActions, mapGetters } from 'vuex';
import { mapActions } from 'vuex';
import DougalNavigation from './components/navigation';
import DougalFooter from './components/footer';

@@ -52,8 +52,8 @@ export default {
  }),

  computed: {
    snackText () { return this.$root.markdownInline(this.$store.state.snack.snackText) },
    snackColour () { return this.$store.state.snack.snackColour },
    snackText () { return this.$store.state.snack.snackText },
    snackColour () { return this.$store.state.snack.snackColour }
  },

  watch: {
@@ -75,44 +75,17 @@ export default {
      if (!newVal) {
        this.$store.commit('setSnackText', "");
      }
    },
  }
  },

  methods: {

    handleJWT (context, {payload}) {
      this.setCredentials({token: payload.token});
    },

    handleProject (context, {payload}) {
      if (payload?.table == "public") {
        this.refreshProjects();
      }
    },

    registerNotificationHandlers () {

      this.$store.dispatch('registerHandler', {
        table: '.jwt',
        handler: this.handleJWT
      });

      this.$store.dispatch('registerHandler', {
        table: 'project',
        handler: this.handleProject
      });

    },

    ...mapActions(["setCredentials", "refreshProjects"])
    ...mapActions(["setCredentials"])
  },

  async mounted () {
  mounted () {
    // Local Storage values are always strings
    this.$vuetify.theme.dark = localStorage.getItem("darkTheme") == "true";
    this.registerNotificationHandlers();
    await this.setCredentials();
    this.refreshProjects();
    this.setCredentials()
  }

};

@@ -1,86 +0,0 @@
<template>
  <v-dialog
    v-model="dialogOpen"
    @input="(e) => $emit('input', e)"
    max-width="600"
  >
    <template v-slot:activator="{ on, attrs }">
      <v-btn v-if="adminaccess()"
        title="Create a new project from scratch. Generally, it's preferable to clone an existing project (right-click → ‘Clone’)"
        small
        outlined
        color="warning"
        v-bind="attrs"
        v-on="on"
      >
        <span>Create new project</span>
        <v-icon right small>mdi-file-document-plus-outline</v-icon>
      </v-btn>
    </template>

    <dougal-project-settings-name-id-geodetics
      :value="newProjectDetails"
      @input="save"
      @close="dialogOpen = false"
    >
    </dougal-project-settings-name-id-geodetics>

  </v-dialog>
</template>

<script>
import { mapActions, mapGetters } from 'vuex';
import DougalProjectSettingsNameIdGeodetics from '@/components/project-settings/name-id-geodetics'
import AccessMixin from '@/mixins/access';

export default {
  name: 'DougalAppBarExtensionProjectList',

  components: {
    DougalProjectSettingsNameIdGeodetics
  },

  mixins: [
    AccessMixin
  ],

  data() {
    return {
      dialogOpen: false,
      newProjectDetails: {
        name: null,
        id: null,
        epsg: null
      }
    };
  },

  methods: {
    async save (data) {
      this.dialogOpen = false;
      data.archived = true; // Make the project inactive to start with
      console.log("POST the new project data");
      console.log(data);

      const init = {
        method: "POST",
        body: data
      };
      const cb = (err, res) => {
        if (!err && res) {
          console.log(res);
          if (res.status == "201") {
            // Redirect to new project settings page
            const settingsUrl = `/projects/${data.id.toLowerCase()}/configuration`;
            this.$router.push(settingsUrl);

          }
        }
      };
      await this.api(["/project", init, cb]);
    },

    ...mapActions(["api"])
  }
}
</script>
@@ -1,63 +0,0 @@
<template>
  <v-tabs :value="tab" show-arrows v-if="page != 'configuration'">
    <v-tab v-for="tab, index in tabs" :key="index" link :to="tabLink(tab.href)" v-text="tab.text"></v-tab>
    <template v-if="adminaccess()">
      <v-spacer></v-spacer>
      <v-tab :to="tabLink('configuration')" class="orange--text darken-3" title="Edit project settings"><v-icon small left color="orange darken-3">mdi-cog-outline</v-icon> Settings</v-tab>
    </template>
  </v-tabs>
  <v-tabs optional :value="0" show-arrows align-with-title v-else>
    <v-tab>Project settings</v-tab>
    <v-spacer></v-spacer>
    <v-tab :to="tabLink('summary')">Go to project</v-tab>
  </v-tabs>
</template>

<script>
import { mapActions, mapGetters } from 'vuex';
import AccessMixin from '@/mixins/access';

export default {
  name: 'DougalAppBarExtensionProject',

  mixins: [
    AccessMixin
  ],

  data() {
    return {
      tabs: [
        { href: "summary", text: "Summary" },
        { href: "lines", text: "Lines" },
        { href: "plan", text: "Plan" },
        { href: "sequences", text: "Sequences" },
        { href: "calendar", text: "Calendar" },
        { href: "log", text: "Log" },
        { href: "qc", text: "QC" },
        { href: "graphs", text: "Graphs" },
        { href: "map", text: "Map" }
      ]
    };
  },

  computed: {

    page () {
      return this.$route.path.split(/\/+/)[3];
    },

    tab () {
      return this.tabs.findIndex(t => t.href == this.page);
    },

  },

  methods: {

    tabLink (href) {
      return `/projects/${this.$route.params.project}/${href}`;
    }

  }
}
</script>
@@ -1,82 +0,0 @@
<template>
  <v-row
    dense
    no-gutters
    align="center"
  >

    <v-col cols="1">
      <slot name="prepend"></slot>
    </v-col>

    <v-col cols="2">
      <v-chip outlined label small :color="colour || getHSLColourFor(key)">{{name}}</v-chip>
    </v-col>

    <v-col cols="4">
      <v-text-field
        dense
        label="Column"
        type="number"
        min="0"
        clearable
        :value="value.column"
        @input="$emit('input', {...value, column: Number($event)})"
      >
        <template v-slot:append-outer>
          <dougal-field-content-dialog
            :readonly="readonly"
            :value="value"
            @input="$emit('input', $event)"
          ></dougal-field-content-dialog>
        </template>
      </v-text-field>
    </v-col>

    <v-col cols="1">
      <slot name="append"></slot>
    </v-col>

  </v-row>
</template>

<style scoped>
</style>

<script>
import { parse } from 'csv-parse/sync'
import { getHSLColourFor } from '@/lib/hsl'
import DougalFieldContentDialog from '../fields/field-content-dialog'

export default {
  name: "DougalDelimitedStringDecoderField",

  components: {
    //DougalFixedStringDecoderField,
    DougalFieldContentDialog
  },

  props: {
    value: Object,
    name: String,
    colour: String,
    readonly: Boolean,
  },

  data () {
    return {
    }
  },

  computed: {
  },

  watch: {
  },

  methods: {
    getHSLColourFor: getHSLColourFor.bind(this),
  },

}
</script>
@@ -1,366 +0,0 @@
<template>
  <v-card flat elevation="0">
    <v-card-title v-if="title">{{ title }}</v-card-title>
    <v-card-subtitle v-if="subtitle">{{ subtitle }}</v-card-subtitle>
    <v-card-text>
      <v-form>

        <v-container>

          <dougal-delimited-string-decoder-field v-for="(field, key) in fields" :key="key"
            :colour="getHSLColourFor(key)"
            :readonly="readonly"
            :name="key"
            :value="fields[key]"
            @input="$emit('update:fields', {...fields, [key]: $event})"
          >
            <template v-slot:append v-if="editableFieldList && !readonly">
              <v-btn
                class="ml-3"
                fab
                text
                small
                title="Remove this property"
              >
                <v-icon
                  color="error"
                  @click="removeField(key)"
                >mdi-minus</v-icon>
              </v-btn>
            </template>
          </dougal-delimited-string-decoder-field>
          <v-row dense no-gutters v-if="editableFieldList && !readonly">
            <v-col cols=6 offset=1>
              <v-text-field
                label="Add new field"
                hint="Enter the name of a new field"
                :error-messages="fieldNameErrors"
                v-model="fieldName"
                append-outer-icon="mdi-plus-circle"
                @keydown.enter.prevent="addField"
              >
                <template v-slot:append-outer>
                  <v-icon
                    color="primary"
                    :disabled="fieldName && !!fieldNameErrors"
                    @click="addField"
                  >mdi-plus</v-icon>
                </template>
              </v-text-field>
            </v-col>
          </v-row>

          <v-row>
            <v-col cols="6">
              <v-combobox
                label="Field delimiter"
                hint="How are the fields separated from each other?"
                :items="delimiters"
                v-model="delimiter_"
              ></v-combobox>
            </v-col>
          </v-row>

          <v-row>
            <v-col cols="6">
              <v-text-field
                class="ml-3"
                label="Skip lines"
                hint="This lets you skip file headers if present"
                type="number"
                min="0"
                :value.number="numberedLines"
                @input="$emit('update:numbered-lines', Number($event))"
              ></v-text-field>
            </v-col>
            <v-col cols="6">
              <v-checkbox
                v-ripple
                label="First non-skipped line contains the field names"
                :value="headerRow"
                @change="$emit('update:header-row', $event)"
              ></v-checkbox>
            </v-col>
          </v-row>

          <v-row>
            <v-col>
              <v-simple-table dense>
                <template v-slot:default>
                  <colgroup v-if="showLineNumbers">
                    <col class="line_no"/>
                  </colgroup>
                  <thead>
                    <tr>
                      <th class="line_no">
                        <v-simple-checkbox
                          off-icon="mdi-format-list-numbered"
                          title="Show line numbers"
                          v-model="showLineNumbers"
                        >
                        </v-simple-checkbox>
                      </th>
                      <th v-for="(header, idx) in headers" :key="idx"
                        :style="`color:${header.colour};`"
                      >
                        <v-select
                          dense
                          clearable
                          :items="fieldsAvailableFor(idx)"
                          :value="header.fieldName"
                          @input="fieldSelected(idx, $event)"
                        >
                        </v-select>
                      </th>
                    </tr>
                    <tr>
                      <th class="line_no">
                        <small v-if="showLineNumbers && headers.length">Line no.</small>
                      </th>
                      <th v-for="(header, idx) in headers" :key="idx"
                        :style="`color:${header.colour};`"
                      >
                        {{ header.text }}
                      </th>
                    </tr>
                  </thead>
                  <tbody>
                    <tr v-for="(row, ridx) in rows" :key="ridx">
                      <td class="line_no">
                        <small v-if="showLineNumbers">
                          {{ ridx + (typeof numberedLines == "number" ? numberedLines : 0)+1 }}
                        </small>
                      </td>
                      <td v-for="(cell, cidx) in row" :key="cidx"
                        :style="`background-color:${cell.colour};`"
                      >
                        {{ cell.text }}
                      </td>
                    </tr>
                  </tbody>
                </template>
              </v-simple-table>
            </v-col>
          </v-row>

        </v-container>



      </v-form>
    </v-card-text>
    <v-card-actions>
    </v-card-actions>
  </v-card>
</template>

<style scoped>
/*.v-data-table table tbody tr td*/
th {
  border: 1px solid hsl(0, 0%, 33.3%);
}

td {
  border-inline: 1px solid hsl(0, 0%, 33.3%);
}

.line_no {
  text-align: right;
  width: 4ex;
  border: none !important;
}
</style>

<script>
import { parse } from 'csv-parse/sync'
import { getHSLColourFor } from '@/lib/hsl'
import truncateText from '@/lib/truncate-text'
import DougalDelimitedStringDecoderField from './delimited-string-decoder-field'

export default {
  name: "DougalDelimitedStringDecoder",

  components: {
    DougalDelimitedStringDecoderField
  },

  props: {
    text: String,
    fields: Object,
    delimiter: String,
    headerRow: { type: [ Boolean, Number ], default: false},
    numberedLines: [ Boolean, Number ],
    maxHeight: String,
    editableFieldList: { type: Boolean, default: true },
    readonly: Boolean,
    title: String,
    subtitle: String
  },

  data () {
    return {
      //< The reason for not using this.text directly is that at some point
      //< we might extend this component to allow editing the sample text.
      text_: "",
      //< The name of a new field to add.
      fieldName: "",
      showLineNumbers: null,
      delimiters: [
        { text: "Comma (,)", value: "," },
        { text: "Tabulator (⇥)", value: "\x09" },
        { text: "Semicolon (;)", value: ";" }
      ]
    }
  },

  computed: {

    /** The index of the last column.
     *
     * This will be the higher of the number of columns available
     * in the sample text or the highest column number defined in
     * this.fields.
     *
     * NOTE: May return NaN
     */
    numberOfColumns () {
      const lastIndex = Object.values(this.fields)
        .reduce( (acc, cur) => Math.max(acc, cur.column), this.cells[0]?.length-1);
      return isNaN(lastIndex) ? 0 : (lastIndex + 1);
    },

    cells () {
      return parse(this.text_, {delimiter: this.delimiter, trim: true});
    },

    headers () {

      const headerNames = typeof this.headerRow == "number"
        ? this.cells[this.headerRow]
        : this.headerRow === true
          ? this.cells[0]
          : Array.from(this.cells[0] ?? [], (_, ι) => `Column ${ι}`);

      return headerNames?.map((c, ι) => {
        const fieldName = Object.keys(this.fields).find(i => this.fields[i].column == ι);
        const field = this.fields[fieldName] ?? {}
        const colour = this.headerRow === false
          ? this.getHSLColourFor(ι*10)
          : this.getHSLColourFor(c);

        return {
          text: c,
          colour: this.getHSLColourFor(c),
          fieldName,
          field
        } ?? {}
      }) ?? [];
    },

    rows () {
      // NOTE It doesn't matter if headerRow is boolean, it works just the same.
      return [...this.cells].slice(this.headerRow).map(r =>
        r.map( (c, ι) => ({
          text: truncateText(c),
          colour: this.headers.length
            ? this.getHSLColourFor(this.headers[ι]?.text, 0.2)
            : this.getHSLColourFor(ι*10, 0.2)
        })));
    },

    fieldNameErrors () {
      return Object.keys(this.fields).includes(this.fieldName)
        ? "A field with this name already exists"
        : null;
    },

    delimiter_: {
      get () {
        return this.delimiters.find(i => i.value == this.delimiter) ?? this.delimiter;
      },

      set (v) {
        this.$emit("update:delimiter", typeof v == "object" ? v.value : v);
      }
    }

  },

  watch: {

    text () {
      if (this.text != this.text_) {
        this.reset();
      }
    },

    numberedLines (cur, prev) {
      if (cur != prev) {
        this.showLineNumbers = typeof cur == "number" || cur;
      }
    }

  },

  methods: {



    fieldsAvailableFor (idx) {
      return Object.keys(this.fields).filter( i =>
        this.fields[i].column === idx || this.fields[i].column === null) ?? [];
    },

    fieldSelected (col, key) {

      const fields = {};
      for (const k in this.fields) {
        const field = {...this.fields[k]};
        if (k === key) {
          field.column = col
        } else {
          if (field.column === col) {
            field.column = null;
          }
        }
        fields[k] = field;
      }

      this.$emit("update:fields", fields);

    },

    addField () {
      if (!this.fieldNameErrors) {
        this.$emit("update:fields", {
          ...this.fields,
          [this.fieldName]: { column: null }
        });
        this.fieldName = "";
      }
    },

    removeField (key) {
      const fields = {...this.fields};
      delete fields[key];
      this.$emit("update:fields", fields);
    },

    getHSLColourFor: getHSLColourFor.bind(this),

    numberLine (number, line) {
      return `<span class="line-number">${number}</span>${line}`;
    },

    reset () {
      this.text_ = this.text.replaceAll("\r", "");
    }

  },

  mounted () {
    this.reset();
  }

}
</script>
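The `fields` prop consumed by this component maps each field name to a column index; a sketch of its expected shape (the field names are invented):

// Hypothetical `fields` value; `column: null` marks a field not yet mapped
// to a column (this is what addField() creates).
const fields = {
  line:   { column: 0 },
  point:  { column: 1 },
  tstamp: { column: null }
};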
@@ -1,140 +0,0 @@
<template>
  <v-row dense no-gutters>

    <v-col cols="1">
      <slot name="prepend"></slot>
    </v-col>

    <v-col cols="2">
      <v-chip outlined label small :color="colour">{{name}}</v-chip>
    </v-col>

    <v-col cols="2">
      <v-text-field
        class="ml-3"
        dense
        label="From"
        type="number"
        min="0"
        v-model.number="value.offset"
        :readonly="readonly"
      ></v-text-field>
    </v-col>

    <v-col cols="2">
      <v-text-field
        class="ml-3"
        dense
        label="Length"
        type="number"
        min="0"
        v-model.number="value.length"
        :readonly="readonly"
      ></v-text-field>
    </v-col>

    <v-col cols="2">
      <dougal-field-content-dialog
        :readonly="readonly"
        :value="value"
        @input="$emit('input', $event)"
      ></dougal-field-content-dialog>

    </v-col>

    <v-col cols="1">
      <slot name="append"></slot>
    </v-col>

  </v-row>

</template>

<style scoped>

.input {
  flex: 1 1 auto;
  line-height: 20px;
  padding: 8px 0 8px;
  min-height: 32px;
  max-height: 32px;
  max-width: 100%;
  min-width: 0px;
  width: 100%;
}

.input >>> .chunk {
  padding-inline: 1px;
  border: 1px solid;
}

.input >>> .chunk-empty {
  padding-inline: 1px;
}

.input >>> .chunk-overlap {
  padding-inline: 1px;
  border: 1px solid grey;
  color: grey;
}
</style>

<script>
import DougalFieldContentDialog from '../fields/field-content-dialog'

export default {
  name: "DougalFixedStringDecoderField",

  components: {
    DougalFieldContentDialog
  },

  props: {
    value: Object,
    name: String,
    colour: String,
    readonly: Boolean,
  },

  data () {
    return {
      name_: "",
    }
  },

  computed: {
  },

  watch: {

    name () {
      if (this.name != this.name_) {
        this.name_ = this.name;
      }
    },
  },

  methods: {

    // NOTE These two methods look like copy-paste leftovers from the parent
    // decoder component: this component defines no `fields`, `fieldName`,
    // `fieldNameErrors` or `text` state, so they are effectively dead code.
    addField () {
      if (!this.fieldNameErrors) {
        this.$emit("update:fields", {
          ...this.fields,
          [this.fieldName]: { offset: 0, length: 0 }
        });
        this.fieldName = "";
      }
    },

    reset () {
      this.text_ = this.text;
    }

  },

  mounted () {
    this.reset();
  }

}
</script>
@@ -1,486 +0,0 @@
<template>
  <v-card flat elevation="0">
    <v-card-title v-if="title">{{ title }}</v-card-title>
    <v-card-subtitle v-if="subtitle">{{ subtitle }}</v-card-subtitle>
    <v-card-text>
      <v-form>
        <div v-if="isMultiline"
          class="multiline mb-5"
          :style="multilineElementStyle"
          v-html="html"
        >
        </div>
        <v-input v-else
          class="v-text-field"
          :hint="hint"
          persistent-hint
          v-model="text_"
        >
          <label
            class="v-label"
            :class="[ $vuetify.theme.isDark && 'theme--dark', text_ && text_.length && 'v-label--active' ]"
            style="left: 0px; right: auto; position: absolute;"
          >{{ label }}</label>
          <div class="input"
            :class="isMultiline ? 'multiline' : ''"
            v-html="html"
          >
          </div>
        </v-input>

        <v-container>

          <!-- Variable fields -->

          <v-row no-gutters class="mb-2">
            <h4>Variable fields</h4>
          </v-row>

          <dougal-fixed-string-decoder-field v-for="(field, key) in fields" :key="key"
            v-model="fields[key]"
            :name="key"
            :colour="getHSLColourFor(key)"
            :readonly="readonly"
          >
            <template v-slot:append v-if="editableFieldList && !readonly">
              <v-btn
                class="ml-3"
                fab
                text
                small
                title="Remove this property"
              >
                <v-icon
                  color="error"
                  @click="removeField(key)"
                >mdi-minus</v-icon>
              </v-btn>
            </template>
          </dougal-fixed-string-decoder-field>

          <v-row dense no-gutters v-if="editableFieldList && !readonly">
            <v-col cols="3">
              <v-text-field
                label="Add new field"
                hint="Enter the name of a new field"
                :error-messages="fieldNameErrors"
                v-model="fieldName"
                append-outer-icon="mdi-plus-circle"
                @keydown.enter.prevent="addField"
              >
                <template v-slot:append-outer>
                  <v-icon
                    color="primary"
                    :disabled="fieldName && !!fieldNameErrors"
                    @click="addField"
                  >mdi-plus</v-icon>
                </template>
              </v-text-field>
            </v-col>
          </v-row>

          <!-- Fixed text strings -->

          <v-row no-gutters class="mt-2 mb-2">
            <h4>Fixed strings</h4>
          </v-row>

          <dougal-fixed-string-text v-for="(item, idx) in fixed" :key="idx"
            v-model="fixed[idx]"
            :colour="getHSLColourFor(item.text+item.offset)"
            :readonly="readonly"
          >
            <template v-slot:append v-if="editableFieldList && !readonly">
              <v-btn
                class="ml-3"
                fab
                text
                small
                title="Remove this property"
              >
                <v-icon
                  color="error"
                  @click="removeFixed(idx)"
                >mdi-minus</v-icon>
              </v-btn>
            </template>
          </dougal-fixed-string-text>

          <v-row dense no-gutters v-if="editableFieldList && !readonly">
            <v-col cols="3">
              <v-text-field
                label="Add fixed text"
                hint="Enter text"
                :error-messages="fieldNameErrors"
                v-model="fixedName"
                @keydown.enter.prevent="addFixed"
              >
              </v-text-field>
            </v-col>
            <v-col cols="3">
              <v-text-field
                class="ml-3"
                label="From position"
                hint="Enter offset"
                type="number"
                min="0"
                v-model.number="fixedOffset"
                :readonly="readonly"
                append-outer-icon="mdi-plus-circle"
              >
                <template v-slot:append-outer>
                  <v-icon
                    color="primary"
                    :disabled="!fixedName"
                    @click="addFixed"
                  >mdi-plus</v-icon>
                </template>
              </v-text-field>
            </v-col>
          </v-row>

        </v-container>

      </v-form>
    </v-card-text>
    <v-card-actions>
    </v-card-actions>
  </v-card>
</template>

<style scoped>

.input {
  flex: 1 1 auto;
  line-height: 20px;
  padding: 8px 0 8px;
  min-height: 32px;
  max-height: 32px;
  max-width: 100%;
  min-width: 0px;
  width: 100%;
}

.multiline {
  font-family: mono;
  white-space: pre;
  overflow-x: auto;
  overflow-y: auto;
}

.multiline >>> .line-number {
  display: inline-block;
  font-size: 75%;
  width: 5ex;
  margin-inline-end: 1ex;
  text-align: right;
  border: none;
  position: relative;
  top: -1px;
}

.input, .multiline >>> .chunk-field {
  padding-inline: 1px;
  border: 1px solid;
}

.input, .multiline >>> .chunk-fixed {
  padding-inline: 1px;
  border: 1px dashed;
}

.input, .multiline >>> .chunk-empty {
  padding-inline: 1px;
}

.input, .multiline >>> .chunk-overlap {
  padding-inline: 1px;
  border: 1px solid grey;
  color: grey;
}

.input, .multiline >>> .chunk-mismatch {
  padding-inline: 1px;
  border: 2px solid red !important;
}
</style>

<script>
import { getHSLColourFor } from '@/lib/hsl'
import DougalFixedStringDecoderField from './fixed-string-decoder-field'
import DougalFixedStringText from './fixed-string-text'

export default {
  name: "DougalFixedStringDecoder",

  components: {
    DougalFixedStringDecoderField,
    DougalFixedStringText
  },

  mixins: [
    {
      methods: {
        getHSLColourFor
      }
    }
  ],

  props: {
    text: { type: String, default: "" },
    fixed: { type: Array, default: () => [] },
    fields: { type: Object, default: () => ({}) },
    multiline: Boolean,
    numberedLines: [ Boolean, Number ],
    maxHeight: String,
    editableFieldList: { type: Boolean, default: true },
    readonly: Boolean,
    title: String,
    subtitle: String,
    label: String,
    hint: String,
  },

  data () {
    return {
      //< The reason for not using this.text directly is that at some point
      //< we might extend this component to allow editing the sample text.
      text_: "",
      //< The value of a fixed string that should always be present at a specific position
      fixedName: "",
      fixedOffset: 0,
      //< The name of a new field to add.
      fieldName: ""
    }
  },

  computed: {

    /** Whether to treat the sample text as multiline.
     */
    isMultiline () {
      return this.multiline === true || this.text.includes("\n");
    },

    /* Return the fields and fixed strings as a single array sorted by offset.
     */
    parts () {
      // return Object.entries(this.fields).sort( (a, b) => a[1].offset - b[1].offset );
      return [
        ...Object.entries(this.fields),
        ...this.fixed.map(i => [ i.text + i.offset, {...i, length: i.text?.length} ])
      ].sort( (a, b) => {
        const offset_a = a.offset ?? a[1].offset;
        const offset_b = b.offset ?? b[1].offset;
        return offset_a - offset_b;
      })
    },
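    /* Editor's worked example (not in the original file; invented data):
     * given
     *
     *   fields = { line: { offset: 0, length: 4 }, point: { offset: 10, length: 5 } }
     *   fixed  = [ { text: "SOL", offset: 5 } ]
     *
     * parts() yields [name, part] tuples sorted by offset — fixed strings get
     * a synthetic key (text + offset) and derive their length from the text:
     *
     *   [ ["line",  { offset: 0,  length: 4 }],
     *     ["SOL5",  { text: "SOL", offset: 5, length: 3 }],
     *     ["point", { offset: 10, length: 5 }] ]
     *
     * The comparator in the deleted file returned `a - b`, comparing the
     * tuples themselves (always NaN, so no sorting happened); the corrected
     * line above compares the computed offsets instead.
     */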
    /* Transform this.parts into {start, end} intervals.
     */
    chunks () {
      const chunks = [];
      for (const [name, part] of this.parts) {
        const chunk = {};
        chunk.start = part.offset;
        chunk.end = part.offset + part.length - 1;
        //chunk.text = this.text_.slice(chunk.start, chunk.end);
        chunk.colour = this.getHSLColourFor(name);
        chunk.class = part.text ? "fixed" : "field";
        chunk.text = part.text;

        chunks.push(chunk);
      }

      return chunks;
    },

    multilineElementStyle () {
      if (this.maxHeight) {
        return `max-height: ${this.maxHeight};`;
      }
      return "";
    },

    /** Return a colourised HTML version of this.text.
     */
    html () {
      if (!this.text_) {
        return;
      }

      if (this.isMultiline) {
        if (typeof this.numberedLines == "number" || this.numberedLines) {
          const offset = typeof this.numberedLines == "number" ? Math.abs(this.numberedLines) : 0;
          return this.text_.split("\n").map( (line, idx) =>
            this.numberLine(offset+idx, this.renderTextLine(line))).join("<br/>");
        } else {
          return this.text_.split("\n").map(this.renderTextLine).join("<br/>");
        }
      } else {
        return this.renderTextLine(this.text_);
      }

    },

    fieldNameErrors () {
      return this.parts.find( i => i[0] == this.fieldName )
        ? "A field with this name already exists"
        : null;
    }

  },

  watch: {

    text () {
      if (this.text != this.text_) {
        this.reset();
      }
    }

  },

  methods: {

    addFixed () {
      if (this.fixedName) {
        const fixed = [
          ...this.fixed,
          { text: this.fixedName, offset: this.fixedOffset }
        ];
        fixed.sort( (a, b) => a.offset - b.offset );
        this.fixedName = null;
        this.fixedOffset = 0;
        this.$emit("update:fixed", fixed);
      }
    },

    addField () {
      if (!this.fieldNameErrors) {
        this.$emit("update:fields", {
          ...this.fields,
          [this.fieldName]: { offset: 0, length: 0 }
        });
        this.fieldName = "";
      }
    },

    // NOTE Not used
    updateField (field, key, value) {
      const fields = {
        ...this.fields,
        [field]: {
          ...this.fields[field],
          [key]: value
        }
      };
      this.$emit("update:fields", fields);
    },

    removeField (key) {
      const fields = {...this.fields};
      delete fields[key];
      this.$emit("update:fields", fields);
    },

    removeFixed (idx) {
      const fixed = [...this.fixed];
      fixed.splice(idx, 1);
      //fixed.sort( (a, b) => a.offset - b.offset );
      this.$emit("update:fixed", fixed);
    },

    /** Return an HSL colour as a function of an input value
     * `str`.
     */
    xgetHSLColourFor () {
      console.log("WILL BE DEFINED ON MOUNT");
    },

    /** Return a `<span>` opening tag.
     */
    style (name, colour) {
      return colour
        ? `<span class="${name}" style="color:${colour};border-color:${colour}">`
        : `<span class="${name}">`;
    },

    /** Return an array of the intervals that intersect `pos`.
     * May be empty.
     */
    chunksFor (pos) {
      return this.chunks.filter( chunk =>
        pos >= chunk.start &&
        pos <= chunk.end
      )
    },

    /*
     * Algorithm:
     *
     * Go through every character of one line of text and determine which
     * part(s) it falls in, if any. Collect adjacent characters with the same
     * style into <span/> elements. See the sketch after this component.
     */
    renderTextLine (text) {
      const parts = [];

      let prevStyle;

      for (const pos in text) {
        const chunks = this.chunksFor(pos);
        const isEmpty = chunks.length == 0;
        const isOverlap = chunks.length > 1;
        const isMismatch = chunks[0]?.text &&
          (text.substring(chunks[0].start, chunks[0].end+1) != chunks[0].text);

        const style = isEmpty
          ? this.style("chunk-empty")
          : isMismatch
          ? this.style("chunk-mismatch", chunks[0].colour)
          : isOverlap
          ? this.style("chunk-overlap")
          : this.style("chunk-"+chunks[0].class, chunks[0].colour);

        if (style != prevStyle) {
          if (prevStyle) {
            parts.push("</span>");
          }
          parts.push(style);
        }
        parts.push(text[pos]);
        prevStyle = style;
      }

      if (parts.length) {
        parts.push("</span>");
      }

      return parts.join("");
    },

    numberLine (number, line) {
      return `<span class="line-number">${number}</span>${line}`;
    },

    setText (v) {
      //console.log(v);
      this.text_ = v;
    },

    reset () {
      this.text_ = this.text.replaceAll("\r", "");
    }

  },

  mounted () {
    this.reset();
  }

}
</script>
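Editor's sketch (not part of the diff) of the renderTextLine() idea above: walk a line character by character, look up which interval(s) cover each position, and coalesce runs with the same style into one span. The chunks are invented sample data, colours are dropped, and a numeric index replaces the original for…in loop.

function render (text, chunks) {
  const covering = pos => chunks.filter(c => pos >= c.start && pos <= c.end);
  const parts = [];
  let prev;

  for (let pos = 0; pos < text.length; pos++) {
    const hits = covering(pos);
    // empty: no interval covers this character; overlap: more than one does
    const cls = hits.length === 0 ? "chunk-empty"
              : hits.length > 1  ? "chunk-overlap"
              : "chunk-" + hits[0].class;
    const style = `<span class="${cls}">`;

    if (style !== prev) {          // only open a new span when the style changes
      if (prev) parts.push("</span>");
      parts.push(style);
    }
    parts.push(text[pos]);
    prev = style;
  }

  if (parts.length) parts.push("</span>");
  return parts.join("");
}

console.log(render("AB12", [{ start: 0, end: 1, class: "field" }]));
// <span class="chunk-field">AB</span><span class="chunk-empty">12</span>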
@@ -1,122 +0,0 @@
<template>
  <v-row dense no-gutters>

    <v-col cols="1">
      <slot name="prepend"></slot>
    </v-col>

    <v-col cols="2">
      <v-chip outlined label small :color="colour" style="border: 1px dashed">{{value.text}}</v-chip>
    </v-col>

    <v-col cols="2">
      <v-text-field
        class="ml-3"
        dense
        label="From"
        type="number"
        min="0"
        v-model.number="value.offset"
        :readonly="readonly"
      ></v-text-field>
    </v-col>

    <v-col cols="2">
    </v-col>

    <v-col cols="2">
    </v-col>

    <v-col cols="1">
      <slot name="append"></slot>
    </v-col>

  </v-row>

</template>

<style scoped>

.input {
  flex: 1 1 auto;
  line-height: 20px;
  padding: 8px 0 8px;
  min-height: 32px;
  max-height: 32px;
  max-width: 100%;
  min-width: 0px;
  width: 100%;
}

.input >>> .chunk {
  padding-inline: 1px;
  border: 1px solid;
}

.input >>> .chunk-empty {
  padding-inline: 1px;
}

.input >>> .chunk-overlap {
  padding-inline: 1px;
  border: 1px solid grey;
  color: grey;
}
</style>

<script>

export default {
  name: "DougalFixedStringText",

  components: {
  },

  props: {
    value: Object,
    colour: String,
    readonly: Boolean,
  },

  data () {
    return {
      name_: "",
    }
  },

  computed: {
  },

  watch: {

    name () {
      if (this.name != this.name_) {
        this.name_ = this.name;
      }
    },
  },

  methods: {

    // NOTE As in the decoder-field component, these two methods reference
    // state this component does not define and appear to be dead code.
    addField () {
      if (!this.fieldNameErrors) {
        this.$emit("update:fields", {
          ...this.fields,
          [this.fieldName]: { offset: 0, length: 0 }
        });
        this.fieldName = "";
      }
    },

    reset () {
      this.text_ = this.text;
    }

  },

  mounted () {
    this.reset();
  }

}
</script>
@@ -1,301 +0,0 @@
<template>
  <v-card flat elevation="0">
    <v-card-title v-if="title">{{ title }}</v-card-title>
    <v-card-subtitle v-if="subtitle">{{ subtitle }}</v-card-subtitle>
    <v-card-text>

      <v-tabs v-model="viewTab">
        <v-tab>Text</v-tab>
        <v-tab>Parsed</v-tab>
      </v-tabs>

      <v-tabs-items v-model="viewTab">
        <v-tab-item>
          <v-simple-table dense class="text">
            <template v-slot:default>
              <colgroup v-if="showLineNumbers">
                <col class="line_no"/>
              </colgroup>
              <thead>
                <tr>
                  <th class="line_no">
                    <v-simple-checkbox
                      v-ripple
                      off-icon="mdi-format-list-numbered"
                      title="Show line numbers"
                      v-model="showLineNumbers"
                    >
                    </v-simple-checkbox>
                  </th>
                  <th v-for="(header, idx) in headers" :key="idx"
                    :style="`color:${header.colour};`"
                  >
                    {{ header.text }}
                  </th>
                </tr>
              </thead>
              <tbody>
                <tr v-for="(row, ridx) in rows" :key="ridx">
                  <td class="line_no">
                    <small v-if="showLineNumbers">
                      {{ ridx + (typeof numberedLines == "number" ? numberedLines : 0)+1 }}
                    </small>
                  </td>
                  <td v-for="(cell, cidx) in row" :key="cidx"
                    :style="`background-color:${cell.colour};`"
                  >
                    {{ cell.text }}
                  </td>
                </tr>
              </tbody>
            </template>
          </v-simple-table>
        </v-tab-item>

        <v-tab-item>
          <!-- Parsed view -->
          <v-simple-table dense class="parsed">
            <template v-slot:default>
              <thead>
                <tr>
                  <th
                    title="The line along which the vessel will nominally sail"
                  >Sail line</th>
                  <th
                    title="Whether the line will be acquired in the incrementing or decrementing shot points direction"
                  >Direction</th>
                  <th
                    title="Whether the line is planned to be acquired. Some lines may be in the preplot but not intended to be shot in a particular campaign"
                  >Acquire?</th>
                  <th
                    title="The source lines that will be shot from this vessel line. Typically there is one source line per source array."
                  >Source lines</th>
                  <th
                    title="Any general remarks concerning this sail line (supports Markdown)"
                  >Remarks</th>
                </tr>
              </thead>
              <tbody>
                <tr v-for="(line, line_no) in saillines" :key="line_no">
                  <td>{{ line_no }}</td>
                  <td v-if="line.incr" title="Incrementing">▲</td>
                  <td v-else title="Decrementing">▼</td>
                  <td v-if="line.ntba" title="Not to be acquired" class="ko">✘</td>
                  <td v-else title="Line acquisition planned" class="ok">✔</td>
                  <td v-html="line.source_line.join('<br/>')"></td>
                  <td v-if="line['meta.colour']"
                    :style="`background-color:${line['meta.colour']};`"
                    v-html="$options.filters.markdown(line.remarks)"></td>
                  <td v-else
                    v-html="$options.filters.markdown(line.remarks)"></td>
                </tr>
              </tbody>
            </template>
          </v-simple-table>
        </v-tab-item>
      </v-tabs-items>

    </v-card-text>
    <v-card-actions>
    </v-card-actions>
  </v-card>
</template>

<style scoped>
/*.v-data-table table tbody tr td*/
.text th {
  border: 1px solid hsl(0, 0%, 33.3%);
}

.text td {
  border-inline: 1px solid hsl(0, 0%, 33.3%);
}

.parsed td {
  vertical-align: top;
}

.line_no {
  text-align: right;
  width: 4ex;
  border: none !important;
}

.ok {
  color: green;
}

.ko {
  color: red;
}
</style>

<script>
import { parse } from 'csv-parse/sync'
import { getHSLColourFor } from '@/lib/hsl'
import truncateText from '@/lib/truncate-text'

export default {
  name: "DougalSaillinesStringDecoder",

  components: {
  },

  props: {
    text: String,
    //fields: Object,
    //delimiter: String,
    headerRow: { type: [ Boolean, Number ], default: false},
    numberedLines: [ Boolean, Number ],
    maxHeight: String,
    editableFieldList: { type: Boolean, default: true },
    readonly: Boolean,
    title: String,
    subtitle: String
  },

  data () {
    return {
      delimiter: ",",
      showLineNumbers: null,
      text_: "",
      viewTab: null
    }
  },

  computed: {

    cells () {
      return parse(this.text_, {delimiter: this.delimiter, trim: true});
    },

    headers () {
      return this.cells[0]?.map(cell => ({
        text: cell,
        colour: this.getHSLColourFor(cell),
        backgroundColour: this.getHSLColourFor(cell, 0.2),
      })) ?? [];
    },

    rows () {
      return [...this.cells].slice(1).map(r =>
        r.map( (c, ι) => ({
          text: truncateText(c),
          colour: this.headers[ι]?.backgroundColour
        })));
    },

    /*
     * A saillines object looks like:
     *
     * {
     *   [sail_line]: {
     *     incr: true, // or false
     *     ntba: true, // or false
     *     remarks: "",
     *     source_line: [ 1000, 1001, …],
     *     "meta.colour": ""
     *   },
     *   …
     * }
     *
     * See the worked example after this component.
     */
    saillines () {
      // Return an array of the column numbers
      // corresponding to `key`.
      // The input format accepts duplicate column names,
      // notably for `source_line`.
      const key_indices = (key) =>
        this.headers.reduce( (acc, cur, ι) => {
          if (cur.text == key) {
            acc.push(ι)
          }
          return acc;
        }, []);

      // Properties of the sailline object
      const keys = [ "incr", "ntba", "remarks", "source_line", "meta.colour" ];

      function to_bool (v, missing=false) {
        return (v === undefined || v === null)
          ? missing // Missing value meaning
          : /^t(rue)|^[1-9-]+$/i.test(String(v).trim())
      }

      // To transform the input text into the required format for each field
      const transformer = (key) => {
        const transformers = {
          incr: (v) => to_bool(v, true),
          ntba: (v) => to_bool(v, false),
          remarks: (v) => (v === undefined || v === null) ? "" : String(v),
          source_line: Number,
        };
        return transformers[key] ?? String;
      };

      // This is the saillines object
      const lines = {};

      // The column numbers for each property
      const columns = keys.map( k => [ k, key_indices(k) ] );

      // The column number for the sail_line property, which
      // we use as a key.
      const sail_line_idx = key_indices("sail_line")[0];

      // Transform each line in the input file into a
      // sailline object (just for display purposes,
      // this is not exactly how the server will do it).
      for (const row of this.rows) {
        const sail_line = row[sail_line_idx]?.text;
        const values = columns.map(i => [
          i[0],
          i[0] == "source_line"
            ? i[1].map(idx => transformer(i[0])(row[idx]?.text))
            : transformer(i[0])(row[i[1][0]]?.text)
        ]);

        lines[sail_line] = Object.fromEntries(values);
      }

      return lines;
    }

  },

  watch: {

    text () {
      if (this.text != this.text_) {
        this.reset();
      }
    },

    numberedLines (cur, prev) {
      if (cur != prev) {
        this.showLineNumbers = typeof cur == "number" || cur;
      }
    }

  },

  methods: {

    getHSLColourFor: getHSLColourFor.bind(this),

    numberLine (number, line) {
      return `<span class="line-number">${number}</span>${line}`;
    },

    reset () {
      this.text_ = this.text.replaceAll("\r", "");
    }

  },

  mounted () {
    this.reset();
  }

}
</script>
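Editor's worked example (not part of the diff) of what saillines() above produces; the CSV content is invented but uses the column names the component looks for.

// Given this parsed CSV (header row + one data row):
//
//   sail_line,incr,ntba,source_line,source_line,remarks
//   1040,1,0,2040,2041,First line
//
// the computed property yields:
const expected = {
  "1040": {
    incr: true,                // "1" matches /^t(rue)|^[1-9-]+$/i
    ntba: false,               // "0" does not, so the line is to be acquired
    remarks: "First line",
    source_line: [2040, 2041], // duplicate source_line columns collect into an array
    "meta.colour": "undefined" // column absent: String(undefined) — a quirk of
                               // this display-only implementation
  }
};
console.log(expected);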
@@ -1,269 +0,0 @@
<template>
  <v-row dense no-gutters>

    <v-col>
      <slot name="prepend"></slot>
    </v-col>

    <v-col cols="2">
      <v-chip v-if="value.item && !readonly"
        outlined
        label
        small
        :color="colour"
        :title="description"
      >{{name}}</v-chip>
      <v-select v-else-if="items.length && !readonly"
        label="Item"
        :items="items"
        v-model="value.item"
        dense
        title="Select an item to use as a field"
      ></v-select>
    </v-col>

    <v-col>
      <v-select v-if="type == 'boolean'"
        label="Condition"
        :items="[true, false]"
        v-model="value.when"
        dense
        title="Use this configuration only when the value of this item matches the selected state. This allows the user to configure different values for true and false conditions."
      ></v-select>
    </v-col>

    <v-col>
      <v-text-field v-if="type == 'boolean' || type == 'text'"
        class="ml-3"
        dense
        label="Value"
        v-model="value.value"
        title="This literal text will be inserted at the designated position"
      ></v-text-field>
      <v-menu v-else-if="type == 'number'"
        max-width="600"
        :close-on-content-click="false"
        offset-y
      >
        <template v-slot:activator="{ on, attrs }">
          <v-chip
            class="ml-3"
            small
            :light="$vuetify.theme.isDark"
            :dark="!$vuetify.theme.isDark"
            :color="value.scale_offset != null || value.scale_multiplier != null ? 'primary' : ''"
            :title="`Number scaling${ value.scale_offset != null ? ('\nOffset: ' + value.scale_offset) : '' }${ value.scale_multiplier != null ? ('\nMultiplier: ' + value.scale_multiplier) : ''}`"
            v-bind="attrs"
            v-on="on"
          >
            <v-icon small>mdi-ruler</v-icon>
          </v-chip>
        </template>

        <v-card rounded outlined>
          <v-card-text>
            <v-row dense no-gutters>
              <v-text-field
                type="number"
                dense
                clearable
                label="Offset"
                title="Offset the value by this amount (applied after the multiplier)"
                v-model.number="value.scale_offset"
              ></v-text-field>
            </v-row>
            <v-row dense no-gutters>
              <v-text-field
                type="number"
                dense
                clearable
                label="Scale"
                title="Multiply the value by this amount (applied before the offset)"
                v-model.number="value.scale_multiplier"
              ></v-text-field>
            </v-row>
          </v-card-text>
        </v-card>
      </v-menu>
    </v-col>

    <v-col>
      <v-text-field
        class="ml-3"
        dense
        label="From"
        type="number"
        min="0"
        v-model.number="value.offset"
        :readonly="readonly"
      ></v-text-field>
    </v-col>

    <v-col>
      <v-text-field
        class="ml-3"
        dense
        label="Length"
        type="number"
        min="0"
        v-model.number="value.length"
        :readonly="readonly"
      ></v-text-field>
    </v-col>

    <v-col>
      <v-menu v-if="value.length > 1"
        max-width="600"
        :close-on-content-click="false"
        offset-y
        :disabled="!(value.length>1)"
      >
        <template v-slot:activator="{ on, attrs }">
          <v-chip
            class="ml-3"
            small
            :light="$vuetify.theme.isDark"
            :dark="!$vuetify.theme.isDark"
            title="Text alignment"
            v-bind="attrs"
            v-on="on"
            :disabled="!(value.length>1)"
          >
            <v-icon small v-if="value.pad_side=='right'">mdi-format-align-left</v-icon>
            <v-icon small v-else-if="value.pad_side=='left'">mdi-format-align-right</v-icon>
            <v-icon small v-else>mdi-format-align-justify</v-icon>
          </v-chip>
        </template>

        <v-card rounded outlined>
          <v-card-text>
            <v-row dense no-gutters>
              <v-select
                label="Alignment"
                clearable
                :items='[{text:"Left", value:"right"}, {text:"Right", value:"left"}]'
                v-model="value.pad_side"
              ></v-select>
            </v-row>
            <v-row dense no-gutters v-if="value.pad_side">
              <v-text-field
                dense
                label="Pad character"
                title="Fill the width of the field on the opposite side by padding with this character"
                v-model="value.pad_string"
              ></v-text-field>
            </v-row>
          </v-card-text>
        </v-card>
      </v-menu>
    </v-col>

    <v-col>
      <slot name="append"></slot>
    </v-col>

  </v-row>

</template>

<style scoped>

.input {
  flex: 1 1 auto;
  line-height: 20px;
  padding: 8px 0 8px;
  min-height: 32px;
  max-height: 32px;
  max-width: 100%;
  min-width: 0px;
  width: 100%;
}

.input >>> .chunk {
  padding-inline: 1px;
  border: 1px solid;
}

.input >>> .chunk-empty {
  padding-inline: 1px;
}

.input >>> .chunk-overlap {
  padding-inline: 1px;
  border: 1px solid grey;
  color: grey;
}
</style>

<script>
export default {
  name: "DougalFixedStringEncoderField",

  components: {
  },

  props: {
    value: Object,
    properties: Object,
    colour: String,
    readonly: Boolean,
  },

  data () {
    return {
    }
  },

  watch: {
    // Grow the configured field width if the literal value no longer fits.
    "value.value": function (value, old) {
      if (value != null && String(value).length > this.value.length) {
        this.value.length = String(value).length;
      }
    }
  },

  computed: {

    field: {
      get () {
        return this.value;
      },
      set (v) {
        console.log("input", v);
        this.$emit("input", v);
      }
    },

    item () {
      return this.properties?.[this.value?.item] ?? {};
    },

    items () {
      return Object.entries(this.properties).map(i => ({text: i[1].summary ?? i[0], value: i[0]}))
    },

    name () {
      // TODO Use properties[item].summary or similar
      return this.item?.summary ?? this.value.item ?? "???";
    },

    type () {
      return this.item?.type ?? typeof this.value?.item ?? "undefined";
    },

    description () {
      return this.item?.description;
    }
  },

  methods: {
    reset () {
    }
  },

  mounted () {
    this.reset();
  }

}
</script>
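Editor's sketch (not part of the diff) of the "value.value" watcher above: when the literal text grows past the configured field width, the width follows it. A plain object stands in for the component's `value` prop; the sample data is invented.

const field = { value: "", length: 4 };

function onValueChange (value) {           // mirrors the watch handler
  if (value != null && String(value).length > field.length) {
    field.length = String(value).length;
  }
}

onValueChange("SOL");      // 3 <= 4 — length stays 4
onValueChange("EOLXX");    // 5 > 4  — length grows to 5
console.log(field.length); // 5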
@@ -1,351 +0,0 @@
<template>
  <v-input
    class="v-text-field"
    :hint="hint"
    persistent-hint
    :value="text"
  >
    <label
      class="v-label"
      :class="[ $vuetify.theme.isDark && 'theme--dark', text && text.length && 'v-label--active' ]"
      style="left: 0px; right: auto; position: absolute;"
    >{{ label }}</label>
    <div class="input" slot="default"
      v-html="html"
    >
    </div>
    <template slot="append">
      <v-menu
        scrollable
        offset-y
        :close-on-content-click="false"
      >

        <template v-slot:activator="{on, attrs}">
          <v-btn
            icon
            v-bind="attrs"
            v-on="on"
          >
            <v-icon title="Configure sample values">mdi-list-box-outline</v-icon>
          </v-btn>
        </template>

        <v-card>
          <v-card-title>Sample values</v-card-title>
          <v-card-subtitle>Enter sample values to test your configuration</v-card-subtitle>

          <v-divider></v-divider>

          <v-card-text>

            <v-container>
              <v-row v-for="(prop, key) in properties" :key="key">
                <template v-if="prop.type == 'boolean'">
                  <v-col cols="6" align-self="center">
                    <v-chip
                      outlined
                      label
                      small
                      :color="getHSLColourFor(key)"
                      :title="prop.description"
                    >{{prop.summary || key}}</v-chip>
                  </v-col>
                  <v-col cols="6" align-self="center">
                    <v-simple-checkbox v-model="values[key]"></v-simple-checkbox>
                  </v-col>
                </template>
                <template v-else-if="key != 'text'">
                  <v-col cols="6" align-self="center">
                    <v-chip
                      outlined
                      label
                      small
                      :color="getHSLColourFor(key)"
                      :title="prop.description"
                    >{{prop.summary || key}}</v-chip>
                  </v-col>
                  <v-col cols="6" align-self="center">
                    <v-text-field v-if="prop.type == 'number'"
                      :type="prop.type"
                      :label="prop.summary || key"
                      :hint="prop.description"
                      v-model.number="values[key]"
                    ></v-text-field>
                    <v-text-field v-else
                      :type="prop.type"
                      :label="prop.summary || key"
                      :hint="prop.description"
                      v-model="values[key]"
                    ></v-text-field>
                  </v-col>
                </template>
              </v-row>
            </v-container>

          </v-card-text>
        </v-card>

      </v-menu>
    </template>
    <v-icon slot="prepend">mdi-list</v-icon>
  </v-input>
</template>

<style scoped>

.input {
  flex: 1 1 auto;
  line-height: 20px;
  padding: 8px 0 8px;
  min-height: 32px;
  max-height: 32px;
  max-width: 100%;
  min-width: 0px;
  width: 100%;
  white-space-collapse: preserve;
}

.multiline {
  font-family: mono;
  white-space: pre;
  overflow-x: auto;
  overflow-y: auto;
}

.multiline >>> .line-number {
  display: inline-block;
  font-size: 75%;
  width: 5ex;
  margin-inline-end: 1ex;
  text-align: right;
  border: none;
  position: relative;
  top: -1px;
}

.input, .multiline >>> .chunk-field {
  padding-inline: 1px;
  border: 1px solid;
}

.input, .multiline >>> .chunk-fixed {
  padding-inline: 1px;
  border: 1px dashed;
}

.input, .multiline >>> .chunk-empty {
  padding-inline: 1px;
}

.input, .multiline >>> .chunk-overlap {
  padding-inline: 1px;
  border: 1px solid grey;
  color: grey;
}

.input >>> .chunk-mismatch {
  padding-inline: 1px;
  border: 2px solid red !important;
}

</style>

<script>
import { getHSLColourFor } from '@/lib/hsl'

export default {
  name: "DougalFixedStringEncoderSample",

  components: {
  },

  mixins: [
    {
      methods: {
        getHSLColourFor
      }
    }
  ],

  props: {
    properties: { type: Object, default: () => ({}) },
    fields: { type: Array, default: () => [] },
    values: { type: Object, default: () => ({}) },
    readonly: Boolean,
    label: String,
    hint: String,
  },

  data () {
    return {
    }
  },

  computed: {

    chunks () {
      const properties = this.properties;
      const fields = this.fields;
      const values = this.values;
      const chunks = [];

      for (const field of fields) {
        const value = this.fieldValue(properties, field, values);

        if (value != null) {
          const chunk = {
            start: field.offset,
            end: field.offset + field.length - 1,
            colour: this.getHSLColourFor(field.item),
            class: field.item == "text" ? "fixed" : "field",
            text: value
          };
          chunks.push(chunk);
        }
      }

      return chunks;
    },

    text () {
      return this.sample(this.properties, this.fields, this.values);
    },

    html () {
      return this.renderTextLine(this.text);
    }

  },

  watch: {
  },

  methods: {

    fieldValue (properties, field, values) {
      let value;

      if (field.item == "text") {
        value = field.value;
      } else if (properties[field.item]?.type == "boolean") {
        if (values[field.item] === field.when) {
          value = field.value;
        }
      } else {
        value = values[field.item];
      }

      if (value != null) {

        if (properties[field.item]?.type == "number") {
          if (field.scale_multiplier != null) {
            value *= field.scale_multiplier;
          }
          if (field.scale_offset != null) {
            value += field.scale_offset;
          }

          if (field.format == "integer") {
            value = Math.round(value);
          }
        }

        value = String(value);
        if (field.pad_side == "left") {
          value = value.padStart(field.length, field.pad_string ?? " ");
        } else if (field.pad_side == "right") {
          value = value.padEnd(field.length, field.pad_string ?? " ");
        }

        return value;
      }
    },

    sample (properties, fields, values, str = "") {

      const length = fields.reduce( (acc, cur) => (cur.offset + cur.length) > acc ? (cur.offset + cur.length) : acc, str.length )

      str = str.padEnd(length);

      for (const field of fields) {
        //console.log("FIELD", field);
        const value = this.fieldValue(properties, field, values);
        if (value != null) {
          str = str.slice(0, field.offset) + value + str.slice(field.offset + field.length);
        }
      }

      return str;
    },

    /** Return a `<span>` opening tag.
     */
    style (name, colour) {
      return colour
        ? `<span class="${name}" style="color:${colour};border-color:${colour}">`
        : `<span class="${name}">`;
    },

    /** Return an array of the intervals that intersect `pos`.
     * May be empty.
     */
    chunksFor (pos) {
      return this.chunks.filter( chunk =>
        pos >= chunk.start &&
        pos <= chunk.end
      )
    },

    /*
     * Algorithm:
     *
     * Go through every character of one line of text and determine which
     * part(s) it falls in, if any. Collect adjacent characters with the same
     * style into <span/> elements.
     */
    renderTextLine (text) {
      const parts = [];

      let prevStyle;

      for (const pos in text) {
        const chunks = this.chunksFor(pos);
        const isEmpty = chunks.length == 0;
        const isOverlap = chunks.length > 1;
        const isMismatch = chunks[0]?.text &&
          (text.substring(chunks[0].start, chunks[0].end+1) != chunks[0].text);

        const style = isEmpty
          ? this.style("chunk-empty")
          : isMismatch
          ? this.style("chunk-mismatch", chunks[0].colour)
          : isOverlap
          ? this.style("chunk-overlap")
          : this.style("chunk-"+chunks[0].class, chunks[0].colour);

        if (style != prevStyle) {
          if (prevStyle) {
            parts.push("</span>");
          }
          parts.push(style);
        }
        parts.push(text[pos]);
        prevStyle = style;
      }

      if (parts.length) {
        parts.push("</span>");
      }

      return parts.join("");
    },

  },

  mounted () {
  }

}
</script>
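Editor's sketch (not part of the diff) of the sample() method above: pad a buffer out to the rightmost field edge, then splice each field's value into its [offset, offset+length) slot. For simplicity, values here are always padded to the field width, whereas the component's fieldValue() only pads when pad_side is set; the field definitions and values are invented.

function sample (fields, values, str = "") {
  // widest extent of any field, or the existing buffer, whichever is larger
  const length = fields.reduce(
    (acc, cur) => Math.max(acc, cur.offset + cur.length), str.length);
  str = str.padEnd(length);

  for (const field of fields) {
    const value = String(values[field.item] ?? "").padEnd(field.length);
    // overwrite exactly the field's slot, leaving the rest of the buffer intact
    str = str.slice(0, field.offset) + value + str.slice(field.offset + field.length);
  }
  return str;
}

const fields = [
  { item: "line",  offset: 0, length: 6 },
  { item: "point", offset: 6, length: 4 }
];
console.log(JSON.stringify(sample(fields, { line: "L1040", point: 17 })));
// "L1040 17  "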
@@ -1,307 +0,0 @@
<template>
  <v-card flat elevation="0">
    <v-card-title v-if="title">{{ title }}</v-card-title>
    <v-card-subtitle v-if="subtitle">{{ subtitle }}</v-card-subtitle>
    <v-card-text>
      <v-form>

        <!-- Sample text -->

        <dougal-fixed-string-encoder-sample
          :label="label"
          :hint="hint"
          :properties="properties"
          :fields="fields"
          :values.sync="values"
        ></dougal-fixed-string-encoder-sample>

        <!-- Fields -->

        <v-container>

          <v-row no-gutters class="mb-2">
            <h4>Fields</h4>
          </v-row>

          <dougal-fixed-string-encoder-field v-for="(field, key) in fields" :key="key"
            v-model="fields[key]"
            :properties="properties"
            :colour="getHSLColourFor(field.item)"
            :readonly="readonly"
          >
            <template v-slot:append v-if="editableFieldList && !readonly">
              <v-btn
                class="ml-3"
                fab
                text
                small
                title="Remove this field"
              >
                <v-icon
                  color="error"
                  @click="removeField(key)"
                >mdi-minus</v-icon>
              </v-btn>
            </template>
          </dougal-fixed-string-encoder-field>

          <v-row no-gutters class="mb-2" v-if="editableFieldList && !readonly">
            <h4>Add new field</h4>
          </v-row>

          <dougal-fixed-string-encoder-field v-if="editableFieldList && !readonly"
            v-model="newField"
            :properties="properties"
            :colour="getHSLColourFor(newField.item)"
          >
            <template v-slot:prepend>
              <v-btn v-if="isFieldDirty(newField)"
                top
                text
                small
                title="Reset"
              >
                <v-icon
                  color="warning"
                  @click="resetField(newField)"
                >mdi-backspace-reverse-outline</v-icon>
              </v-btn>
            </template>
            <template v-slot:append>
              <v-btn
                class="ml-3"
                fab
                text
                small
                title="Add field"
                :disabled="isFieldValid(newField) !== true"
              >
                <v-icon
                  color="primary"
                  @click="addField(newField)"
                >mdi-plus</v-icon>
              </v-btn>
            </template>
          </dougal-fixed-string-encoder-field>

        </v-container>

      </v-form>
    </v-card-text>
    <v-card-actions>
    </v-card-actions>
  </v-card>
</template>

<style scoped>

.input {
  flex: 1 1 auto;
  line-height: 20px;
  padding: 8px 0 8px;
  min-height: 32px;
  max-height: 32px;
  max-width: 100%;
  min-width: 0px;
  width: 100%;
}

.input, .multiline >>> .chunk-field {
  padding-inline: 1px;
  border: 1px solid;
}

.input, .multiline >>> .chunk-fixed {
  padding-inline: 1px;
  border: 1px dashed;
}

.input, .multiline >>> .chunk-empty {
  padding-inline: 1px;
}

.input, .multiline >>> .chunk-overlap {
  padding-inline: 1px;
  border: 1px solid grey;
  color: grey;
}

.input >>> .chunk-mismatch {
  padding-inline: 1px;
  border: 2px solid red !important;
}
</style>

<script>
import { getHSLColourFor } from '@/lib/hsl'
import DougalFixedStringEncoderField from './fixed-string-encoder-field'
import DougalFixedStringEncoderSample from './fixed-string-encoder-sample'

export default {
  name: "DougalFixedStringEncoder",

  components: {
    DougalFixedStringEncoderField,
    DougalFixedStringEncoderSample
  },

  mixins: [
    {
      methods: {
        getHSLColourFor
      }
    }
  ],

  props: {
    properties: { type: Object },
    fields: { type: Array },
    values: { type: Object },
    editableFieldList: { type: Boolean, default: true },
    readonly: Boolean,
    title: String,
    subtitle: String,
    label: String,
    hint: String,
  },

  data () {
    return {
      //< The reason for not using this.text directly is that at some point
      //< we might extend this component to allow editing the sample text.
      text_: "",
      //< The value of a fixed string that should always be present at a specific position
      fixedName: "",
      fixedOffset: 0,
      //< The name of a new field to add.
      fieldName: "",
      newField: {
        item: null,
        when: null,
        offset: null,
        length: null,
        value: null,
        pad_side: null,
        pad_string: null
      }
    }
  },

  computed: {

    chunks () {
      const properties = this.properties;
      const fields = this.fields;
      const values = this.values;
      const chunks = [];

      for (const field of fields) {

        //console.log("FIELD", structuredClone(field));
        //console.log("VALUES DATA", values[field.item]);
        let value;

        if (field.item == "text") {
          value = field.value;
        } else if (properties[field.item]?.type == "boolean") {
          if (values[field.item] === field.when) {
            value = field.value;
          }
        } else {
          value = values[field.item];
        }

        if (value != null) {

          value = String(value);
          if (field.pad_side == "left") {
            value = value.padStart(field.length, field.pad_string);
          } else {
            value = value.padEnd(field.length, field.pad_string);
          }

          const chunk = {
            start: field.offset,
            end: field.offset + field.length - 1,
            colour: this.getHSLColourFor(field.item),
            class: field.item == "text" ? "fixed" : "field",
            text: value
          };

          //console.log("CHUNK", chunk);
          chunks.push(chunk);

        }

      }

      return chunks;
    },

    html () {
      return this.renderTextLine(this.sample(this.properties, this.fields, this.values));
      //return this.sample(this.properties, this.fields, this.values);
    }

  },

  watch: {
  },

  methods: {

    isFieldDirty (field) {
      return Object.entries(field).reduce( (acc, cur) => cur[1] === null ? acc : true, false );
    },

    // Returns `true` when the field is valid, or a human-readable error
    // string otherwise — so callers must compare against `true` explicitly.
    isFieldValid (field) {
      if (!field.item) return "Missing item";
      if (typeof field.offset !== "number" || field.offset < 0) return "Missing offset";
      if (typeof field.length !== "number" || field.length < 1) return "Missing length";
      if (!this.properties[field.item]) return "Unrecognised property";
      if (this.properties[field.item].type == "text" && !field.value?.length) return "Missing value";
      if (this.properties[field.item].type == "boolean" && !field.value?.length) return "Missing value (boolean)";
      if (!!field.pad_side && !field.pad_string) return "Missing pad string";

      return true;
    },

    resetField (field) {
      field.item = null;
      field.when = null;
      field.offset = null;
      field.length = null;
      field.value = null;
      field.pad_side = null;
      field.pad_string = null;

      return field;
    },

    addField (field) {
      if (this.isFieldValid(field) === true) {
        const fields = structuredClone(this.fields);
        fields.push({...field});
        this.resetField(field);
        console.log("update:fields", fields);
        this.$emit("update:fields", fields);
      }
    },

    removeField (key) {
      console.log("REMOVE", "update:fields", key, this.fields);
      const fields = structuredClone(this.fields);
      fields.splice(key, 1);
      this.$emit("update:fields", fields);
    },

  },

  mounted () {
  }

}
</script>
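Editor's note on the validator contract used above (not part of the diff): isFieldValid() returns either `true` or a truthy error string, which is why addField() must compare against `true` explicitly, as corrected above — a bare `if (isFieldValid(field))` would accept invalid fields. The `demo` helper below is hypothetical.

function demo (valid) {
  return valid === true ? "saved" : `rejected: ${valid}`;
}

console.log(demo(true));             // saved
console.log(demo("Missing item"));   // rejected: Missing item — truthy, but not valid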
@@ -44,7 +44,7 @@
        <template v-slot:activator="{ on, attrs }">
          <v-text-field
            v-model="tsDate"
            :disabled="!!(entrySequence || entryPoint)"
            :disabled="!!(sequence || point || entrySequence || entryPoint)"
            label="Date"
            suffix="UTC"
            prepend-icon="mdi-calendar"
@@ -64,7 +64,7 @@
        <v-col>
          <v-text-field
            v-model="tsTime"
            :disabled="!!(entrySequence || entryPoint)"
            :disabled="!!(sequence || point || entrySequence || entryPoint)"
            label="Time"
            suffix="UTC"
            prepend-icon="mdi-clock-outline"
@@ -123,11 +123,28 @@

      <v-row dense>
        <v-col cols="12">
          <dougal-event-select
            v-bind.sync="entryRemarks"
            :preset-remarks="presetRemarks"
            @update:labels="(v) => this.entryLabels = v"
          ></dougal-event-select>
          <v-combobox
            ref="remarks"
            v-model="entryRemarks"
            :disabled="loading"
            :search-input.sync="entryRemarksInput"
            :items="remarksAvailable"
            :filter="searchRemarks"
            item-text="text"
            return-object
            label="Remarks"
            prepend-icon="mdi-text-box-outline"
            append-outer-icon="mdi-magnify"
            @click:append-outer="(e) => remarksMenu = e"
          ></v-combobox>

          <dougal-context-menu
            :value="remarksMenu"
            @input="handleRemarksMenu"
            :items="presetRemarks"
            absolute
          ></dougal-context-menu>

        </v-col>
      </v-row>

@@ -238,15 +255,6 @@
          >
            Cancel
          </v-btn>
          <v-btn v-if="!id && (entrySequence || entryPoint)"
            color="info"
            text
            title="Enter an event by time"
            @click="timed"
          >
            <v-icon left small>mdi-clock-outline</v-icon>
            Timed
          </v-btn>
          <v-spacer></v-spacer>
          <v-btn
            :disabled="!canSave"
@@ -272,7 +280,6 @@
<script>
import { mapActions } from 'vuex';
import DougalContextMenu from '@/components/context-menu';
import DougalEventSelect from '@/components/event-select';

function stringSort (a, b) {
  return a == b
@@ -291,7 +298,6 @@ function flattenRemarks(items, keywords=[], labels=[]) {
    if (!item.items) {
      result.push({
        text: item.text,
        properties: item.properties,
        labels: labels.concat(item.labels??[]),
        keywords
      })
@@ -326,8 +332,7 @@ export default {
  name: 'DougalEventEdit',

  components: {
    DougalContextMenu,
    DougalEventSelect
    DougalContextMenu
  },

  props: {
@@ -339,7 +344,6 @@ export default {
    sequence: { type: Number },
    point: { type: Number },
    remarks: { type: String },
    meta: { type: Object },
    labels: { type: Array, default: () => [] },
    latitude: { type: Number },
    longitude: { type: Number },
@@ -357,11 +361,18 @@
    entrySequence: null,
    entryPoint: null,
    entryRemarks: null,
    entryRemarksInput: null,
    entryLatitude: null,
    entryLongitude: null
  }),

  computed: {
    remarksAvailable () {
      return this.entryRemarksInput == this.entryRemarks?.text ||
        this.entryRemarksInput == this.entryRemarks
        ? []
        : flattenRemarks(this.presetRemarks);
    },

    allSelected () {
      return this.entryLabels.length === this.items.length
@@ -373,6 +384,11 @@ export default {
        return true;
      }

      // The user is editing the remarks
      if (this.entryRemarksText != this.entryRemarksInput) {
        return true;
      }

      // Selected label set distinct from input labels
      if (distinctSets(this.selectedLabels, this.entryLabels, (i) => i.text)) {
        return true;
@@ -476,8 +492,11 @@ export default {

        this.entrySequence = this.sequence;
        this.entryPoint = this.point;
        this.entryRemarks = this.remarks;
        this.entryLabels = [...(this.labels??[])];
        this.makeEntryRemarks();

        // Focus remarks field
        this.$nextTick(() => this.$refs.remarks.focus());
      }
    },

@@ -548,13 +567,22 @@ export default {
      };
    },

    makeEntryRemarks () {
      this.entryRemarks = {
        template: null,
        schema: {},
        values: [],
        ...this.meta?.structured_values,
        text: this.remarks
    searchRemarks (item, queryText, itemText) {
      const needle = queryText.toLowerCase();
      const text = item.text.toLowerCase();
      const keywords = item.keywords.map(i => i.toLowerCase());
      const labels = item.labels.map(i => i.toLowerCase());
      return text.includes(needle) ||
        keywords.some(i => i.includes(needle)) ||
        labels.some(i => i.includes(needle));
    },

    handleRemarksMenu (event) {
      if (typeof event == 'boolean') {
        this.remarksMenu = event;
      } else {
        this.entryRemarks = event;
        this.remarksMenu = false;
      }
    },

@@ -603,14 +631,6 @@ export default {
      }
    },

    timed () {
      const tstamp = (new Date()).toISOString();
      this.entrySequence = null;
      this.entryPoint = null;
      this.tsDate = tstamp.substr(0, 10);
      this.tsTime = tstamp.substr(11, 8);
    },

    close () {
      this.entryLabels = this.selectedLabels.map(this.labelToItem)
      this.$emit("input", false);
@@ -619,24 +639,14 @@ export default {
    save () {
      // In case the focus goes directly from the remarks field
      // to the Save button.

      let meta;

      if (this.entryRemarks.values?.length) {
        meta = {
          structured_values: {
            template: this.entryRemarks.template,
            schema: this.entryRemarks.schema,
            values: this.entryRemarks.values
          }
        };
      if (this.entryRemarksInput != this.entryRemarksText) {
        this.entryRemarks = this.entryRemarksInput;
      }

      const data = {
        id: this.id,
        remarks: this.entryRemarksText,
        labels: this.entryLabels,
        meta
        labels: this.entryLabels
      };

      /* NOTE This is the purist way.

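Editor's sketch (not part of the diff) of the searchRemarks() filter introduced in the hunk above: a remark matches when the needle occurs in its text, any keyword, or any label, case-insensitively. The unused itemText parameter is dropped here, and the sample item is invented.

function searchRemarks (item, queryText) {
  const needle = queryText.toLowerCase();
  return item.text.toLowerCase().includes(needle) ||
    item.keywords.some(i => i.toLowerCase().includes(needle)) ||
    item.labels.some(i => i.toLowerCase().includes(needle));
}

const item = { text: "Gun autofire", keywords: ["source"], labels: ["QC"] };
console.log(searchRemarks(item, "qc"));     // true — matches a label
console.log(searchRemarks(item, "swell"));  // false — no match anywhere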
@@ -1,142 +0,0 @@
<template>
  <v-card flat>
    <v-card-subtitle v-text="text">
    </v-card-subtitle>
    <v-card-text style="max-height:350px;overflow:scroll;">
      <v-form>
        <template v-for="key in fieldKeys">
          <template v-if="schema[key].enum">
            <v-select v-if="schema[key].type == 'number'" :key="key"
              v-model.number="fieldValues[key]"
              :items="schema[key].enum"
              :label="schema[key].title"
              :hint="schema[key].description"
              @input="updateFieldValue(key, Number($event))"
            ></v-select>
            <v-select v-else :key="key"
              v-model="fieldValues[key]"
              :items="schema[key].enum"
              :label="schema[key].title"
              :hint="schema[key].description"
              @input="updateFieldValue(key, $event)"
            ></v-select>
          </template>
          <template v-else>
            <v-text-field v-if="schema[key].type == 'number'" :key="key"
              v-model.number="fieldValues[key]"
              type="number"
              :min="schema[key].minimum"
              :max="schema[key].maximum"
              :step="schema[key].multiplier"
              :label="schema[key].title"
              :hint="schema[key].description"
              @input="updateFieldValue(key, Number($event))"
            >
            </v-text-field>
            <v-text-field v-else-if="schema[key].type == 'string'" :key="key"
              v-model="fieldValues[key]"
              :label="schema[key].title"
              :hint="schema[key].description"
              @input="updateFieldValue(key, $event)"
            >
            </v-text-field>
            <v-checkbox v-else-if="schema[key].type == 'boolean'" :key="key"
              v-model="fieldValues[key]"
              :label="schema[key].title"
              :hint="schema[key].description"
              @change="updateFieldValue(key, $event)"
            >
            </v-checkbox>
            <v-text-field v-else :key="key"
              v-model="fieldValues[key]"
              :label="schema[key].title"
              :hint="schema[key].description"
              @input="updateFieldValue(key, $event)"
            >
            </v-text-field>
          </template>
        </template>
      </v-form>
    </v-card-text>
  </v-card>
</template>

<script>
import { mapActions, mapGetters } from 'vuex';

export default {
  name: "DougalEventPropertiesEdit",

  components: {
  },

  props: {
    value: String,
    template: String,
    schema: Object,
    values: Array
  },

  data () {
    return {
    }
  },

  computed: {

    fieldKeys () {
      return Object.entries(this.schema).sort((a, b) => a[1].title > b[1].title ? 1 : -1).map(i => i[0]);
    },

    fieldValues () {
      const keys = Object.keys(this.schema ?? this.values);
      return Object.fromEntries(
        keys.map( (k, idx) =>
          [ k, this.values?.[idx] ?? this.schema[k].default ]));
    },

    /*
    fields () {
      // TODO Remove this and rename fields → schema
      return this.schema;
    },
    */

    text () {
      if (this.template) {
        const rx = /{{([a-z_][a-z0-9_]*)}}/ig;
        return this.template.replace(rx, (match, p1) => this.fieldValues[p1] ?? "(n/a)");
      }
    }

  },

  watch: {

    values () {
      this.$emit("input", this.text);
    },

    template () {
      this.$emit("input", this.text);
    },

    schema () {
      this.$emit("input", this.text);
    }

  },

  methods: {
    updateFieldValue(key, ev) {
      const values = {...this.fieldValues};
      values[key] = ev;
      this.$emit("update:values", Object.values(values));
    }
  },

  mounted () {
  }
}

</script>
@@ -1,163 +0,0 @@
<template>
  <div>
    <v-combobox
      ref="remarks"
      :value="text"
      @input="handleComboBox"
      :search-input.sync="entryRemarksInput"
      :items="remarksAvailable"
      :filter="searchRemarks"
      item-text="text"
      return-object
      label="Remarks"
      hint="Placeholders: @DMS@, @DEG@, @EN@, @WD@, @BSP@, @CMG@, …"
      prepend-icon="mdi-text-box-outline"
      append-outer-icon="mdi-magnify"
      @click:append-outer="(e) => remarksMenu = e"
    ></v-combobox>

    <dougal-context-menu
      :value="remarksMenu"
      @input="handleRemarksMenu"
      :items="presetRemarks"
      absolute
    ></dougal-context-menu>

    <v-expansion-panels v-if="haveProperties"
      class="px-8"
      :value="0"
    >
      <v-expansion-panel>
        <v-expansion-panel-header>Properties</v-expansion-panel-header>
        <v-expansion-panel-content>
          <dougal-event-properties-edit
            :value="text"
            @input="$emit('update:text', $event)"
            :template="template"
            :schema="schema"
            :values="values"
            @update:values="$emit('update:values', $event)"
          >
          </dougal-event-properties-edit>
        </v-expansion-panel-content>
      </v-expansion-panel>
    </v-expansion-panels>
  </div>
</template>

<script>
import { mapActions, mapGetters } from 'vuex';
import DougalContextMenu from '@/components/context-menu';
import DougalEventPropertiesEdit from '@/components/event-properties';

export default {
  name: "DougalEventSelect",

  components: {
    DougalContextMenu,
    DougalEventPropertiesEdit
  },

  props: {
    text: String,
    template: String,
    schema: Object,
    values: Array,
    presetRemarks: Array
  },

  data () {
    return {
      entryRemarksInput: null,
      remarksMenu: false,
    }
  },

  computed: {
    remarksAvailable () {
      return this.entryRemarksInput == this.text
        ? []
        : this.flattenRemarks(this.presetRemarks);
    },

    haveProperties () {
      for (const key in this.schema) {
        return true;
      }
      return false;
    }

  },

  watch: {
  },

  methods: {


    flattenRemarks (items, keywords=[], labels=[]) {
      const result = [];

      if (items) {
        for (const item of items) {
          if (!item.items) {
            result.push({
              text: item.text,
              properties: item.properties,
              labels: labels.concat(item.labels??[]),
              keywords
            })
          } else {
            const k = [...keywords, item.text];
            const l = [...labels, ...(item.labels??[])];
            result.push(...this.flattenRemarks(item.items, k, l))
          }
        }
      }
      return result;
    },

    searchRemarks (item, queryText, itemText) {
      const needle = queryText.toLowerCase();
      const text = item.text.toLowerCase();
      const keywords = item.keywords.map(i => i.toLowerCase());
      const labels = item.labels.map(i => i.toLowerCase());
      return text.includes(needle) ||
        keywords.some(i => i.includes(needle)) ||
        labels.some(i => i.includes(needle));
    },

    handleComboBox (event) {
      if (typeof event == "object") {
        this.$emit("update:text", event.text);
        this.$emit("update:template", event.template ?? event.text);
        this.$emit("update:schema", event.properties);
        this.$emit("update:labels", event.labels);
      } else {
        this.$emit("update:text", event);
        this.$emit("update:template", null);
        this.$emit("update:properties", null);
        this.$emit("update:labels", []);
      }
    },

    handleRemarksMenu (event) {
      if (typeof event == 'boolean') {
        this.remarksMenu = event;
      } else {
        this.$emit("update:text", event.text);
        this.$emit("update:template", event.template ?? event.text);
        this.$emit("update:schema", event.properties);
        this.$emit("update:labels", event.labels);
        this.remarksMenu = false;
      }
    },
  },

  mount () {
    // Focus remarks field
    this.$nextTick(() => this.$refs.remarks.focus());
  }
}

</script>
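For reference, a minimal standalone sketch of how the deleted flattenRemarks helper behaves: group nodes carry an items array and contribute their text as a search keyword (and their labels) to every leaf beneath them. The sample preset tree here is hypothetical, not taken from the repository.

    // Hypothetical preset tree: group nodes have `items`; leaves have `text`.
    const presets = [
      { text: "Weather", items: [
        { text: "Wind increasing", labels: ["weather"] },
        { text: "Swell above limits", labels: ["weather", "hse"] }
      ]}
    ];

    function flattenRemarks (items, keywords = [], labels = []) {
      const result = [];
      for (const item of items ?? []) {
        if (!item.items) {
          // Leaf: inherits the keywords and labels accumulated on the way down
          result.push({ text: item.text, labels: labels.concat(item.labels ?? []), keywords });
        } else {
          result.push(...flattenRemarks(item.items, [...keywords, item.text], [...labels, ...(item.labels ?? [])]));
        }
      }
      return result;
    }

    console.log(flattenRemarks(presets));
    // → two flat entries, each carrying keywords: ["Weather"]

This is why searchRemarks above can match a query against the group name ("Weather") even though the combobox only displays the leaf text.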
@@ -1,109 +0,0 @@
<template>
  <v-dialog
    max-width="600"
    :close-on-content-click="false"
    offset-y
  >
    <template v-slot:activator="{ on, attrs }">
      <v-chip
        class="ml-3"
        small
        :light="$vuetify.theme.isDark"
        :dark="!$vuetify.theme.isDark"
        :title="getFriendlyTypeName(value.type)"
        :color="getHSLColourFor(value.type||'str', .4, .5)"
        v-bind="attrs"
        v-on="on"
      >
        <v-icon small>{{ getTypeIcon(value.type||'str') }}</v-icon>
        <v-icon small v-if="value.enum"
          :title="'Values: '+Object.entries(value.enum).map(i => `${i[0]}=${i[1]}`).join('; ')+'\nDefault: '+value.default"
        >mdi-format-list-group</v-icon>
      </v-chip>
    </template>

    <dougal-field-content
      :readonly="readonly"
      :value="value"
      @input="$emit('input', $event)"
    ></dougal-field-content>

  </v-dialog>
</template>

<script>
import DougalFieldContent from './field-content'

export default {

  name: "DougalFieldContentDialog",

  components: {
    DougalFieldContent
  },

  props: {
    value: Object,
    readonly: Boolean
  },

  data () {
    return {
    };
  },

  computed: {
  },

  methods: {

    getFriendlyTypeName (type) {
      switch (type) {
        case "str":
          return "Text";
        case "int":
          return "Integer";
        case "float":
          return "Float";
        case "bool":
          return "Boolean";
        default:
          return type ?? "Text (default)";
      }
    },

    getTypeIcon (type) {
      switch (type) {
        case "str":
          return "mdi-format-text-variant";
        case "int":
          return "mdi-numeric";
        case "float":
          return "mdi-decimal";
        case "bool":
          return "mdi-format-list-checks";
        default:
          return "mdi-format-text";
      }
    },

    getHSLColourFor (str, saturation = 1, lightness = 0.25, offset = 0) {

      function getHash (v) {
        return [...v].reduce( (acc, cur) => String(cur).charCodeAt(0) + ((acc << 5) - acc), 0 );
      }

      const h = (getHash(str) + offset) % 360;
      const s = saturation * 100;
      const l = this.$vuetify.theme.isDark
        ? (1-lightness) * 100
        : lightness * 100;

      return `hsl(${h},${s}%,${l}%)`;
    }

  }

}

</script>
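The getHSLColourFor method above maps a string to a stable colour via a djb2-style hash, so the same type name always gets the same chip colour. A minimal sketch of the idea outside Vue (light-theme branch only; parameter defaults assumed from the component):

    function getHash (v) {
      // djb2-style rolling hash over the string's characters
      return [...v].reduce((acc, cur) => String(cur).charCodeAt(0) + ((acc << 5) - acc), 0);
    }

    function hslFor (str, saturation = 1, lightness = 0.25, offset = 0) {
      // May be negative in JS; CSS hue angles wrap, so this is still valid
      const h = (getHash(str) + offset) % 360;
      return `hsl(${h},${saturation * 100}%,${lightness * 100}%)`;
    }

    console.log(hslFor("str", 0.4, 0.5));  // same input always yields the same colour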
@@ -1,242 +0,0 @@
<template>
  <v-card flat elevation="0">
    <v-card-subtitle>Item options</v-card-subtitle>
    <v-card-text>
      <v-select
        label="Value type"
        v-model="type"
        :items="types"
        value="int"
        :readonly="readonly"
      ></v-select>

      <v-checkbox
        label="Enumerated values"
        v-model="enumerated"
        :readonly="readonly"
      ></v-checkbox>
    </v-card-text>

    <template v-if="enumerated">
      <v-card-subtitle>Valid options</v-card-subtitle>
      <v-card-text>
        <v-list dense>
          <v-list-item v-for="(out, key) in value.enum" :key=key
          >
            <v-list-item-content class="mr-1">
              <v-text-field
                dense
                hide-details="auto"
                v-model="key"
                :readonly="readonly"
              ></v-text-field>
            </v-list-item-content>
            <v-list-item-content class="ml-1">
              <v-select v-if="type == 'bool'"
                dense
                hide-details="auto"
                :items="[ true, false ]"
                v-model="value.enum[key]"
                :readonly="readonly"
              ></v-select>
              <v-text-field v-else
                dense
                hide-details="auto"
                v-model="value.enum[key]"
                :readonly="readonly"
              ></v-text-field>
            </v-list-item-content>
            <v-list-item-action>
              <v-icon
                small
                color="error"
                :disabled="readonly"
                @click="removeEnum(key)"
              >mdi-minus-circle</v-icon>
            </v-list-item-action>
          </v-list-item>

          <v-list-item v-if="!readonly"
          >
            <v-list-item-content class="mr-1">
              <v-text-field
                dense
                hide-details="auto"
                label="New input value"
                v-model="newEnumKey"
              ></v-text-field>
            </v-list-item-content>
            <v-list-item-content class="ml-1">
              <v-select v-if="type == 'bool'"
                dense
                hide-details="auto"
                label="New output value"
                :items="[ true, false ]"
                v-model="newEnumValue"
              ></v-select>
              <v-text-field v-else
                dense
                hide-details="auto"
                label="New output value"
                v-model="newEnumValue"
              ></v-text-field>
            </v-list-item-content>
            <v-list-item-action>
              <v-icon
                small
                color="primary"
                :disabled="!isNewEnumValid"
                @click="addEnum"
              >mdi-plus-circle</v-icon>
            </v-list-item-action>
          </v-list-item>

          <v-list-item>
            <v-list-item-content>
              <v-select v-if="type == 'bool'"
                dense
                hide-details="auto"
                label="Default value"
                hint="Value to use if none matches"
                :items="[ true, false ]"
                v-model="defaultValue"
                :readonly="readonly"
              ></v-select>
              <v-text-field v-else
                label="Default value"
                hint="Value to use if none matches"
                persistent-hint
                v-model="defaultValue"
                :readonly="readonly"
              ></v-text-field>
            </v-list-item-content>
            <v-list-item-action>
              <v-icon
                small
                color="secondary"
                :disabled="readonly"
                @click="defaultValue = null"
              >mdi-backspace</v-icon>
            </v-list-item-action>
          </v-list-item>

        </v-list>

      </v-card-text>
    </template>
  </v-card>
</template>

<script>

export default {
  name: "DougalFieldContent",

  props: {
    value: Object,
    readonly: Boolean
  },

  data () {
    return {
      newEnumKey: null,
      newEnumValue: null,
      types: [
        { text: "Text", value: "str" },
        { text: "Integer", value: "int" },
        { text: "Float", value: "float" },
        { text: "Boolean", value: "bool" },
      ]
    }
  },

  computed: {

    type: {
      get () {
        return this.value?.type ?? "str";
      },

      set (v) {
        this.$emit("input", {
          ...this.value,
          type: v
        })
      }
    },

    enumerated: {
      get () {
        return typeof this.value?.enum === "object";
      },

      set (v) {
        if (v) {
          this.$emit("input", {
            enum: {},
            ...this.value
          })
        } else {
          const obj = {...this.value};
          delete obj.enum;
          this.$emit("input", obj)
        }
      }
    },

    defaultValue: {

      get () {
        return this.value?.default;
      },

      set (v) {
        this.$emit("input", {
          ...this.value,
          "default": v
        });
      }

    },

    isNewEnumValid () {
      return !!(this.newEnumKey &&
        !Object.keys(this.value.enum).includes(this.newEnumKey) &&
        (typeof this.newEnumValue == "boolean" || this.newEnumValue));
    }

  },

  watch: {
  },

  methods: {

    addEnum () {
      this.$emit("input", {
        ...this.value,
        enum: {
          ...this.value.enum,
          [this.newEnumKey]: this.newEnumValue
        }
      });
      this.newEnumKey = null;
      this.newEnumValue = null;
    },

    removeEnum (key) {
      const obj = {...this.value.enum};
      delete obj[key];
      this.$emit("input", {
        ...this.value,
        enum: obj
      });
    }

  },

  mounted () {
  }

}
</script>
@@ -1,83 +0,0 @@
<template>
  <v-dialog
    max-width="600"
    v-model="open"
  >
    <template v-slot:activator="{ on, attrs }">
      <v-icon
        v-bind="attrs"
        v-on="on"
        :title="title"
      >mdi-folder-network-outline</v-icon>
    </template>
    <v-card>
      <v-card-title>File picker</v-card-title>
      <v-divider></v-divider>
      <v-card-text>
        <dougal-file-browser
          v-model="selected"
          :mimetypes="mimetypes"
          :root="root"
          ref="browser"
        >
        </dougal-file-browser>
      </v-card-text>
      <v-card-actions>
        <v-btn text @click="save" :disabled="!selected">
          <v-icon small flat color="primary" class="mr-2">mdi-content-save</v-icon>
          Ok
        </v-btn>
        <v-spacer></v-spacer>
        <v-btn text @click="refresh">
          <v-icon small flat class="mr-2">mdi-reload</v-icon>
          Refresh
        </v-btn>
        <v-spacer></v-spacer>
        <v-btn text @click="close">
          <v-icon small flat color="red" class="mr-2">mdi-close</v-icon>
          Cancel
        </v-btn>
      </v-card-actions>
    </v-card>
  </v-dialog>
</template>

<script>
import DougalFileBrowser from './file-browser';

export default {
  name: "DougalFileBrowserDialog",

  components: { DougalFileBrowser },

  props: [ "path", "mimetypes", "root", "title" ],

  data () {
    return {
      open: false,
      selected: ""
    }
  },

  methods: {

    refresh () {
      this.$refs.browser.refresh();
    },

    close () {
      this.open = false;
    },

    save () {
      this.$emit('input', this.selected);
      this.close();
    }

  },

  mounted () {
    this.selected = this.path;
  }
}
</script>
@@ -1,150 +0,0 @@
<template>
  <v-layout fill-height style="overflow-y:auto;max-height:400px;">
    <v-progress-circular v-if="loading && !items.length"></v-progress-circular>
    <v-treeview v-else
      activatable
      :active.sync="active"
      :items="items"
      item-key="path"
      item-name="basename"
      :load-children="readdir"
      @update:active="activeChanged"
      style="min-width:100%"
    >
      <template v-slot:label="{item}">
        <div style="cursor:pointer;">
          {{ item.basename }}
        </div>
      </template>
    </v-treeview>
  </v-layout>
</template>

<script>
import { mapActions, mapGetters } from 'vuex';

function find(haystack, needle) {
  for (const item of haystack) {
    if (item.path == needle) {
      return item;
    } else if (item.children) {
      const found = find(item.children, needle);
      if (found) {
        return found;
      }
    }
  }
}

export default {
  name: "DougalFileBrowser",

  props: [ "value", "mimetypes", "root" ],

  data () {
    return {
      loading: false,
      items: [],
      active: [],
      selected: null,
      path: "",
    };
  },

  computed: {

    dirsAreSelectable () {
      return !this.mimetypes ||
        this.mimetypes == "inode/directory" ||
        (Array.isArray(this.mimetypes) && this.mimetypes.includes("inode/directory"));
    }

  },

  watch: {
  },

  methods: {

    activeChanged (active) {
      const candidate = find(this.items, active[0]);
      if (!this.dirsAreSelectable && this.isDirectory(candidate)) {
        this.selected = null;
      } else {
        this.selected = candidate;
      }
      this.$emit("input", this.selected?.path);
    },

    isDirectory (item) {
      return item && item["Content-Type"] == "inode/directory";
    },

    filterMimetypes (item) {
      if (!this.mimetypes) {
        return true;
      } else if (Array.isArray(this.mimetypes)) {
        return item["Content-Type"] == "inode/directory" ||
          this.mimetypes.includes(item["Content-Type"].split(";")[0]) ||
          this.filterGlob(item);
      } else {
        return item["Content-Type"] == "inode/directory" ||
          this.mimetypes == item["Content-Type"].split(";")[0];
      }
      return false;
    },

    filterGlob (item) {
      const globs = (Array.isArray(this.mimetypes)
        ? this.mimetypes
        : [ this.mimetypes ])
        .filter(i => /^\*\..+$/.test(i));

      for (const glob of globs) {
        const ext = (glob.match(/^\*\.(.+)$/)||[])[1];
        if (item.path.toLowerCase().endsWith(ext.toLowerCase())) {
          return true;
        }
      }

      return false;
    },

    async readdir (item) {
      this.loading = true;
      const url = `/files/${item? item.path : (this.root || this.path || "")}`;
      const list = await this.api([url]);
      this.loading = false;
      const items = list?.map(item => {
        if (item["Content-Type"] == "inode/directory") {
          item.children = [];
        }
        item.id = item.path;
        item.name = item.basename;
        return item;
      }).filter(this.filterMimetypes);
      if (item) {
        item.children = items;
      } else {
        this.items = items;
      }
    },

    async refresh () {
      this.items = []
      this.$nextTick(this.readdir);
    },

    ...mapActions(["api"])

  },

  mounted () {
    if (this.value) {
      this.path = this.value;
    }
    this.readdir();
  }
}

</script>
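The filterGlob method above lets the mimetypes prop mix MIME types with *.ext-style patterns. A minimal sketch of the same matching logic as a free function, with hypothetical inputs:

    function filterGlob (mimetypes, path) {
      // Keep only the "*.ext" entries; MIME strings are handled elsewhere
      const globs = (Array.isArray(mimetypes) ? mimetypes : [mimetypes])
        .filter(i => /^\*\..+$/.test(i));
      for (const glob of globs) {
        const ext = (glob.match(/^\*\.(.+)$/) || [])[1];
        if (path.toLowerCase().endsWith(ext.toLowerCase())) return true;
      }
      return false;
    }

    console.log(filterGlob(["text/csv", "*.p190"], "/surveys/line-0042.P190")); // true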
@@ -9,17 +9,8 @@

      <v-spacer></v-spacer>

      <template v-if="isFrontendRemote">
        <template v-if="serverConnected">
          <v-icon v-if="isGatewayReliable" class="mr-6" title="Connected to server via gateway">mdi-cloud-outline</v-icon>
          <v-icon v-else class="mr-6" color="orange" title="Gateway connection is unreliable. Expect outages.">mdi-cloud-off</v-icon>
        </template>
        <v-icon v-else class="mr-6" color="red" :title="`Server connection lost: the gateway cannot reach the remote server.\nWe will reconnect automatically when the link with the remote server is restored.`">mdi-cloud-off</v-icon>
      </template>
      <template v-else>
        <v-icon v-if="serverConnected" class="mr-6" small title="Connected to server">mdi-lan-connect</v-icon>
        <v-icon v-else class="mr-6" small color="red" :title="`Server connection lost.\nWe will reconnect automatically when the server comes back.`">mdi-lan-disconnect</v-icon>
      </template>
      <v-icon v-if="serverConnected" class="mr-6" small title="Connected to server">mdi-lan-connect</v-icon>
      <v-icon v-else class="mr-6" small color="red" title="Server connection lost (we'll reconnect automatically when the server comes back)">mdi-lan-disconnect</v-icon>

      <dougal-notifications-control class="mr-6"></dougal-notifications-control>

@@ -38,7 +29,7 @@
<style>
@font-face {
  font-family: "Bank Gothic Medium";
  src: local("Bank Gothic Medium"), url("/public/fonts/bank-gothic-medium.woff");
  src: local("Bank Gothic Medium"), url("/fonts/bank-gothic-medium.woff");
}

.brand {
@@ -60,39 +51,13 @@ export default {
    DougalNotificationsControl
  },

  data () {
    return {
      lastGatewayErrorTimestamp: 0,
      gatewayErrorSilencePeriod: 60000,
    }
  },

  computed: {
    year () {
      const date = new Date();
      return date.getUTCFullYear();
    },

    ...mapState({
      serverConnected: state => state.notify.serverConnected,
      isFrontendRemote: state => state.api.serverInfo?.["remote-frontend"] ?? false,
      isGatewayReliable: state => state.api.isGatewayReliable
    })
  },

  watch: {

    isGatewayReliable (val) {
      if (val === false) {
        const elapsed = Date.now() - this.lastGatewayErrorTimestamp;
        const lastGatewayErrorTimestamp = Date.now();
        if (elapsed > this.gatewayErrorSilencePeriod) {
          this.$root.showSnack("Gateway error", "warning");
        }
      }
    }

    ...mapState({serverConnected: state => state.notify.serverConnected})
  }

};
</script>

@@ -1,12 +1,10 @@
<template>
  <v-card style="min-height:400px;" outlined>
  <v-card style="min-height:400px;">
    <v-card-title class="headline">
      Array inline / crossline error
      <v-spacer></v-spacer>
      <!--
      <v-switch v-model="scatterplot" label="Scatterplot"></v-switch>
      <v-switch class="ml-4" v-model="histogram" label="Histogram"></v-switch>
      -->
    </v-card-title>

    <v-container fluid fill-height>
@@ -37,6 +35,7 @@
<style scoped>

.graph-container {
  background-color: red;
  width: 100%;
  height: 100%;
}
@@ -59,8 +58,8 @@ export default {
      graph: [],
      busy: false,
      resizeObserver: null,
      scatterplot: true,
      histogram: true
      scatterplot: false,
      histogram: false
    };
  },

@@ -96,10 +95,6 @@ export default {
    scatterplot () {
      this.plot();
      this.$emit("update:settings", {[`${this.$options.name}.scatterplot`]: this.scatterplot});
    },

    "$vuetify.theme.isDark" () {
      this.plot();
    }
  },

@@ -180,11 +175,6 @@ export default {
          title: "Shotpoint",
          anchor: "x1"
        },
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        },
        plot_bgcolor:"rgba(0,0,0,0)",
        paper_bgcolor:"rgba(0,0,0,0)",
        meta: this.data.meta
      };

@@ -243,11 +233,6 @@ export default {
        xaxis: {
          title: "Crossline (m)"
        },
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        },
        plot_bgcolor:"rgba(0,0,0,0)",
        paper_bgcolor:"rgba(0,0,0,0)",
        meta: this.data.meta
      };

@@ -321,11 +306,6 @@ export default {
          domain: [ 0.55, 1 ],
          anchor: 'x2'
        },
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        },
        plot_bgcolor:"rgba(0,0,0,0)",
        paper_bgcolor:"rgba(0,0,0,0)",
        meta: this.data.meta
      };

@@ -1,12 +1,10 @@
<template>
  <v-card style="min-height:400px;" outlined>
  <v-card style="min-height:400px;">
    <v-card-title class="headline">
      Gun depth
      <v-spacer></v-spacer>
      <!--
      <v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
      <v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
      -->
    </v-card-title>

    <v-container fluid fill-height>
@@ -61,7 +59,7 @@ export default {
      busy: false,
      resizeObserver: null,
      shotpoint: true,
      violinplot: true
      violinplot: false
    };
  },

@@ -100,10 +98,6 @@ export default {
        this.plotViolin();
      }
      this.$emit("update:settings", {[`${this.$options.name}.violinplot`]: this.violinplot});
    },

    "$vuetify.theme.isDark" () {
      this.plot();
    }

  },
@@ -202,11 +196,6 @@ export default {
          title: "Shotpoint",
          showspikes: true
        },
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        },
        plot_bgcolor:"rgba(0,0,0,0)",
        paper_bgcolor:"rgba(0,0,0,0)",
        meta: this.data.meta
      };

@@ -243,11 +232,6 @@ export default {
          title: "Gun number",
          type: 'category'
        },
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        },
        plot_bgcolor:"rgba(0,0,0,0)",
        paper_bgcolor:"rgba(0,0,0,0)",
        meta: {
          point
        }
@@ -321,11 +305,6 @@ export default {
        xaxis: {
          title: "Gun number"
        },
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        },
        plot_bgcolor:"rgba(0,0,0,0)",
        paper_bgcolor:"rgba(0,0,0,0)",
        meta: this.data.meta
      };

@@ -1,5 +1,5 @@
<template>
  <v-card style="min-height:400px;" outlined>
  <v-card style="min-height:400px;">
    <v-card-title class="headline">
      Gun details
    </v-card-title>
@@ -76,10 +76,6 @@ export default {
      if (this.violinplot) {
        this.plotViolin();
      }
    },

    "$vuetify.theme.isDark" () {
      this.plot();
    }

  },
@@ -336,11 +332,6 @@ export default {
          title: "Shotpoint",
          showspikes: true
        },
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        },
        plot_bgcolor:"rgba(0,0,0,0)",
        paper_bgcolor:"rgba(0,0,0,0)",

        meta: this.data.meta
      };

@@ -1,12 +1,10 @@
<template>
  <v-card style="min-height:400px;" outlined>
  <v-card style="min-height:400px;">
    <v-card-title class="headline">
      Gun pressures
      <v-spacer></v-spacer>
      <!--
      <v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
      <v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
      -->
    </v-card-title>

    <v-container fluid fill-height>
@@ -61,7 +59,7 @@ export default {
      busy: false,
      resizeObserver: null,
      shotpoint: true,
      violinplot: true
      violinplot: false
    };
  },

@@ -100,10 +98,6 @@ export default {
        this.plotViolin();
      }
      this.$emit("update:settings", {[`${this.$options.name}.violinplot`]: this.violinplot});
    },

    "$vuetify.theme.isDark" () {
      this.plot();
    }

  },
@@ -129,7 +123,7 @@ export default {
      const gunPressuresSorted = gunPressures.map(s => d3a.sort(s));
      const gunVolumes = guns.map(s => s.map(g => g[12]));
      const gunPressureWeights = gunVolumes.map( (s, sidx) => s.map( v => v/meta[sidx].volume ));
      const gunsWeightedAvgPressure = gunPressures.map( (s, sidx) =>
      const gunsWeightedAvgPressure = gunPressures.map( (s, sidx) =>
        d3a.sum(s.map( (pressure, gidx) => pressure * gunPressureWeights[sidx][gidx] )) / d3a.sum(gunPressureWeights[sidx])
      );

@@ -216,11 +210,6 @@ export default {
          title: "Shotpoint",
          showspikes: true
        },
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        },
        plot_bgcolor:"rgba(0,0,0,0)",
        paper_bgcolor:"rgba(0,0,0,0)",
        meta: this.data.meta
      };

@@ -260,11 +249,6 @@ export default {
          title: "Gun number",
          type: 'category'
        },
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        },
        plot_bgcolor:"rgba(0,0,0,0)",
        paper_bgcolor:"rgba(0,0,0,0)",
        meta: {
          point
        }
@@ -338,11 +322,6 @@ export default {
        xaxis: {
          title: "Gun number"
        },
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        },
        plot_bgcolor:"rgba(0,0,0,0)",
        paper_bgcolor:"rgba(0,0,0,0)",
        meta: this.data.meta
      };

@@ -1,12 +1,10 @@
<template>
  <v-card style="min-height:400px;" outlined>
  <v-card style="min-height:400px;">
    <v-card-title class="headline">
      Gun timing
      <v-spacer></v-spacer>
      <!--
      <v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
      <v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
      -->
    </v-card-title>

    <v-container fluid fill-height>
@@ -61,7 +59,7 @@ export default {
      busy: false,
      resizeObserver: null,
      shotpoint: true,
      violinplot: true
      violinplot: false
    };
  },

@@ -100,10 +98,6 @@ export default {
        this.plotViolin();
      }
      this.$emit("update:settings", {[`${this.$options.name}.violinplot`]: this.violinplot});
    },

    "$vuetify.theme.isDark" () {
      this.plot();
    }

  },
@@ -202,11 +196,6 @@ export default {
          title: "Shotpoint",
          showspikes: true
        },
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        },
        plot_bgcolor:"rgba(0,0,0,0)",
        paper_bgcolor:"rgba(0,0,0,0)",
        meta: this.data.meta
      };

@@ -243,11 +232,6 @@ export default {
          title: "Gun number",
          type: 'category'
        },
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        },
        plot_bgcolor:"rgba(0,0,0,0)",
        paper_bgcolor:"rgba(0,0,0,0)",
        meta: {
          point
        }
@@ -321,11 +305,6 @@ export default {
        xaxis: {
          title: "Gun number"
        },
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        },
        plot_bgcolor:"rgba(0,0,0,0)",
        paper_bgcolor:"rgba(0,0,0,0)",
        meta: this.data.meta
      };

@@ -127,7 +127,7 @@ export default {
  },

  computed: {
    ...mapGetters(['user', 'loading', 'serverEvent'])
    ...mapGetters(['user', 'writeaccess', 'loading', 'serverEvent'])
  },

  methods: {

@@ -1,299 +0,0 @@
<template>
  <div ref="graph"
    class="graph-container"
  ></div>
</template>

<style scoped>
.graph-container {
  width: 100%;
  height: 100%;
}
</style>

<script>
import Plotly from 'plotly.js-dist';
import unpack from '@/lib/unpack.js';

export default {
  name: "DougalGraphProjectSequenceInlineCrossline",

  props: {
    items: Array,
    gunDataFormat: { type: String, default: "smsrc" },
    facet: { type: String, default: "scatter" }
  },

  data () {
    return {
      plotted: false,
      resizeObserver: null
    };
  },

  computed: {

    config () {
      switch (this.facet) {
        case "scatter":
        default:
          return {
            editable: false,
            displaylogo: false
          };
      }
    },

    layout () {
      const base = {
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        }
      };

      switch (this.facet) {
        case "scatter":
          return {
            ...base,
            autocolorscale: true,
            title: {text: `Preplot deviation <span style="font-size:smaller;">(x̅: %{data[0].meta.avg_x} ±%{data[0].meta.std_x} m; y̅: %{data[0].meta.avg_y} ±%{data[0].meta.std_y} m)</span>`},
            xaxis: {
              title: "Crossline (m)"
            },
            yaxis: {
              title: "Inline (m)"
            },
            plot_bgcolor:"rgba(0,0,0,0)",
            paper_bgcolor:"rgba(0,0,0,0)"
          };

        case "crossline":
          return {
            ...base,
            autocolorscale: true,
            title: {text: `Crossline deviation <span style="font-size:smaller;">(x̅: %{data[0].meta.avg_x} ±%{data[0].meta.std_x} m)</span>`},
            xaxis: {
              title: "Shotpoint"
            },
            yaxis: {
              title: "Crossline (m)"
            },
            plot_bgcolor:"rgba(0,0,0,0)",
            paper_bgcolor:"rgba(0,0,0,0)"
          };

        case "2dhist":
          return {
            ...base,
            showlegend: true,
            title: {text: `Preplot deviation <span style="font-size:smaller;">(x̅: %{data[0].meta.avg_x} ±%{data[0].meta.std_x} m; y̅: %{data[0].meta.avg_y} ±%{data[0].meta.std_y} m)</span>`},
            xaxis: {
              title: "Crossline (m)",
              showgrid: true,
              zeroline: true
            },
            yaxis: {
              title: "Inline (m)",
              showgrid: true,
              zeroline: true
            },
            plot_bgcolor:"rgba(0,0,0,0)",
            paper_bgcolor:"rgba(0,0,0,0)"
          };

        case "c-o":
          return {
            ...base,
            showlegend: true,
            title: {
              text: this.data[0]?.x?.length
                ? `Final vs raw <span style="font-size:smaller;">(x̅: %{data[0].meta.avg_x} ±%{data[0].meta.std_x} m; y̅: %{data[0].meta.avg_y} ±%{data[0].meta.std_y} m)</span>`
                : `Final vs raw: no data`
            },
            xaxis: {
              title: "Crossline (m)",
              showgrid: true,
              zeroline: true
            },
            yaxis: {
              title: "Inline (m)",
              showgrid: true,
              zeroline: true
            },
            plot_bgcolor:"rgba(0,0,0,0)",
            paper_bgcolor:"rgba(0,0,0,0)"
          };
      }
    },

    data () {
      if (!this.items?.length) {
        return [];
      }

      let x, y, avg_x, avg_y, std_x, std_y;

      const items = this.items.sort( (a, b) => a.point - b.point );
      const meta = unpack(items, "meta");
      const src_number = unpack(unpack(unpack(meta, "raw"), this.gunDataFormat), "src_number");

      if (this.facet == "c-o") {
        const _items = items.filter(i => i.errorfinal && i.errorraw);
        const εf = unpack(unpack(_items, "errorfinal"), "coordinates");
        const εr = unpack(unpack(_items, "errorraw"), "coordinates");

        x = εf.map( (f, idx) => f[0] - εr[idx][0] )
        y = εf.map( (f, idx) => f[1] - εr[idx][1] )

      } else {
        const coords = unpack(unpack(items, ((row) => row?.errorfinal ? row.errorfinal : row.errorraw)), "coordinates");

        x = unpack(coords, 0);
        y = unpack(coords, 1);


      }

      // No chance of overflow
      avg_x = (x.reduce((acc, cur) => acc + cur, 0) / x.length).toFixed(2);
      avg_y = (y.reduce((acc, cur) => acc + cur, 0) / y.length).toFixed(2);
      std_x = Math.sqrt(x.reduce((acc, cur) => (cur-avg_x)**2 + acc, 0) / x.length).toFixed(2);
      std_y = Math.sqrt(y.reduce((acc, cur) => (cur-avg_y)**2 + acc, 0) / y.length).toFixed(2);

      if (this.facet == "scatter") {

        const data = [{
          type: "scatter",
          mode: "markers",
          x,
          y,
          meta: { avg_x, avg_y, std_x, std_y},
          transforms: [{
            type: "groupby",
            groups: src_number,
            styles: [
              {target: 1, value: {line: {color: "green"}}},
              {target: 2, value: {line: {color: "red"}}},
              {target: 3, value: {line: {color: "blue"}}}
            ]
          }],
        }];

        return data;

      } else if (this.facet == "crossline") {

        const s = unpack(items, "point");

        const data = [{
          type: "scatter",
          x: s,
          y: x,
          meta: { avg_x, avg_y, std_x, std_y},
          _transforms: [{
            type: "groupby",
            groups: src_number,
            styles: [
              {target: 1, value: {line: {color: "green"}}},
              {target: 2, value: {line: {color: "red"}}},
              {target: 3, value: {line: {color: "blue"}}}
            ]
          }],
        }];

        return data;

      } else if (this.facet == "2dhist" || this.facet == "c-o") {

        const bottomValue = this.$vuetify.theme.isDark
          ? ['0.0', 'rgba(0,0,0,0)']
          : ['0.0', 'rgb(165,0,38)'];
        const topValue = this.$vuetify.theme.isDark
          ? ['1.0', 'rgb(49,54,149)']
          : ['1.0', 'rgba(0,0,0,0)'];

        const colourscale = this.facet == "c-o"
          ? [bottomValue, [0.1, 'rgb(0,0,0)'], [0.9, 'rgb(255,255,255)'], topValue]
          : [
            bottomValue,
            ['0.111111111111', 'rgb(215,48,39)'],
            ['0.222222222222', 'rgb(244,109,67)'],
            ['0.333333333333', 'rgb(253,174,97)'],
            ['0.444444444444', 'rgb(254,224,144)'],
            ['0.555555555556', 'rgb(224,243,248)'],
            ['0.666666666667', 'rgb(171,217,233)'],
            ['0.777777777778', 'rgb(116,173,209)'],
            ['0.888888888889', 'rgb(69,117,180)'],
            topValue
          ];

        const data = [{
          type: "histogram2dcontour",
          ncontours: 20,
          colorscale: colourscale,
          showscale: false,
          reversescale: !this.$vuetify.theme.isDark,
          contours: {
            coloring: this.facet == "c-o" ? "fill" : "heatmap",
          },
          x,
          y,
          meta: { avg_x, avg_y, std_x, std_y}
        }];

        return data;

      }
    }
  },

  watch: {
    items (cur, prev) {
      if (cur != prev) {
        this.plot();
      }
    },

    "$vuetify.theme.isDark" () {
      this.plot();
    }
  },

  methods: {

    plot () {
      if (this.items?.length) {
        Plotly.newPlot(this.$refs.graph, this.data, this.layout, this.config);
        this.plotted = true;
      } else {
        Plotly.purge(this.$refs.graph);
        this.plotted = false;
      }
    },

    replot () {
      if (this.plotted) {
        const ref = this.$refs.graph;
        Plotly.relayout(ref, {
          width: ref.clientWidth,
          height: ref.clientHeight
        });
      }
    }

  },

  mounted () {
    this.resizeObserver = new ResizeObserver(this.replot)
    this.resizeObserver.observe(this.$refs.graph);
  },

  beforeDestroy () {
    if (this.resizeObserver) {
      this.resizeObserver.unobserve(this.$refs.graph);
    }
  }

}

</script>
@@ -1,205 +0,0 @@
<template>
  <div ref="graph"
    class="graph-container"
  ></div>
</template>

<style scoped>
.graph-container {
  width: 100%;
  height: 100%;
}
</style>

<script>
import Plotly from 'plotly.js-dist';
import unpack from '@/lib/unpack.js';

export default {
  name: "DougalGraphProjectSequenceShotpointTiming",

  props: {
    items: Array,
    gunDataFormat: { type: String, default: "smsrc" },
    facet: { type: String, default: "bars" }
  },

  data () {
    return {
      plotted: false,
      resizeObserver: null
    };
  },

  computed: {

    config () {
      return {
        editable: false,
        displaylogo: false
      };
    },

    layout () {
      return {
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        },
        title: {text: "Shotpoint timing %{data[0].meta.subtitle}"},
        xaxis: {
          title: "Shotpoint"
        },
        yaxis: {
          title: "Time (s)"
        },
        plot_bgcolor:"rgba(0,0,0,0)",
        paper_bgcolor:"rgba(0,0,0,0)"
      };
    },

    data () {

      const items = this.items.map(i => {
        return {
          point: i.point,
          tstamp: new Date(i.tstamp)
        }
      }).sort( (a, b) => a.tstamp - b.tstamp );
      const x = [...unpack(items, "point")];
      const y = items.map( (i, idx, ary) => (ary[idx+1]?.tstamp - i.tstamp)/1000 );
      const src_number = unpack(this.items, ["meta", "raw", this.gunDataFormat, "src_number"]);

      // We're dealing with intervals not points
      x.pop(); y.pop(); src_number.pop();

      const meta = {};

      const stats = this.stats(x, y, src_number);

      // We need to do the subtitle here rather than in layout as layout knows nothing
      // about the number of arrays

      if (stats.src_ids.length == 1) {
        meta.subtitle = `<span style="font-size:smaller;">(μ = ${stats.avg.all.toFixed(2)} ±${stats.std.all.toFixed(2)} s)</span>`;
      } else {
        meta.subtitle = `<span style="font-size:smaller;">(μ = ${stats.avg.all.toFixed(2)} ±${stats.std.all.toFixed(2)} s)</span>`;
        const per_source = [];
        for (const key in stats.avg) {
          if (key == "all") continue;
          const s = `μ<sub>${key}</sub> = ${stats.avg[key].toFixed(2)} ±${stats.std[key].toFixed(2)} s`;
          per_source.push(s);
        }
        meta.subtitle += `<br><span style="font-size:smaller;">` + per_source.join("; ") + "</span>";
      }


      const trace0 = {
        type: "bar",
        x,
        y,
        transforms: [{
          type: "groupby",
          groups: src_number,
          styles: [
            {value: {showlegend: false}},
            {target: 1, value: {line: {color: "green"}}},
            {target: 2, value: {line: {color: "red"}}},
            {target: 3, value: {line: {color: "blue"}}}
          ]
        }],
        meta
      };

      switch (this.facet) {
        case "lines":
          trace0.type = "scatter";
          break;
        case "area":
          trace0.type = "scatter";
          trace0.fill = "tozeroy";
          break;
        case "bars":
        default:
          // Nothing
      }

      return [trace0]
    }
  },

  watch: {
    items (cur, prev) {
      if (cur != prev) {
        this.plot();
      }
    },

    "$vuetify.theme.isDark" () {
      this.plot();
    }
  },

  methods: {

    plot () {
      if (this.items?.length) {
        Plotly.newPlot(this.$refs.graph, this.data, this.layout, this.config);
        this.plotted = true;
      } else {
        Plotly.purge(this.$refs.graph);
        this.plotted = false;
      }
    },

    replot () {
      if (this.plotted) {
        const ref = this.$refs.graph;
        Plotly.relayout(ref, {
          width: ref.clientWidth,
          height: ref.clientHeight
        });
      }
    },

    stats (x, y, src_number) {
      const avg = {};
      const std = {};

      const avg_all = (y.reduce((acc, cur) => acc + cur, 0) / y.length);
      const std_all = Math.sqrt(y.reduce((acc, cur) => (cur-avg_all)**2 + acc, 0) / y.length);

      avg.all = avg_all;
      std.all = std_all;

      const src_ids = new Set(src_number);

      for (const src of src_ids) {
        // Ignore shots without source data
        if (!src) continue;

        const v = y.filter((i, idx) => src_number[idx] == src);
        const μ = (v.reduce((acc, cur) => acc + cur, 0) / v.length);
        const σ = Math.sqrt(v.reduce((acc, cur) => (cur-μ)**2 + acc, 0) / v.length);
        avg[src] = μ;
        std[src] = σ;
      }

      return { avg, std, src_ids };
    }

  },

  mounted () {
    this.resizeObserver = new ResizeObserver(this.replot)
    this.resizeObserver.observe(this.$refs.graph);
  },

  beforeDestroy () {
    if (this.resizeObserver) {
      this.resizeObserver.unobserve(this.$refs.graph);
    }
  }

}

</script>
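The stats method above computes a population mean and standard deviation over all shot intervals and then per source. The same calculation as a standalone sketch (the sample values are hypothetical):

    function stats (y, src_number) {
      const mean = v => v.reduce((acc, cur) => acc + cur, 0) / v.length;
      const stdev = (v, mu) => Math.sqrt(v.reduce((acc, cur) => (cur - mu) ** 2 + acc, 0) / v.length);

      const avg = { all: mean(y) };
      const std = { all: stdev(y, avg.all) };

      for (const src of new Set(src_number)) {
        if (!src) continue;                      // skip shots without source data
        const v = y.filter((_, idx) => src_number[idx] == src);
        avg[src] = mean(v);
        std[src] = stdev(v, avg[src]);
      }
      return { avg, std };
    }

    console.log(stats([10.0, 10.2, 9.9, 10.1], [1, 2, 1, 2]));
    // → overall mean 10.05 s, plus a mean/σ entry per source id

Note that the component checks stats.src_ids.length on a Set, which has size rather than length; as written that comparison is always false, so the multi-source branch is always taken (a harmless quirk, since both branches set the same overall subtitle).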
@@ -1,187 +0,0 @@
<template>
  <v-card v-if="comparison" class="ma-1">
    <v-card-title>Comparison Summary: Baseline {{ baseline.pid }} vs Monitor {{ monitor.pid }}</v-card-title>
    <v-card-text>
      <v-row>
        <v-col cols="12" md="6">
          <h3>Deviation Statistics</h3>
          <v-simple-table dense>
            <template v-slot:default>
              <thead>
                <tr>
                  <th>Metric</th>
                  <th>I (m)</th>
                  <th>J (m)</th>
                </tr>
              </thead>
              <tbody>
                <tr>
                  <td>Mean (μ)</td>
                  <td>{{ comparison['μ'][0].toFixed(3) }}</td>
                  <td>{{ comparison['μ'][1].toFixed(3) }}</td>
                </tr>
                <tr>
                  <td>Std Dev (σ)</td>
                  <td>{{ comparison['σ'][0].toFixed(3) }}</td>
                  <td>{{ comparison['σ'][1].toFixed(3) }}</td>
                </tr>
                <tr>
                  <td>RMS</td>
                  <td>{{ comparison.rms[0].toFixed(3) }}</td>
                  <td>{{ comparison.rms[1].toFixed(3) }}</td>
                </tr>
              </tbody>
            </template>
          </v-simple-table>

          <h3 class="mt-4">Error distribution</h3>
          <ul>
            <li title="Relative to I-axis positive direction">Primary Direction: {{ (comparison.primaryDirection * 180 / Math.PI).toFixed(2) }}°</li>
            <li>Anisotropy: {{ comparison.anisotropy.toFixed(2) }}</li>
            <li title="Length of the semi-major axis of the error ellipse">Semi-Major Axis: {{ semiMajorAxis.toFixed(2) }} m</li>
            <li title="Length of the semi-minor axis of the error ellipse">Semi-Minor Axis: {{ semiMinorAxis.toFixed(2) }} m</li>
            <li title="Area of the error ellipse">Error Ellipse Area: {{ ellipseArea.toFixed(2) }} m²</li>
          </ul>

          <h3 class="mt-4">Counts</h3>
          <ul>
            <li title="Unique line / point pairs found in both projects">Common Points: {{ comparison.common }}</li>
            <li title="Total number of points compared, including reshoots, infills, etc.">Comparison Length: {{ comparison.length }}</li>
            <li title="Number of points in the baseline project">Baseline Points: {{ comparison.baselineLength }} (Unique: {{ comparison.baselineUniqueLength }})</li>
            <li title="Number of points in the monitor project">Monitor Points: {{ comparison.monitorLength }} (Unique: {{ comparison.monitorUniqueLength }})</li>
          </ul>

          <p class="mt-3" title="Date and time when the comparison was last performed">Computation timestamp: {{ new Date(comparison.tstamp).toLocaleString() }}</p>
        </v-col>

        <v-col cols="12" md="6">
          <h3>Error Ellipse</h3>
          <svg width="300" height="300" style="border: 1px solid #ccc;">
            <g :transform="`translate(150, 150) scale(${ellipseScale})`">
              <line x1="0" y1="-150" x2="0" y2="150" stroke="lightgray" stroke-dasharray="5,5"/>
              <line x1="-150" y1="0" x2="150" y2="0" stroke="lightgray" stroke-dasharray="5,5"/>
              <ellipse
                :rx="Math.sqrt(comparison.eigenvalues[0])"
                :ry="Math.sqrt(comparison.eigenvalues[1])"
                :transform="`rotate(${ellipseAngle})`"
                fill="none"
                stroke="blue"
                stroke-width="2"
              />
              <line
                :x1="0"
                :y1="0"
                :x2="Math.sqrt(comparison.eigenvalues[0]) * Math.cos(ellipseRad)"
                :y2="Math.sqrt(comparison.eigenvalues[0]) * Math.sin(ellipseRad)"
                stroke="red"
                stroke-width="2"
                arrow-end="classic-wide-long"
              />
              <line
                :x1="0"
                :y1="0"
                :x2="Math.sqrt(comparison.eigenvalues[1]) * Math.cos(ellipseRad + Math.PI / 2)"
                :y2="Math.sqrt(comparison.eigenvalues[1]) * Math.sin(ellipseRad + Math.PI / 2)"
                stroke="green"
                stroke-width="2"
                arrow-end="classic-wide-long"
              />
            </g>
          </svg>
          <p class="text-caption">Ellipse scaled for visibility (factor: {{ ellipseScale.toFixed(1) }}). Axes represent sqrt(eigenvalues).</p>
        </v-col>
      </v-row>
    </v-card-text>
  </v-card>
</template>

<script>
export default {
  name: "DougalGroupComparisonSummary",

  props: {
    baseline: { type: Object, required: true },
    monitor: { type: Object, required: true },
    comparison: { type: Object, required: true }
  },

  data () {
    return {
    };
  },

  computed: {

    ellipseAngle () {
      if (!this.comparison) return 0;
      const ev = this.comparison.eigenvectors[0];
      return Math.atan2(ev[1], ev[0]) * 180 / Math.PI;
    },

    ellipseRad () {
      return this.ellipseAngle * Math.PI / 180;
    },

    ellipseRx () {
      if (!this.comparison) return 0;
      return Math.sqrt(this.comparison.eigenvalues[0]) * this.ellipseScale;
    },

    ellipseRy () {
      if (!this.comparison) return 0;
      return Math.sqrt(this.comparison.eigenvalues[1]) * this.ellipseScale;
    },

    ellipseScale () {
      if (!this.comparison) return 1;
      const maxSigma = Math.max(
        Math.sqrt(this.comparison.eigenvalues[0]),
        Math.sqrt(this.comparison.eigenvalues[1])
      );
      const maxMu = Math.max(
        Math.abs(this.comparison['μ'][0]),
        Math.abs(this.comparison['μ'][1])
      );
      //const maxExtent = maxMu + 3 * maxSigma;
      const maxExtent = 20;
      return 100 / maxExtent; // Adjust scale to fit within ~200 pixels diameter
    },

    ellipseArea () {
      if (!this.comparison) return 0;
      const a = Math.sqrt(this.comparison.eigenvalues[0]);
      const b = Math.sqrt(this.comparison.eigenvalues[1]);
      return Math.PI * a * b;
    },

    semiMajorAxis () {
      if (!this.comparison) return 0;
      return Math.max(
        Math.sqrt(this.comparison.eigenvalues[0]),
        Math.sqrt(this.comparison.eigenvalues[1])
      );
    },

    semiMinorAxis () {
      if (!this.comparison) return 0;
      return Math.min(
        Math.sqrt(this.comparison.eigenvalues[0]),
        Math.sqrt(this.comparison.eigenvalues[1])
      );
    },

    meanX () {
      return this.comparison ? this.comparison['μ'][0] : 0;
    },

    meanY () {
      return this.comparison ? this.comparison['μ'][1] : 0;
    },

    ellipseViewBox () {
      return '-150 -150 300 300';
    },

  }
}
</script>
@@ -1,118 +0,0 @@
<template>
  <v-card class="ma-1">
    <v-card-title>Group Repeatability Summary</v-card-title>
    <v-card-text>
      <p>Error ellipse area for each baseline-monitor pair. Lower values indicate better repeatability. Colors range from green (best) to red (worst).</p>
      <v-simple-table dense>
        <thead>
          <tr>
            <th>Baseline \ Monitor</th>
            <th v-for="project in projects" :key="project.pid">{{ project.pid }}</th>
          </tr>
        </thead>
        <tbody>
          <tr v-for="(baselineProject, rowIndex) in projects" :key="baselineProject.pid">
            <td>{{ baselineProject.pid }}</td>
            <td v-for="(monitorProject, colIndex) in projects" :key="monitorProject.pid">
              <v-tooltip v-if="colIndex > rowIndex" top>
                <template v-slot:activator="{ on, attrs }">
                  <div
                    :style="{ backgroundColor: getEllipseAreaColor(baselineProject.pid, monitorProject.pid), color: 'white', textAlign: 'center', padding: '4px' }"
                    v-bind="attrs"
                    v-on="on"
                    @click="emitInput(baselineProject, monitorProject)"
                  >
                    {{ formatEllipseArea(baselineProject.pid, monitorProject.pid) }}
                  </div>
                </template>
                <span v-if="getComp(baselineProject.pid, monitorProject.pid)">
                  <div>σ_i: {{ getComp(baselineProject.pid, monitorProject.pid).meta['σ'][0].toFixed(2) }} m</div>
                  <div>σ_j: {{ getComp(baselineProject.pid, monitorProject.pid).meta['σ'][1].toFixed(2) }} m</div>
                  <div>Anisotropy: {{ getComp(baselineProject.pid, monitorProject.pid).meta.anisotropy.toFixed(0) }}</div>
                  <div>Ellipse Area: {{ getEllipseArea(baselineProject.pid, monitorProject.pid).toFixed(2) }} m²</div>
                  <div>Primary Direction: {{ formatPrimaryDirection(getComp(baselineProject.pid, monitorProject.pid)) }}°</div>
                </span>
              </v-tooltip>
            </td>
          </tr>
        </tbody>
      </v-simple-table>
    </v-card-text>
  </v-card>
</template>

<script>
export default {
  name: 'DougalGroupRepeatabilitySummary',

  props: {
    comparisons: {
      type: Array,
      required: true
    },
    projects: {
      type: Array,
      required: true
    }
  },

  data () {
    return {
    };
  },

  computed: {
    compMap () {
      return new Map(this.comparisons.map(c => [`${c.baseline_pid}-${c.monitor_pid}`, c]));
    },
    minEllipseArea () {
      if (!this.comparisons.length) return 0;
      return Math.min(...this.comparisons.map(c => {
        const a = Math.sqrt(c.meta.eigenvalues[0]);
        const b = Math.sqrt(c.meta.eigenvalues[1]);
        return Math.PI * a * b;
      }));
    },
    maxEllipseArea () {
      if (!this.comparisons.length) return 0;
      return Math.max(...this.comparisons.map(c => {
        const a = Math.sqrt(c.meta.eigenvalues[0]);
        const b = Math.sqrt(c.meta.eigenvalues[1]);
        return Math.PI * a * b;
      }));
    }
  },
  methods: {
    getComp (basePid, monPid) {
      return this.compMap.get(`${basePid}-${monPid}`);
    },
    getEllipseArea (basePid, monPid) {
      const comp = this.getComp(basePid, monPid);
      if (!comp) return null;
      const a = Math.sqrt(comp.meta.eigenvalues[0]);
      const b = Math.sqrt(comp.meta.eigenvalues[1]);
      return Math.PI * a * b;
    },
    formatEllipseArea (basePid, monPid) {
      const val = this.getEllipseArea(basePid, monPid);
      return val !== null ? val.toFixed(1) : '';
    },
    getEllipseAreaColor (basePid, monPid) {
      const val = this.getEllipseArea(basePid, monPid);
      if (val === null) return '';
      const ratio = (val - this.minEllipseArea) / (this.maxEllipseArea - this.minEllipseArea);
      const hue = (1 - ratio) * 120;
      return `hsl(${hue}, 70%, 70%)`;
    },
    formatPrimaryDirection (comp) {
      if (!comp) return '';
      return (comp.meta.primaryDirection * 180 / Math.PI).toFixed(1);
    },
    emitInput (baselineProject, monitorProject) {
      if (this.getComp(baselineProject.pid, monitorProject.pid)) {
        this.$emit('input', baselineProject, monitorProject);
      }
    }
  }
}
</script>
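Both deleted summary components derive the error-ellipse area from the eigenvalues of the deviation covariance: the semi-axes are √λ₁ and √λ₂, so the area is π·√λ₁·√λ₂. A standalone sketch of the same arithmetic, with hypothetical eigenvalues:

    function ellipseArea (eigenvalues) {
      const a = Math.sqrt(eigenvalues[0]);   // one semi-axis length, in metres
      const b = Math.sqrt(eigenvalues[1]);   // the other semi-axis length
      return Math.PI * a * b;                // m² when the eigenvalues are in m²
    }

    console.log(ellipseArea([4.0, 1.0]));    // → π · 2 · 1 ≈ 6.283 m²

This single scalar is what the repeatability matrix colours from green (small area, tight error distribution) to red (large area).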
@@ -2,7 +2,6 @@
|
||||
<v-dialog
|
||||
v-model="dialog"
|
||||
max-width="500"
|
||||
scrollable
|
||||
style="z-index:2020;"
|
||||
>
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
@@ -15,54 +14,15 @@
    </template>

    <v-card>
      <v-window v-model="page">
        <v-window-item value="support">
          <v-card-title class="headline">
            Dougal user support
          </v-card-title>

          <v-card-text>
            <p>You can get help or report a problem by sending an email to <a :href="`mailto:${email}`">{{email}}</a>. Please include as much information as possible about your problem or question—screenshots are often a good idea, and data files may also be attached.</p>

            <p>When you write to the above address a ticket will be automatically created in the project's issue tracking system.</p>

            <v-alert dense type="info" border="left" outlined>
              <div class="text-body-2">
                You are using Dougal version:
                <ul>
                  <li><code>{{clientVersion}}</code> (client)</li>
                  <li><code>{{serverVersion}}</code> (server)</li>
                </ul>
              </div>
            </v-alert>
          </v-card-text>
        </v-window-item>

        <v-window-item value="changelog">
          <v-card-title class="headline">
            Dougal release notes
          </v-card-title>

          <v-card-text>
            <v-carousel v-model="releaseShown"
              :continuous="false"
              :cycle="false"
              :show-arrows="true"
              :hide-delimiters="true"
            >
              <v-carousel-item v-for="(release, i) in releaseHistory" :key="i">
                <pre>{{release}}</pre>
              </v-carousel-item>
            </v-carousel>
          </v-card-text>
        </v-window-item>

        <v-window-item value="serverinfo">
          <dougal-server-status :status="serverStatus"></dougal-server-status>
        </v-window-item>
      </v-window>

      <v-divider></v-divider>
@@ -73,7 +33,8 @@
        text
        :href="`mailto:${email}?Subject=Question`"
      >
        <v-icon class="d-lg-none">mdi-help-circle</v-icon>
        <span class="d-none d-lg-inline">Ask a question</span>
      </v-btn>

      <v-btn
@@ -81,10 +42,10 @@
        text
        href="mailto:dougal-support@aaltronav.eu?Subject=Bug report"
      >
        <v-icon class="d-lg-none">mdi-bug</v-icon>
        <span class="d-none d-lg-inline">Report a bug</span>
      </v-btn>

      <!---
      <v-btn
        color="info"
        text
@@ -93,37 +54,6 @@
      >
        <v-icon>mdi-rss</v-icon>
      </v-btn>
      --->

      <v-btn
        color="info"
        text
        title="View support info"
        :input-value="page == 'support'"
        @click="page = 'support'"
      >
        <v-icon>mdi-account-question</v-icon>
      </v-btn>

      <v-btn v-if="versionHistory"
        color="info"
        text
        title="View release notes"
        :input-value="page == 'changelog'"
        @click="page = 'changelog'"
      >
        <v-icon>mdi-history</v-icon>
      </v-btn>

      <v-btn v-if="serverStatus"
        color="info"
        text
        title="View server status"
        :input-value="page == 'serverinfo'"
        @click="page = 'serverinfo'"
      >
        <v-icon>mdi-server-network</v-icon>
      </v-btn>

      <v-spacer></v-spacer>
@@ -145,111 +75,15 @@
</template>

<script>
import { mapActions, mapGetters } from 'vuex';
import DougalServerStatus from './server-status';

export default {
  name: 'DougalHelpDialog',

  components: {
    DougalServerStatus
  },

  data () {
    return {
      dialog: false,
      email: "dougal-support@aaltronav.eu",
      feed: btoa(encodeURIComponent("https://gitlab.com/wgp/dougal/software.atom?feed_token=XSPpvsYEny8YmH75Nz5W")),
      serverStatus: null,
      clientVersion: process.env.DOUGAL_FRONTEND_VERSION ?? "(unknown)",
      serverVersion: null,
      versionHistory: null,
      releaseHistory: [],
      releaseShown: null,
      page: "support",

      lastUpdate: 0,
      updateInterval: 12000,
      refreshTimer: null
    };
  },

  computed: {
    sinceUpdate () {
      return this.lastUpdate
        ? (Date.now() - this.lastUpdate)
        : +Infinity;
    }
  },

  watch: {
    dialog (newVal) {
      if (newVal) {
        this.startAutoRefresh();
      } else {
        this.stopAutoRefresh();
      }
    },
    page (newVal) {
      if (newVal === 'serverinfo' && this.dialog) {
        this.getServerStatus(); // Immediate update when switching to serverinfo
        this.startAutoRefresh();
      } else {
        this.stopAutoRefresh();
      }
    }
  },

  methods: {
    async getServerVersion () {
      if (!this.serverVersion) {
        const version = await this.api(['/version', {}, null, {silent:true}]);
        this.serverVersion = version?.tag ?? "(unknown)";
        if (version) this.lastUpdate = Date.now();
      }
      if (!this.versionHistory) {
        const history = await this.api(['/version/history?count=6', {}, null, {silent:true}]);
        this.releaseHistory = history;
        this.versionHistory = history?.[this.serverVersion.replace(/-.*$/, "")] ?? null;
      }
    },

    async getServerStatus () {
      const status = await this.api(['/diagnostics', {}, null, {silent: true}]);
      if (status) {
        this.serverStatus = status;
        this.lastUpdate = Date.now();
      }
    },

    startAutoRefresh () {
      if (this.refreshTimer) return; // Prevent multiple timers
      this.refreshTimer = setInterval(() => {
        if (this.dialog && this.page === 'serverinfo') {
          this.getServerStatus();
          // Optionally refresh server version if needed
          // this.getServerVersion();
        }
      }, this.updateInterval);
    },

    stopAutoRefresh () {
      if (this.refreshTimer) {
        clearInterval(this.refreshTimer);
        this.refreshTimer = null;
      }
    },

    ...mapActions(["api"])
  },

  async mounted () {
    this.getServerVersion();
    this.getServerStatus();
  },

  beforeDestroy () {
    this.stopAutoRefresh(); // Clean up timer on component destruction
  }
};
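The auto-refresh code above follows a guarded-timer pattern: a single interval is created on demand, every tick re-checks that the dialog is still open and the server-info page is active, and the timer is cleared both when the dialog closes and when the component is destroyed. A framework-free sketch of the same pattern (all names here are illustrative, not Dougal APIs):

// Illustrative sketch of the guarded-timer pattern used above.
function makeAutoRefresh (tick, intervalMs, isActive) {
  let timer = null;
  return {
    start () {
      if (timer) return;                 // prevent multiple timers
      timer = setInterval(() => {
        if (isActive()) tick();          // only refresh while still relevant
      }, intervalMs);
    },
    stop () {
      if (timer) {
        clearInterval(timer);
        timer = null;
      }
    }
  };
}

// Usage: const refresh = makeAutoRefresh(fetchStatus, 12000, () => dialogOpen);
// call refresh.start() on open and refresh.stop() on close/destroy.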
@@ -1,557 +0,0 @@
<template>
  <v-container>
    <v-row>
      <v-treeview
        dense
        activatable
        hoverable
        :multiple-active="false"
        :active.sync="active"
        :open.sync="open"
        :items="treeview"
        style="cursor:pointer;width:100%;"
      >
        <template v-slot:prepend="{item}">
          <template v-if="item.icon">
            <v-icon
              small
              left
              :title="item.leaf ? item.type : `${item.type} (${item.children.length} children)`"
            >{{item.icon}}</v-icon>
          </template>
        </template>

        <template v-slot:label="{item}">
          <template v-if="!('path' in item)">
            {{item.name}}
          </template>
          <template v-else-if="item.leaf">
            <v-chip
              small
              label
              outlined
              :color="item.isArrayItem ? 'secondary' : 'primary'"
            >
              {{item.name}}
            </v-chip>
            <code class="ml-4" v-if="item.type == 'bigint'">{{item.value+"n"}}</code>
            <code class="ml-4" v-else-if="item.type == 'boolean'"><b>{{item.value}}</b></code>
            <code class="ml-4" v-else>{{item.value}}</code>
            <v-icon v-if="item.type == 'string' && (/^#[0-9a-fA-F]{3}([0-9a-fA-F]{3}([0-9a-fA-F]{2})?)?$/.test(item.value) || item.name == 'colour' || item.name == 'color')"
              right
              :color="item.value"
            >mdi-square</v-icon>
          </template>
          <template v-else>
            <v-chip
              small
              label
              outlined
              :color="item.isArrayItem ? 'secondary' : 'primary'"
            >
              {{item.name}}
            </v-chip>
          </template>
        </template>

        <template v-slot:append="{item}">
          <template>
            <v-icon v-if="item.type == 'array'"
              small
              right
              outline
              color="primary"
              title="Add item"
              @click="itemAddDialog(item)"
            >mdi-plus</v-icon>
            <v-icon v-if="item.type == 'object'"
              small
              right
              outline
              color="primary"
              title="Add property"
              @click="itemAddDialog(item)"
            >mdi-plus</v-icon>
            <v-icon v-if="item.type == 'boolean'"
              small
              right
              outline
              color="primary"
              title="Toggle value"
              @click="itemToggle(item)"
            >{{ item.value ? "mdi-checkbox-blank-outline" : "mdi-checkbox-marked-outline" }}</v-icon>
            <v-icon v-if="item.type == 'string' || item.type == 'number'"
              small
              right
              outline
              color="primary"
              title="Edit value"
              @click="itemAddDialog(item, true)"
            >mdi-pencil-outline</v-icon>
            <v-icon
              small
              right
              outlined
              color="red"
              title="Delete"
              :disabled="item.id == rootId"
              @click="itemDelete(item)"
            >mdi-minus</v-icon>
          </template>
        </template>
      </v-treeview>

      <dougal-json-builder-property-dialog
        :open="editor"
        v-model="edit"
        v-bind="editorProperties"
        @save="editorSave"
        @close="editorClose"
      ></dougal-json-builder-property-dialog>
    </v-row>
  </v-container>
</template>
<script>
import { deepValue, deepSet } from '@/lib/utils';
import DougalJsonBuilderPropertyDialog from './property-dialog';

export default {
  name: "DougalJsonBuilder",

  components: {
    DougalJsonBuilderPropertyDialog
  },

  props: {
    value: Object,
    name: String,
    sort: String
  },

  data () {
    const rootId = Symbol("rootNode");
    return {
      rootId,
      active: [],
      open: [ rootId ],
      editor: false,
      editorProperties: {
        nameShown: true,
        nameEditable: true,
        typeShown: true,
        typeEditable: true,
        valueShown: true,
        serialisable: true
      },
      onEditorSave: (evt) => {},
      edit: {
        name: null,
        type: null,
        value: null
      }
    };
  },

  computed: {

    treeview () {

      // Comparator factory: sort nodes by the given key.
      function sorter (key) {
        return function λ (a, b) {
          return a?.[key] > b?.[key]
            ? 1
            : a?.[key] < b?.[key]
              ? -1
              : 0;
        }
      }

      // Classify a value into the type names used by the tree.
      function getType (value) {
        const t = typeof value;
        switch (t) {
          case "symbol":
          case "string":
          case "bigint":
          case "number":
          case "boolean":
          case "undefined":
            return t;
          case "object":
            return value === null
              ? "null"
              : Array.isArray(value)
                ? "array"
                : t;
        }
      }

      function getIcon (type) {
        switch (type) {
          case "symbol":
            return "mdi-symbol";
          case "string":
            return "mdi-format-text";
          case "bigint":
          case "number":
            return "mdi-numeric";
          case "boolean":
            return "mdi-checkbox-intermediate-variant";
          case "undefined":
            return "mdi-border-none-variant";
          case "null":
            return "mdi-null";
          case "array":
            return "mdi-list-box-outline";
          case "object":
            return "mdi-format-list-bulleted-type";
        }
        return "mdi-help";
      }

      // Build one treeview node from a [key, value] entry, recursing
      // into objects and arrays.
      const leaf = ([key, value], parent) => {
        const id = parent
          ? parent.id + "." + key
          : key;
        const name = key;
        const type = getType(value);
        const icon = getIcon(type);
        const isArrayItem = parent?.type == "array";

        const obj = {
          id,
          name,
          type,
          icon,
          isArrayItem,
        };

        if (parent) {
          obj.path = [...parent.path, key];
        } else {
          obj.path = [ key ];
        }

        if (type == "object" || type == "array") {
          const children = [];
          for (const child of Object.entries(value)) {
            children.push(leaf(child, obj));
          }
          if (this.sort) {
            children.sort(sorter(this.sort));
          }
          obj.children = children;
        } else {
          obj.leaf = true;
          obj.value = value;
          /*
          obj.children = [{
            id: id+".value",
            name: String(value)
          }]
          */
        }

        return obj;
      }

      const rootNode = {
        id: this.rootId,
        name: this.name,
        type: getType(this.value),
        icon: getIcon(getType(this.value)),
        children: []
      };
      const view = [rootNode];

      if (this.value) {
        for (const child of Object.entries(this.value)) {
          rootNode.children.push(leaf(child));
        }
        if (this.sort) {
          rootNode.children.sort(sorter(this.sort));
        }
      }

      return view;
    }

  },

  watch: {
    treeview () {
      if (!this.open.includes(this.rootId)) {
        this.open.push(this.rootId);
      }
    }
  },

  methods: {

    // NOTE: the `open` parameter is currently unused; the method only
    // ever opens nodes.
    openAll (open = true) {
      const walk = (obj) => {
        if (obj?.children) {
          for (const child of obj.children) {
            walk(child);
          }
          if (obj?.id) {
            this.open.push(obj.id);
          }
        }
      }
      for (const item of this.treeview) {
        walk(item);
      }
    },

    itemDelete (item) {
      const parents = [...item.path];
      const key = parents.pop();

      if (key) {
        const value = structuredClone(this.value);
        const obj = parents.length ? deepValue(value, parents) : value;

        if (Array.isArray(obj)) {
          obj.splice(key, 1);
        } else {
          delete obj[key];
        }

        this.$emit("input", value);
      } else {
        this.$emit("input", {});
      }
    },

    itemToggle (item, state) {
      const parents = [...item.path];
      // `value` is reassigned in the rootless branch below, so it must be
      // declared with `let` (the original `const` would throw a TypeError).
      let value = structuredClone(this.value);

      if (parents.length) {
        deepSet(value, parents, state ?? !item.value);
      } else {
        value = state ?? !item.value;
      }

      this.$emit("input", value);
    },
    // Replace the value at `path`, or the whole document when `path` is
    // empty. A `null` final key means "append" for arrays.
    itemSet (path, content) {
      const parents = [...(path ?? [])];
      const key = parents.pop();

      if (key !== undefined) {
        const value = structuredClone(this.value);
        const obj = parents.length ? deepValue(value, parents) : value;

        if (Array.isArray(obj)) {
          if (key === null) {
            obj.push(content);
          } else {
            obj[key] = content;
          }
        } else {
          obj[key] = content;
        }

        this.$emit("input", value);
      } else {
        this.$emit("input", content);
      }
    },

    itemAdd (path, content) {
      let value = structuredClone(this.value);
      let path_ = [...(path ?? [])];

      if (path_.length) {
        try {
          deepSet(value, path_, content);
        } catch (err) {
          // deepSet fails on a missing intermediate node; fall back to a
          // plain assignment via itemSet.
          if (err instanceof TypeError) {
            this.itemSet(path, content);
            return;
          }
        }
      } else {
        value = content;
      }

      this.$emit("input", value);
    },

    itemAddDialog (item, edit = false) {

      if (!this.open.includes(item.id)) {
        this.open.push(item.id);
      }

      if (edit) {
        this.editorReset({
          name: item.name,
          type: item.type,
          value: item.value
        }, {nameEditable: false});
      } else {
        this.editorReset({}, {
          nameShown: item.type != "array",
          nameRequired: item.type != "array"
        });
      }

      this.onEditorSave = (evt) => {
        this.editor = false;

        // Coerce the edited value to the type selected in the dialog.
        let transformer;
        switch (this.edit.type) {
          case "symbol":
            transformer = Symbol;
            break;
          case "string":
            transformer = String;
            break;
          case "bigint":
            transformer = BigInt;
            break;
          case "number":
            transformer = Number;
            break;
          case "boolean":
            transformer = Boolean;
            break;
          case "undefined":
            transformer = () => { return undefined; };
            break;
          case "object":
            transformer = (v) =>
              typeof v == "object"
                ? v
                : (typeof v == "string" && v.length)
                  ? JSON.parse(v)
                  : {};
            break;
          case "null":
            transformer = () => null;
            break;
          case "array":
            // FIXME not great
            transformer = (v) =>
              Array.isArray(v)
                ? v
                : [];
            break;
        }

        const value = transformer(this.edit.value);

        const path = [...(item.path ?? [])];

        if (!edit) {
          if (item.type == "array") {
            path.push(null);
          } else {
            path.push(this.edit.name);
          }
        }
        this.itemAdd(path, value);
      };
      this.editor = true;

    },
    // Disabled legacy variant of itemAddDialog (the XX prefix keeps it
    // unused); note it still calls itemAdd with an outdated signature.
    XXitemEditDialog (item) {

      this.editorReset({
        name: item.name,
        type: item.type,
        value: item.value}, {nameEditable: false});

      this.onEditorSave = (evt) => {
        this.editor = false;

        let transformer;
        switch (this.edit.type) {
          case "symbol":
            transformer = Symbol;
            break;
          case "string":
            transformer = String;
            break;
          case "bigint":
            transformer = BigInt;
            break;
          case "number":
            transformer = Number;
            break;
          case "boolean":
            transformer = Boolean;
            break;
          case "undefined":
            transformer = () => { return undefined; };
            break;
          case "object":
            transformer = (v) =>
              typeof v == "object"
                ? v
                : (typeof v == "string" && v.length)
                  ? JSON.parse(v)
                  : {};
            break;
          case "null":
            transformer = () => null;
            break;
          case "array":
            // FIXME not great
            transformer = (v) =>
              Array.isArray(v)
                ? v
                : [];
            break;
        }

        const key = this.edit.name;
        const value = transformer(this.edit.value);
        this.itemAdd(item, key, value);
      }
      this.editor = true;

    },

    // Reset the property-editor dialog state and its presentation flags.
    editorReset (values, props) {
      this.edit = {
        name: values?.name,
        type: values?.type,
        value: values?.value
      };

      this.editorProperties = {
        nameShown: props?.nameShown ?? true,
        nameEditable: props?.nameEditable ?? true,
        nameRequired: props?.nameRequired ?? true,
        typeShown: props?.typeShown ?? true,
        typeEditable: props?.typeEditable ?? true,
        valueShown: props?.valueShown ?? true,
        serialisable: props?.serialisable ?? true
      };
    },

    editorSave (evt) {
      this.onEditorSave?.(evt);
    },

    editorClose () {
      this.editor = false;
    }
  }
}

</script>
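The builder relies on two helpers imported from '@/lib/utils' that are not shown in this diff: deepValue (read a nested property by path) and deepSet (write one without creating missing intermediate nodes). A guess at minimal implementations, to make the control flow above easier to follow; this is an assumption, not the actual Dougal code:

// Hypothetical reconstructions of the '@/lib/utils' helpers.
function deepValue (obj, path) {
  // Walk the path, yielding undefined when a segment is missing.
  return path.reduce((node, key) => node?.[key], obj);
}

function deepSet (obj, path, value) {
  const parents = [...path];
  const key = parents.pop();
  const target = parents.length ? deepValue(obj, parents) : obj;
  // Throws TypeError when an intermediate node is missing, which is
  // exactly the case itemAdd() catches before falling back to itemSet().
  target[key] = value;
}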
Some files were not shown because too many files have changed in this diff.