Mirror of https://gitlab.com/wgp/dougal/software.git (synced 2025-12-06 11:07:08 +00:00)
Compare commits: 182-period ... 280-consol

262 commits:

4c2a2617a1 5021888d03 bf633f7fdf 847f49ad7c 171feb9dd2 503a0de12f cf89a43f64 680e376ed1
a26974670a 16a6cb59dc 829e206831 83244fcd1a 851369a0b4 5065d62443 2d1e1e9532 051049581a
da5ae18b0b ac9353c101 c4c5c44bf1 d3659ebf02 6b5070e634 09ff96ceee f231acf109 e576e1662c
6a21ddd1cd c1e35b2459 eee2a96029 6f5e5a4d20 9e73cb7e00 d7ab4eec7c cdd96a4bc7 39a21766b6
0e33c18b5c 7f411ac7dd ed1da11c9d 66ec28dd83 b928d96774 73335f9c1e 7b6b81dbc5 2e11c574c2
d07565807c 6eccbf215a 8abc05f04e 8f587467f9 3d7a91c7ff 3fd408074c f71cbd8f51 915df8ac16
d5ecb08a2d 9388cd4861 180590b411 4ec37539bf 8755fe01b6 0bfe54e0c2 29bc689b84 65682febc7
d408665d62 64fceb0a01 ab58e578c9 0e58b8fa5b 99ac082f00 4d3fddc051 42456439a9 ee0c0e7308
998c272bf8 daddd1f0e8 17f20535cb 0829ea3ea1 2069d9c3d7 8a2d526c50 8ad96d6f73 947faf8c05
a948556455 835384b730 c5b93794f4 056cd32f0e 49bb413110 ceccc42050 aa3379e1c6 4063af0e25
d53e6060a4 85d8fc8cc0 0fe40b1839 21de4b757f 96cdbb2cff d531643b58 a1779ef488 5239dece1e
a7d7837816 ebcfc7df47 dc4b9002fe 33618b6b82 597d407acc 6162a5bdee 696bbf7a17 821fcf0922
b1712d838f 895b865505 5a2af5c49e 24658f4017 6707cda75e 1302a31b3d 871a1e8f3a 04e1144bab
6312d94f3e ed91026319 441a4e296d c33c3f61df 2cc293b724 ee129b2faa 98d9b3b093 57b9b420f8
9e73f2603a 707889be42 f9a70e0145 b71489cee1 0a9bde5f10 36d5862375 398c702004 b2d1798338
4f165b0c83 2c86944a51 5fc51de7d8 158e0fb788 941d15c1bc cd00f8b995 44515f8e78 54fbc76da5
c1b5196134 fb3d3be546 8e11e242ed 8a815ce3ef 91076a50ad e624dcdde0 a25676122c e4dfbe2c9a
78fb34d049 38c4125f4f 04d6cbafe3 e6319172d8 5230ff63e3 2b364bbff7 c4b330b2bb 308eda6342
e8b1cb27f1 ed14fd0ced fb10e56487 56ed0cbc79 227e588782 53f2108e37 ccf4bbf547 c99a625b60
25ab623328 455888bdac b650ece0ee 2cb96c0252 70cf59bb4c ec03627119 675c19f060 6721b1b96b
b4f23822c4 3dd1aaeddb 1e593e6d75 ddbcb90c1f 229fdf20ef 72e67d0e5d b26fefbc37 04e0482f60
62f90846a8 1f9c0e56fe fe9d3563a0 38a07dffc6 1a6500308f 6033b45ed3 33edef6647 8f8e8b7492
ab5e3198aa 60ed850d2d 63b9cc5b16 f2edd2bec5 44ad59130f ecbb1e04ee 7cb2c3ef49 ff4f6bfd78
fbe0cb5efa aa7cbed611 89061f6411 838883d8a3 cd196f1acd a2b894fceb c3b3a4c70f 8118641231
6d8a199a3c 5a44e20a5b 374739c133 992205da4a f5e08c68af 105fee0623 aff974c03f bada6dc2e2
d5aac5e84d 3577a2ba4a 04df9f41cc fdb5e0cbab 4b832babfd cc3a9b4e5c da5a708760 9834e85eb9
e19601218a 15c56d3f64 632dd1ee75 aeff5a491d 9179c9332d bb5de9a00e d6b985fcd2 3ed8339aa3
1b925502bc 7cea79a9be 69f565f357 23de4d00d7 1992efe914 c7f3f565cd 1da02738b0 732d8e9be6
a2bd614b17 003c833293 a4c458dc16 f7b6ca3f79 a7cce69c81 2b20a5d69f 4fc5d1deda df13343063
a5603cf243 b6d4236325 7e8f00d9f2 721cfb36d1 222c951e49 45d2e56ed1 c5b6c87278 fd37e8b8d6
ce0310d0b0 546bc45861 602f2c0a34 37de5ab223 d69c6c4150 d80f44547b 6c8515a879 bb9340a0af
672c14fb67 f4ee798bf0 c8ef089b28 1f6d560d7e f37e07796c 349c052db0 1c291db6c6 f46fd4b6bc
10883eb1a6 af6e419aab 6516896bae c495dce27d 40d96230d2 d607b4618a
.gitignore (vendored, 2 lines changed)
@@ -11,3 +11,5 @@ lib/www/client/dist/
 etc/surveys/*.yaml
 !etc/surveys/_*.yaml
 etc/ssl/*
+etc/config.yaml
+var/*
bin/check_mounts_present.py (new executable file, 27 lines)
@@ -0,0 +1,27 @@
+#!/usr/bin/python3
+
+"""
+Check if any of the directories provided in the imports.mounts configuration
+section are empty.
+
+Returns 0 if all directories are non-empty, 1 otherwise. It stops at the first
+empty directory.
+"""
+
+import os
+import configuration
+
+cfg = configuration.read()
+
+if cfg and "imports" in cfg and "mounts" in cfg["imports"]:
+
+    mounts = cfg["imports"]["mounts"]
+    for item in mounts:
+        with os.scandir(item) as contents:
+            if not any(contents):
+                exit(1)
+
+else:
+    print("No mounts in configuration")
+
+exit(0)
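For context, a sketch of the parsed configuration this check consumes; the mount path matches the sample etc/config.yaml further down this diff:

    # Sketch of what configuration.read() returns for this script;
    # "/srv/mnt/Data" is the mount listed in the sample etc/config.yaml.
    cfg = {
        "imports": {
            "mounts": ["/srv/mnt/Data"],  # directories that must be non-empty
        }
    }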
@@ -1,4 +1,5 @@
 import os
+import pathlib
 from glob import glob
 from yaml import full_load as _load
 
@@ -11,6 +12,18 @@ surveys should be under $HOME/etc/surveys/*.yaml. In both cases,
 $HOME is the home directory of the user running this script.
 """
 
+def is_relative_to(it, other):
+    """
+    is_relative_to() is not present before Python 3.9, so we
+    need this kludge to get Dougal to run on OpenSUSE 15.4
+    """
+
+    if "is_relative_to" in dir(it):
+        return it.is_relative_to(other)
+
+    return str(pathlib.Path(it).absolute()).startswith(str(pathlib.Path(other).absolute()))
+
+
-prefix = os.environ.get("DOUGAL_ROOT", os.environ.get("HOME", ".")+"/software")
+DOUGAL_ROOT = os.environ.get("DOUGAL_ROOT", os.environ.get("HOME", ".")+"/software")
@@ -54,6 +67,10 @@ def files (globspec = None, include_archived = False):
     quickly and temporarily “disabling” a survey configuration by renaming
     the relevant file.
     """
 
+    print("This method is obsolete")
+    return
+
     tuples = []
 
     if globspec is None:
@@ -87,3 +104,73 @@ def rxflags (flagstr):
     for flag in flagstr:
         flags |= cases.get(flag, 0)
     return flags
+
+def translate_path (file):
+    """
+    Translate a path from a Dougal import directory to an actual
+    physical path on disk.
+
+    Any user files accessible by Dougal must be under a path prefixed
+    by `(config.yaml).imports.paths`. The value of `imports.paths` may
+    be either a string, in which case this represents the prefix under
+    which all Dougal data resides, or a dictionary where the keys are
+    logical paths and their values the corresponding physical path.
+    """
+    cfg = read()
+    root = pathlib.Path(DOUGAL_ROOT)
+    filepath = pathlib.Path(file).resolve()
+    import_paths = cfg["imports"]["paths"]
+
+    if filepath.is_absolute():
+        if type(import_paths) == str:
+            # Substitute the root for the real physical path
+            # NOTE: `root` deals with import_paths not being absolute
+            prefix = root.joinpath(pathlib.Path(import_paths)).resolve()
+            return str(pathlib.Path(prefix).joinpath(*filepath.parts[2:]))
+        else:
+            # Look for a match on the second path element
+            if filepath.parts[1] in import_paths:
+                # NOTE: `root` deals with import_paths[…] not being absolute
+                prefix = root.joinpath(import_paths[filepath.parts[1]])
+                return str(pathlib.Path(prefix).joinpath(*filepath.parts[2:]))
+            else:
+                # This path is invalid
+                raise TypeError("invalid path or file: {0!r}".format(filepath))
+    else:
+        # A relative filepath is always resolved relative to the logical root
+        root = pathlib.Path("/")
+        return translate_path(root.joinpath(filepath))
+
+def untranslate_path (file):
+    """
+    Attempt to convert a physical path into a logical one.
+    See `translate_path()` above for details.
+    """
+    cfg = read()
+    dougal_root = pathlib.Path(DOUGAL_ROOT)
+    filepath = pathlib.Path(file).resolve()
+    import_paths = cfg["imports"]["paths"]
+    physical_root = pathlib.Path("/")
+
+    if filepath.is_absolute():
+        if type(import_paths) == str:
+            if is_relative_to(filepath, import_paths):
+                physical_prefix = pathlib.Path(import_paths)
+                return str(physical_root.joinpath(filepath.relative_to(physical_prefix)))
+            else:
+                raise TypeError("invalid path or file: {0!r}".format(filepath))
+        else:
+            for key, value in import_paths.items():
+                value = dougal_root.joinpath(value)
+                physical_prefix = pathlib.Path(value)
+                if is_relative_to(filepath, physical_prefix):
+                    logical_prefix = physical_root.joinpath(pathlib.Path(key)).resolve()
+                    return str(logical_prefix.joinpath(filepath.relative_to(physical_prefix)))
+
+            # If we got here with no matches, this is not a valid
+            # Dougal data path
+            raise TypeError("invalid path or file: {0!r}".format(filepath))
+    else:
+        # A relative filepath is always resolved relative to DOUGAL_ROOT
+        return untranslate_path(dougal_root.joinpath(filepath))
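A minimal round-trip sketch of the new path helpers, assuming the string form `imports.paths: /srv/mnt/Data` used in the sample configuration later in this diff (the file name below is hypothetical):

    import configuration

    # Hypothetical logical path; assumes (config.yaml).imports.paths == "/srv/mnt/Data".
    logical = "/data/survey1/line_0001.p111"

    physical = configuration.translate_path(logical)
    # -> "/srv/mnt/Data/survey1/line_0001.p111": the first element ("data")
    #    only names the logical mount and is replaced by the physical prefix.

    print(configuration.untranslate_path(physical))
    # -> "/survey1/line_0001.p111", i.e. the path relative to the logical root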
bin/daily_tasks.py (new executable file, 26 lines)
@@ -0,0 +1,26 @@
+#!/usr/bin/python3
+
+"""
+Do daily housekeeping on the database.
+
+This is meant to run shortly after midnight every day.
+"""
+
+import configuration
+from datastore import Datastore
+
+if __name__ == '__main__':
+
+    print("Connecting to database")
+    db = Datastore()
+    surveys = db.surveys()
+
+    print("Reading surveys")
+    for survey in surveys:
+        print(f'Survey: {survey["id"]} ({survey["schema"]})')
+        db.set_survey(survey["schema"])
+
+        print("Daily tasks")
+        db.run_daily_tasks()
+
+    print("Done")
bin/datastore.py
@@ -52,7 +52,7 @@ class Datastore:
 
         self.conn = psycopg2.connect(configuration.read()["db"]["connection_string"], **opts)
 
-    def set_autocommit(value = True):
+    def set_autocommit(self, value = True):
         """
         Enable or disable autocommit.
 
@@ -95,7 +95,7 @@ class Datastore:
         cursor.execute(qry, (filepath,))
         results = cursor.fetchall()
         if len(results):
-            return (filepath, file_hash(filepath)) in results
+            return (filepath, file_hash(configuration.translate_path(filepath))) in results
 
 
     def add_file(self, path, cursor = None):
@@ -107,7 +107,8 @@ class Datastore:
         else:
             cur = cursor
 
-        hash = file_hash(path)
+        realpath = configuration.translate_path(path)
+        hash = file_hash(realpath)
         qry = "CALL add_file(%s, %s);"
         cur.execute(qry, (path, hash))
         if cursor is None:
@@ -176,7 +177,7 @@ class Datastore:
         else:
             cur = cursor
 
-        hash = file_hash(path)
+        hash = file_hash(configuration.translate_path(path))
         qry = """
             UPDATE raw_lines rl
             SET ntbp = %s
@@ -412,7 +413,11 @@ class Datastore:
         qry = """
            INSERT INTO raw_lines (sequence, line, remarks, ntbp, incr, meta)
            VALUES (%s, %s, '', %s, %s, %s)
-           ON CONFLICT DO NOTHING;
+           ON CONFLICT (sequence) DO UPDATE SET
+               line = EXCLUDED.line,
+               ntbp = EXCLUDED.ntbp,
+               incr = EXCLUDED.incr,
+               meta = EXCLUDED.meta;
         """
 
         cursor.execute(qry, (fileinfo["sequence"], fileinfo["line"], ntbp, incr, json.dumps(fileinfo["meta"])))
@@ -462,7 +467,9 @@ class Datastore:
         qry = """
            INSERT INTO final_lines (sequence, line, remarks, meta)
            VALUES (%s, %s, '', %s)
-           ON CONFLICT DO NOTHING;
+           ON CONFLICT (sequence) DO UPDATE SET
+               line = EXCLUDED.line,
+               meta = EXCLUDED.meta;
         """
 
         cursor.execute(qry, (fileinfo["sequence"], fileinfo["line"], json.dumps(fileinfo["meta"])))
@@ -582,7 +589,63 @@ class Datastore:
             # We do not commit if we've been passed a cursor, instead
             # we assume that we are in the middle of a transaction
 
+    def get_file_data(self, path, cursor = None):
+        """
+        Retrieve arbitrary data associated with a file.
+        """
+
+        if cursor is None:
+            cur = self.conn.cursor()
+        else:
+            cur = cursor
+
+        realpath = configuration.translate_path(path)
+        hash = file_hash(realpath)
+
+        qry = """
+            SELECT data
+            FROM file_data
+            WHERE hash = %s;
+        """
+
+        cur.execute(qry, (hash,))
+        res = cur.fetchone()
+
+        if cursor is None:
+            self.maybe_commit()
+            # We do not commit if we've been passed a cursor, instead
+            # we assume that we are in the middle of a transaction
+        return res[0]
+
+    def surveys (self, include_archived = False):
+        """
+        Return list of survey definitions.
+        """
+
+        if self.conn is None:
+            self.connect()
+
+        if include_archived:
+            qry = """
+                SELECT meta
+                FROM public.projects;
+            """
+        else:
+            qry = """
+                SELECT meta
+                FROM public.projects
+                WHERE NOT (meta->'archived')::boolean IS true
+            """
+
+        with self.conn:
+            with self.conn.cursor() as cursor:
+
+                cursor.execute(qry)
+                results = cursor.fetchall()
+                return [r[0] for r in results if r[0]]
+
+
     # TODO Does this need tweaking on account of #246?
     def apply_survey_configuration(self, cursor = None):
         if cursor is None:
             cur = self.conn.cursor()
@@ -679,6 +742,21 @@ class Datastore:
             # We do not commit if we've been passed a cursor, instead
             # we assume that we are in the middle of a transaction
 
+    def adjust_planner(self, cursor = None):
+        """
+        Adjust estimated times on the planner
+        """
+        if cursor is None:
+            cur = self.conn.cursor()
+        else:
+            cur = cursor
+
+        qry = "CALL adjust_planner();"
+        cur.execute(qry)
+        if cursor is None:
+            self.maybe_commit()
+            # We do not commit if we've been passed a cursor, instead
+            # we assume that we are in the middle of a transaction
+
     def housekeep_event_log(self, cursor = None):
         """
@@ -691,6 +769,27 @@ class Datastore:
 
         qry = "CALL augment_event_data();"
         cur.execute(qry)
 
+        qry = "CALL scan_placeholders();"
+        cur.execute(qry)
+
         if cursor is None:
             self.maybe_commit()
             # We do not commit if we've been passed a cursor, instead
             # we assume that we are in the middle of a transaction
+
+    def run_daily_tasks(self, cursor = None):
+        """
+        Run once-a-day tasks
+        """
+        if cursor is None:
+            cur = self.conn.cursor()
+        else:
+            cur = cursor
+
+        qry = "CALL log_midnight_shots();"
+        cur.execute(qry)
+
+        if cursor is None:
+            self.maybe_commit()
+            # We do not commit if we've been passed a cursor, instead
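The switch from `ON CONFLICT DO NOTHING` to `ON CONFLICT (sequence) DO UPDATE` means that re-importing a sequence now refreshes the stored row instead of being silently skipped. A standalone sketch of the same upsert pattern (connection string and values are hypothetical):

    import psycopg2

    # Hypothetical connection string; the real one comes from
    # (config.yaml).db.connection_string via configuration.read().
    conn = psycopg2.connect("dbname=dougal")

    upsert = """
        INSERT INTO raw_lines (sequence, line, remarks, ntbp, incr, meta)
        VALUES (%s, %s, '', %s, %s, %s)
        ON CONFLICT (sequence) DO UPDATE SET
            line = EXCLUDED.line,
            ntbp = EXCLUDED.ntbp,
            incr = EXCLUDED.incr,
            meta = EXCLUDED.meta;
    """

    with conn, conn.cursor() as cur:
        cur.execute(upsert, (1001, "L0001", False, 1, "{}"))
        # A second run with the same sequence updates the row in place
        # instead of leaving a stale version behind.
        cur.execute(upsert, (1001, "L0001A", False, 1, "{}"))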
@@ -9,17 +9,18 @@ from datastore import Datastore
 
 if __name__ == '__main__':
 
-    print("Reading configuration")
-    surveys = configuration.surveys()
-
     print("Connecting to database")
     db = Datastore()
+    surveys = db.surveys()
 
     print("Reading surveys")
     for survey in surveys:
         print(f'Survey: {survey["id"]} ({survey["schema"]})')
         db.set_survey(survey["schema"])
 
+        print("Planner adjustment")
+        db.adjust_planner()
+
         print("Event log housekeeping")
         db.housekeep_event_log()
 
     print("Done")
@@ -51,12 +51,11 @@ def del_pending_remark(db, sequence):
 if __name__ == '__main__':
 
     print("Reading configuration")
-    surveys = configuration.surveys()
     file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
 
     print("Connecting to database")
     db = Datastore()
-    db.connect()
+    surveys = db.surveys()
 
     print("Reading surveys")
     for survey in surveys:
@@ -77,29 +76,31 @@ if __name__ == '__main__':
         pendingRx = re.compile(survey["final"]["pending"]["pattern"]["regex"])
 
         for fileprefix in final_p111["paths"]:
-            print(f"Path prefix: {fileprefix}")
+            realprefix = configuration.translate_path(fileprefix)
+            print(f"Path prefix: {fileprefix} → {realprefix}")
 
             for globspec in final_p111["globs"]:
-                for filepath in pathlib.Path(fileprefix).glob(globspec):
-                    filepath = str(filepath)
-                    print(f"Found {filepath}")
+                for physical_filepath in pathlib.Path(realprefix).glob(globspec):
+                    physical_filepath = str(physical_filepath)
+                    logical_filepath = configuration.untranslate_path(physical_filepath)
+                    print(f"Found {logical_filepath}")
 
                     pending = False
                     if pendingRx:
-                        pending = pendingRx.search(filepath) is not None
+                        pending = pendingRx.search(physical_filepath) is not None
 
-                    if not db.file_in_db(filepath):
+                    if not db.file_in_db(logical_filepath):
 
-                        age = time.time() - os.path.getmtime(filepath)
+                        age = time.time() - os.path.getmtime(physical_filepath)
                         if age < file_min_age:
-                            print("Skipping file because too new", filepath)
+                            print("Skipping file because too new", logical_filepath)
                             continue
 
                         print("Importing")
 
-                        match = rx.match(os.path.basename(filepath))
+                        match = rx.match(os.path.basename(logical_filepath))
                         if not match:
-                            error_message = f"File path not match the expected format! ({filepath} ~ {pattern['regex']})"
+                            error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})"
                             print(error_message, file=sys.stderr)
                             print("This file will be ignored!")
                             continue
@@ -108,21 +109,21 @@ if __name__ == '__main__':
                         file_info["meta"] = {}
 
                         if pending:
-                            print("Skipping / removing final file because marked as PENDING", filepath)
+                            print("Skipping / removing final file because marked as PENDING", logical_filepath)
                             db.del_sequence_final(file_info["sequence"])
                             add_pending_remark(db, file_info["sequence"])
                             continue
                         else:
                             del_pending_remark(db, file_info["sequence"])
 
-                        p111_data = p111.from_file(filepath)
+                        p111_data = p111.from_file(physical_filepath)
 
                         print("Saving")
 
                         p111_records = p111.p111_type("S", p111_data)
                         file_info["meta"]["lineName"] = p111.line_name(p111_data)
 
-                        db.save_final_p111(p111_records, file_info, filepath, survey["epsg"])
+                        db.save_final_p111(p111_records, file_info, logical_filepath, survey["epsg"])
                     else:
                         print("Already in DB")
                         if pending:
bin/import_map_layers.py (new executable file, 127 lines)
@@ -0,0 +1,127 @@
+#!/usr/bin/python3
+
+"""
+Import map layer data.
+
+For each survey in configuration.surveys(), check for new
+or modified map layer files and (re-)import them into the
+database.
+"""
+
+import os
+import sys
+import pathlib
+import re
+import time
+import json
+import configuration
+from datastore import Datastore
+
+if __name__ == '__main__':
+    """
+    Imports map layers from the directories defined in the configuration object
+    `import.map.layers`. The content of that key is an object with the following
+    structure:
+
+    {
+        layer1Name: [
+            {
+                format: "geojson",
+                path: "…",          // Logical path to a directory
+                globs: [
+                    "**/*.geojson", // List of globs matching map data files
+                    …
+                ]
+            }
+        ],
+
+        layer2Name: …
+        …
+    }
+    """
+
+    def process (layer_name, layer, physical_filepath):
+        physical_filepath = str(physical_filepath)
+        logical_filepath = configuration.untranslate_path(physical_filepath)
+        print(f"Found {logical_filepath}")
+
+        if not db.file_in_db(logical_filepath):
+
+            age = time.time() - os.path.getmtime(physical_filepath)
+            if age < file_min_age:
+                print("Skipping file because too new", logical_filepath)
+                return
+
+            print("Importing")
+
+            file_info = {
+                "type": "map_layer",
+                "format": layer["format"],
+                "name": layer_name,
+                "tooltip": layer.get("tooltip"),
+                "popup": layer.get("popup")
+            }
+
+            db.save_file_data(logical_filepath, json.dumps(file_info))
+
+        else:
+            file_info = db.get_file_data(logical_filepath)
+            dirty = False
+            if file_info:
+                if file_info["name"] != layer_name:
+                    print("Renaming to", layer_name)
+                    file_info["name"] = layer_name
+                    dirty = True
+                if file_info.get("tooltip") != layer.get("tooltip"):
+                    print("Changing tooltip to", layer.get("tooltip") or "null")
+                    file_info["tooltip"] = layer.get("tooltip")
+                    dirty = True
+                if file_info.get("popup") != layer.get("popup"):
+                    print("Changing popup to", layer.get("popup") or "null")
+                    file_info["popup"] = layer.get("popup")
+                    dirty = True
+
+            if dirty:
+                db.save_file_data(logical_filepath, json.dumps(file_info))
+            else:
+                print("Already in DB")
+
+
+    print("Reading configuration")
+    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
+
+    print("Connecting to database")
+    db = Datastore()
+    surveys = db.surveys()
+
+    print("Reading surveys")
+    for survey in surveys:
+        print(f'Survey: {survey["id"]} ({survey["schema"]})')
+
+        db.set_survey(survey["schema"])
+
+        try:
+            map_layers = survey["imports"]["map"]["layers"]
+        except KeyError:
+            print("No map layers defined")
+            continue
+
+        for layer_name, layer_items in map_layers.items():
+
+            for layer in layer_items:
+                fileprefix = layer["path"]
+                realprefix = configuration.translate_path(fileprefix)
+
+                if os.path.isfile(realprefix):
+
+                    process(layer_name, layer, realprefix)
+
+                elif os.path.isdir(realprefix):
+
+                    if not "globs" in layer:
+                        layer["globs"] = [ "**/*.geojson" ]
+
+                    for globspec in layer["globs"]:
+                        for physical_filepath in pathlib.Path(realprefix).glob(globspec):
+                            process(layer_name, layer, physical_filepath)
+
+    print("Done")
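For reference, the docstring's pseudo-JSON corresponds to a parsed survey configuration shaped like this sketch (layer name and paths are hypothetical):

    # Each layer name maps to a list of layer definitions, which is
    # exactly what the script walks with map_layers.items().
    survey = {
        "imports": {
            "map": {
                "layers": {
                    "obstructions": [
                        {
                            "format": "geojson",
                            "path": "/data/gis/obstructions",  # logical path
                            "globs": ["**/*.geojson"],
                            "tooltip": "name",                 # optional
                        }
                    ]
                }
            }
        }
    }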
@@ -17,29 +17,31 @@ from datastore import Datastore
 
 if __name__ == '__main__':
 
-    print("Reading configuration")
-    surveys = configuration.surveys()
-    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
-
     print("Connecting to database")
     db = Datastore()
+    surveys = db.surveys()
+
+    print("Reading configuration")
+    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
 
     print("Reading surveys")
     for survey in surveys:
         print(f'Survey: {survey["id"]} ({survey["schema"]})')
         db.set_survey(survey["schema"])
         for file in survey["preplots"]:
+            realpath = configuration.translate_path(file["path"])
+
             print(f"Preplot: {file['path']}")
             if not db.file_in_db(file["path"]):
 
-                age = time.time() - os.path.getmtime(file["path"])
+                age = time.time() - os.path.getmtime(realpath)
                 if age < file_min_age:
                     print("Skipping file because too new", file["path"])
                     continue
 
                 print("Importing")
                 try:
-                    preplot = preplots.from_file(file)
+                    preplot = preplots.from_file(file, realpath)
                 except FileNotFoundError:
                     print(f"File does not exist: {file['path']}", file=sys.stderr)
                     continue
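The new `realpath` argument lets callers hand `preplots.from_file()` the physical location while the logical path in `file["path"]` stays in use for bookkeeping. A short sketch (the preplot definition is hypothetical):

    import configuration
    import preplots

    # Hypothetical preplot entry as it would appear in a survey configuration.
    file = {"path": "/data/preplots/block_a.sps", "type": "sps"}

    realpath = configuration.translate_path(file["path"])  # physical location
    preplot = preplots.from_file(file, realpath)           # parses realpath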
@@ -20,12 +20,11 @@ from datastore import Datastore
 if __name__ == '__main__':
 
     print("Reading configuration")
-    surveys = configuration.surveys()
     file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
 
     print("Connecting to database")
     db = Datastore()
-    db.connect()
+    surveys = db.surveys()
 
     print("Reading surveys")
     for survey in surveys:
@@ -46,30 +45,32 @@ if __name__ == '__main__':
         ntbpRx = re.compile(survey["raw"]["ntbp"]["pattern"]["regex"])
 
         for fileprefix in raw_p111["paths"]:
-            print(f"Path prefix: {fileprefix}")
+            realprefix = configuration.translate_path(fileprefix)
+            print(f"Path prefix: {fileprefix} → {realprefix}")
 
             for globspec in raw_p111["globs"]:
-                for filepath in pathlib.Path(fileprefix).glob(globspec):
-                    filepath = str(filepath)
-                    print(f"Found {filepath}")
+                for physical_filepath in pathlib.Path(realprefix).glob(globspec):
+                    physical_filepath = str(physical_filepath)
+                    logical_filepath = configuration.untranslate_path(physical_filepath)
+                    print(f"Found {logical_filepath}")
 
                     if ntbpRx:
-                        ntbp = ntbpRx.search(filepath) is not None
+                        ntbp = ntbpRx.search(physical_filepath) is not None
                     else:
                         ntbp = False
 
-                    if not db.file_in_db(filepath):
+                    if not db.file_in_db(logical_filepath):
 
-                        age = time.time() - os.path.getmtime(filepath)
+                        age = time.time() - os.path.getmtime(physical_filepath)
                         if age < file_min_age:
-                            print("Skipping file because too new", filepath)
+                            print("Skipping file because too new", logical_filepath)
                             continue
 
                         print("Importing")
 
-                        match = rx.match(os.path.basename(filepath))
+                        match = rx.match(os.path.basename(logical_filepath))
                         if not match:
-                            error_message = f"File path not match the expected format! ({filepath} ~ {pattern['regex']})"
+                            error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})"
                             print(error_message, file=sys.stderr)
                             print("This file will be ignored!")
                             continue
@@ -77,7 +78,7 @@ if __name__ == '__main__':
                         file_info = dict(zip(pattern["captures"], match.groups()))
                         file_info["meta"] = {}
 
-                        p111_data = p111.from_file(filepath)
+                        p111_data = p111.from_file(physical_filepath)
 
                         print("Saving")
 
@@ -85,7 +86,7 @@ if __name__ == '__main__':
                         if len(p111_records):
                             file_info["meta"]["lineName"] = p111.line_name(p111_data)
 
-                            db.save_raw_p111(p111_records, file_info, filepath, survey["epsg"], ntbp=ntbp)
+                            db.save_raw_p111(p111_records, file_info, logical_filepath, survey["epsg"], ntbp=ntbp)
                         else:
                             print("No source records found in file")
                     else:
@@ -93,7 +94,7 @@ if __name__ == '__main__':
 
                         # Update the NTBP status to whatever the latest is,
                         # as it might have changed.
-                        db.set_ntbp(filepath, ntbp)
+                        db.set_ntbp(logical_filepath, ntbp)
                         if ntbp:
                             print("Sequence is NTBP")
 
@@ -20,12 +20,11 @@ from datastore import Datastore
 if __name__ == '__main__':
 
     print("Reading configuration")
-    surveys = configuration.surveys()
     file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
 
     print("Connecting to database")
     db = Datastore()
-    db.connect()
+    surveys = db.surveys()
 
     print("Reading surveys")
     for survey in surveys:
@@ -47,36 +46,38 @@ if __name__ == '__main__':
         rx = re.compile(pattern["regex"], flags)
 
         for fileprefix in raw_smsrc["paths"]:
-            print(f"Path prefix: {fileprefix}")
+            realprefix = configuration.translate_path(fileprefix)
+            print(f"Path prefix: {fileprefix} → {realprefix}")
 
             for globspec in raw_smsrc["globs"]:
-                for filepath in pathlib.Path(fileprefix).glob(globspec):
-                    filepath = str(filepath)
-                    print(f"Found {filepath}")
+                for physical_filepath in pathlib.Path(realprefix).glob(globspec):
+                    physical_filepath = str(physical_filepath)
+                    logical_filepath = configuration.untranslate_path(physical_filepath)
+                    print(f"Found {logical_filepath}")
 
-                    if not db.file_in_db(filepath):
+                    if not db.file_in_db(logical_filepath):
 
-                        age = time.time() - os.path.getmtime(filepath)
+                        age = time.time() - os.path.getmtime(physical_filepath)
                         if age < file_min_age:
-                            print("Skipping file because too new", filepath)
+                            print("Skipping file because too new", logical_filepath)
                             continue
 
                         print("Importing")
 
-                        match = rx.match(os.path.basename(filepath))
+                        match = rx.match(os.path.basename(logical_filepath))
                         if not match:
-                            error_message = f"File path not matching the expected format! ({filepath} ~ {pattern['regex']})"
+                            error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})"
                             print(error_message, file=sys.stderr)
                             print("This file will be ignored!")
                             continue
 
                         file_info = dict(zip(pattern["captures"], match.groups()))
 
-                        smsrc_records = smsrc.from_file(filepath)
+                        smsrc_records = smsrc.from_file(physical_filepath)
 
                         print("Saving")
 
-                        db.save_raw_smsrc(smsrc_records, file_info, filepath)
+                        db.save_raw_smsrc(smsrc_records, file_info, logical_filepath)
                     else:
                         print("Already in DB")
 
@@ -15,25 +15,4 @@ from datastore import Datastore
 
 if __name__ == '__main__':
 
-    print("Reading configuration")
-    configs = configuration.files(include_archived = True)
-
-    print("Connecting to database")
-    db = Datastore()
-    #db.connect()
-
-    print("Reading surveys")
-    for config in configs:
-        filepath = config[0]
-        survey = config[1]
-        print(f'Survey: {survey["id"]} ({filepath})')
-        db.set_survey(survey["schema"])
-        if not db.file_in_db(filepath):
-            print("Saving to DB")
-            db.save_file_data(filepath, json.dumps(survey))
-            print("Applying survey configuration")
-            db.apply_survey_configuration()
-        else:
-            print("Already in DB")
-
-    print("Done")
+    print("This function is obsolete. Returning with no action")
@@ -38,11 +38,11 @@ if __name__ == '__main__':
 
     message = " ".join(args["remarks"])
 
-    print("new event:", schema, tstamp, message)
+    print("new event:", schema, tstamp, message, args["label"])
 
     if schema and tstamp and message:
         db.set_survey(schema)
         with db.conn.cursor() as cursor:
-            qry = "INSERT INTO events_timed (tstamp, remarks) VALUES (%s, %s);"
-            cursor.execute(qry, (tstamp, message))
+            qry = "INSERT INTO event_log (tstamp, remarks, labels) VALUES (%s, replace_placeholders(%s, %s, NULL, NULL), %s);"
+            cursor.execute(qry, (tstamp, message, tstamp, args["label"]))
             db.maybe_commit()
@@ -4,9 +4,10 @@ import sps
 Preplot importing functions.
 """
 
-def from_file (file):
+def from_file (file, realpath = None):
+    filepath = realpath or file["path"]
     if not "type" in file or file["type"] == "sps":
-        records = sps.from_file(file["path"], file["format"] if "format" in file else None )
+        records = sps.from_file(filepath, file["format"] if "format" in file else None )
     else:
         return "Not an SPS file"
 
@@ -13,21 +13,27 @@ from datastore import Datastore
 
 if __name__ == '__main__':
 
-    print("Reading configuration")
-    surveys = configuration.surveys()
-
     print("Connecting to database")
     db = Datastore()
+
+    print("Reading configuration")
+    surveys = db.surveys()
 
     print("Reading surveys")
     for survey in surveys:
         print(f'Survey: {survey["id"]} ({survey["schema"]})')
         db.set_survey(survey["schema"])
 
         for file in db.list_files():
-            path = file[0]
-            if not os.path.exists(path):
-                print(path, "NOT FOUND")
-                db.del_file(path)
+            try:
+                path = configuration.translate_path(file[0])
+                if not os.path.exists(path):
+                    print(path, "NOT FOUND")
+                    db.del_file(file[0])
+            except TypeError:
+                # In case the logical path no longer matches
+                # the Dougal configuration.
+                print(file[0], "COULD NOT BE TRANSLATED TO A PHYSICAL PATH. DELETING")
+                db.del_file(file[0])
 
     print("Done")
@@ -1,5 +1,6 @@
 #!/bin/bash
 
+DOUGAL_ROOT=${DOUGAL_ROOT:-$(dirname "$0")/..}
 
 BINDIR="$DOUGAL_ROOT/bin"
@@ -8,6 +9,20 @@ LOCKFILE=${LOCKFILE:-$VARDIR/runner.lock}
 
 [ -f ~/.profile ] && . ~/.profile
 
+DOUGAL_LOG_TAG="dougal.runner[$$]"
+
+# Only send output to the logger if we have the appropriate
+# configuration set.
+if [[ -n "$DOUGAL_LOG_TAG" && -n "$DOUGAL_LOG_FACILITY" ]]; then
+    function _logger () {
+        logger $*
+    }
+else
+    function _logger () {
+        : # This is the Bash null command
+    }
+fi
+
 function tstamp () {
     date -u +%Y-%m-%dT%H:%M:%SZ
 }
@@ -18,26 +33,44 @@ function prefix () {
 
 function print_log () {
     printf "$(prefix)\033[36m%s\033[0m\n" "$*"
+    _logger -t "$DOUGAL_LOG_TAG" -p "$DOUGAL_LOG_FACILITY.info" "$*"
 }
 
 function print_info () {
     printf "$(prefix)\033[0m%s\n" "$*"
+    _logger -t "$DOUGAL_LOG_TAG" -p "$DOUGAL_LOG_FACILITY.debug" "$*"
 }
 
 function print_warning () {
     printf "$(prefix)\033[33;1m%s\033[0m\n" "$*"
+    _logger -t "$DOUGAL_LOG_TAG" -p "$DOUGAL_LOG_FACILITY.warning" "$*"
 }
 
 function print_error () {
     printf "$(prefix)\033[31m%s\033[0m\n" "$*"
+    _logger -t "$DOUGAL_LOG_TAG" -p "$DOUGAL_LOG_FACILITY.error" "$*"
 }
 
 function run () {
-    PROGNAME=$(basename "$1")
+    PROGNAME=${PROGNAME:-$(basename "$1")}
 
     STDOUTLOG="$VARDIR/$PROGNAME.out"
     STDERRLOG="$VARDIR/$PROGNAME.err"
 
-    "$1" >"$STDOUTLOG" 2>"$STDERRLOG" || {
+    # What follows runs the command that we have been given (with any arguments passed)
+    # and logs:
+    # * stdout to $STDOUTLOG (a temporary file) and possibly to syslog, if enabled.
+    # * stderr to $STDERRLOG (a temporary file) and possibly to syslog, if enabled.
+    #
+    # When logging to syslog, stdout goes as debug level and stderr as warning (not error).
+    #
+    # The temporary file is used in case the command fails, at which point we try to log
+    # a warning in GitLab's alerts facility.
+
+    $* \
+        > >(tee $STDOUTLOG |_logger -t "dougal.runner.$PROGNAME[$$]" -p "$DOUGAL_LOG_FACILITY.debug") \
+        2> >(tee $STDERRLOG |_logger -t "dougal.runner.$PROGNAME[$$]" -p "$DOUGAL_LOG_FACILITY.warning") || {
 
         print_error "Failed: $PROGNAME"
         cat $STDOUTLOG
         cat $STDERRLOG
@@ -52,11 +85,17 @@ function run () {
 
         exit 2
     }
+    # cat $STDOUTLOG
 
+    unset PROGNAME
     rm $STDOUTLOG $STDERRLOG
 }
 
+function cleanup () {
+    if [[ -f $LOCKFILE ]]; then
+        rm "$LOCKFILE"
+    fi
+}
+
 if [[ -f $LOCKFILE ]]; then
     PID=$(cat "$LOCKFILE")
     if pgrep -F "$LOCKFILE"; then
@@ -74,6 +113,13 @@ echo "$$" > "$LOCKFILE" || {
 }
 print_info "Start run"
 
+print_log "Check if data is accessible"
+$BINDIR/check_mounts_present.py || {
+    print_warning "Import mounts not accessible. Inhibiting all tasks!"
+    cleanup
+    exit 253
+}
+
 print_log "Purge deleted files"
 run $BINDIR/purge_deleted_files.py
 
@@ -86,27 +132,30 @@ run $BINDIR/import_preplots.py
 print_log "Import raw P1/11"
 run $BINDIR/import_raw_p111.py
 
-print_log "Import raw P1/90"
-run $BINDIR/import_raw_p190.py
+#print_log "Import raw P1/90"
+#run $BINDIR/import_raw_p190.py
 
 print_log "Import final P1/11"
 run $BINDIR/import_final_p111.py
 
-print_log "Import final P1/90"
-run $BINDIR/import_final_p190.py
+#print_log "Import final P1/90"
+#run $BINDIR/import_final_p190.py
 
 print_log "Import SmartSource data"
 run $BINDIR/import_smsrc.py
 
+print_log "Import map user layers"
+run $BINDIR/import_map_layers.py
+
 # if [[ -z "$RUNNER_NOEXPORT" ]]; then
 #     print_log "Export system data"
 #     run $BINDIR/system_exports.py
 # fi
 
-if [[ -n "$RUNNER_IMPORT" ]]; then
-    print_log "Import system data"
-    run $BINDIR/system_imports.py
-fi
+# if [[ -n "$RUNNER_IMPORT" ]]; then
+#     print_log "Import system data"
+#     run $BINDIR/system_imports.py
+# fi
 
 # print_log "Export QC data"
 # run $BINDIR/human_exports_qc.py
@@ -117,11 +166,14 @@ fi
 print_log "Process ASAQC queue"
 # Run insecure in test mode:
 # export NODE_TLS_REJECT_UNAUTHORIZED=0
-run $DOUGAL_ROOT/lib/www/server/queues/asaqc/index.js
+PROGNAME=asaqc_queue run $DOUGAL_ROOT/lib/www/server/queues/asaqc/index.js
 
 print_log "Run database housekeeping actions"
 run $BINDIR/housekeep_database.py
 
+print_log "Run QCs"
+PROGNAME=run_qc run $DOUGAL_ROOT/lib/www/server/lib/qc/index.js
+
 
 rm "$LOCKFILE"
 print_info "End run"
@@ -32,6 +32,25 @@ imports:
   # least this many seconds ago.
   file_min_age: 60
 
+  # These paths refer to remote mounts which must be present in order
+  # for imports to work. If any of these paths are empty, import actions
+  # (including data deletion) will be inhibited. This is to cope with
+  # things like transient network failures.
+  mounts:
+    - /srv/mnt/Data
+
+  # These paths can be exposed to end users via the API. They should
+  # contain the locations where project data, or any other user data
+  # that needs to be accessible by Dougal, is located.
+  #
+  # This key can be either a string or an object:
+  # - If a string, it points to the root path for Dougal-accessible data.
+  # - If an object, there is an implicit root and the first-level
+  #   paths are denoted by the keys, with the values being their
+  #   respective physical paths.
+  # Non-absolute paths are relative to $DOUGAL_ROOT.
+  paths: /srv/mnt/Data
+
 queues:
   asaqc:
     request:
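The two accepted shapes of `imports.paths`, shown as the parsed structures `configuration.read()` would yield (the object-form keys are hypothetical examples):

    # String form: a single physical root for all Dougal-accessible data.
    cfg_string = {"imports": {"paths": "/srv/mnt/Data"}}

    # Object form: logical first-level names mapped to physical paths.
    cfg_object = {
        "imports": {
            "paths": {
                "data": "/srv/mnt/Data",  # hypothetical logical name
                "gis": "var/gis",         # non-absolute: relative to $DOUGAL_ROOT
            }
        }
    }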
@@ -20,7 +20,7 @@ SET row_security = off;
 -- Name: dougal; Type: DATABASE; Schema: -; Owner: postgres
 --
 
-CREATE DATABASE dougal WITH TEMPLATE = template0 ENCODING = 'UTF8' LOCALE = 'en_GB.UTF-8';
+CREATE DATABASE dougal WITH TEMPLATE = template0 ENCODING = 'UTF8' LC_COLLATE = 'C' LC_CTYPE = 'en_GB.UTF-8';
 
 
 ALTER DATABASE dougal OWNER TO postgres;
@@ -144,6 +144,107 @@ CREATE TYPE public.queue_item_status AS ENUM (
 
 ALTER TYPE public.queue_item_status OWNER TO postgres;
 
+--
+-- Name: event_meta(timestamp with time zone); Type: FUNCTION; Schema: public; Owner: postgres
+--
+
+CREATE FUNCTION public.event_meta(tstamp timestamp with time zone) RETURNS jsonb
+    LANGUAGE plpgsql
+    AS $$
+BEGIN
+    RETURN event_meta(tstamp, NULL, NULL);
+END;
+$$;
+
+
+ALTER FUNCTION public.event_meta(tstamp timestamp with time zone) OWNER TO postgres;
+
+--
+-- Name: FUNCTION event_meta(tstamp timestamp with time zone); Type: COMMENT; Schema: public; Owner: postgres
+--
+
+COMMENT ON FUNCTION public.event_meta(tstamp timestamp with time zone) IS 'Overload of event_meta (timestamptz, integer, integer) for use when searching by timestamp.';
+
+
+--
+-- Name: event_meta(integer, integer); Type: FUNCTION; Schema: public; Owner: postgres
+--
+
+CREATE FUNCTION public.event_meta(sequence integer, point integer) RETURNS jsonb
+    LANGUAGE plpgsql
+    AS $$
+BEGIN
+    RETURN event_meta(NULL, sequence, point);
+END;
+$$;
+
+
+ALTER FUNCTION public.event_meta(sequence integer, point integer) OWNER TO postgres;
+
+--
+-- Name: FUNCTION event_meta(sequence integer, point integer); Type: COMMENT; Schema: public; Owner: postgres
+--
+
+COMMENT ON FUNCTION public.event_meta(sequence integer, point integer) IS 'Overload of event_meta (timestamptz, integer, integer) for use when searching by sequence / point.';
+
+
+--
+-- Name: event_meta(timestamp with time zone, integer, integer); Type: FUNCTION; Schema: public; Owner: postgres
+--
+
+CREATE FUNCTION public.event_meta(tstamp timestamp with time zone, sequence integer, point integer) RETURNS jsonb
+    LANGUAGE plpgsql
+    AS $$
+DECLARE
+    result jsonb;
+    -- Tolerance is hard-coded, at least until a need to expose it arises.
+    tolerance numeric;
+BEGIN
+    tolerance := 3; -- seconds
+
+    -- We search by timestamp if we can, as that's a lot quicker
+    IF tstamp IS NOT NULL THEN
+
+        SELECT meta
+        INTO result
+        FROM real_time_inputs rti
+        WHERE
+            rti.tstamp BETWEEN (event_meta.tstamp - tolerance * interval '1 second') AND (event_meta.tstamp + tolerance * interval '1 second')
+        ORDER BY abs(extract('epoch' FROM rti.tstamp - event_meta.tstamp ))
+        LIMIT 1;
+
+    ELSE
+
+        SELECT meta
+        INTO result
+        FROM real_time_inputs rti
+        WHERE
+            (meta->>'_sequence')::integer = event_meta.sequence AND
+            (meta->>'_point')::integer = event_meta.point
+        ORDER BY rti.tstamp DESC
+        LIMIT 1;
+
+    END IF;
+
+    RETURN result;
+
+END;
+$$;
+
+
+ALTER FUNCTION public.event_meta(tstamp timestamp with time zone, sequence integer, point integer) OWNER TO postgres;
+
+--
+-- Name: FUNCTION event_meta(tstamp timestamp with time zone, sequence integer, point integer); Type: COMMENT; Schema: public; Owner: postgres
+--
+
+COMMENT ON FUNCTION public.event_meta(tstamp timestamp with time zone, sequence integer, point integer) IS 'Return the real-time event metadata associated with a sequence / point in the current project or
+with a given timestamp. The timestamp is first searched for in the shot tables
+of the current prospect or, if not found, in the real-time data.
+
+Returns a JSONB object.';
+
+
 --
 -- Name: geometry_from_tstamp(timestamp with time zone, numeric); Type: FUNCTION; Schema: public; Owner: postgres
 --
@@ -153,12 +254,12 @@ CREATE FUNCTION public.geometry_from_tstamp(ts timestamp with time zone, toleran
     AS $$
   SELECT
     geometry,
-    extract('epoch' FROM (meta->>'tstamp')::timestamptz - ts ) AS delta
+    extract('epoch' FROM tstamp - ts ) AS delta
   FROM real_time_inputs
   WHERE
     geometry IS NOT NULL AND
-    abs(extract('epoch' FROM (meta->>'tstamp')::timestamptz - ts )) < tolerance
-  ORDER BY abs(extract('epoch' FROM (meta->>'tstamp')::timestamptz - ts ))
+    tstamp BETWEEN (ts - tolerance * interval '1 second') AND (ts + tolerance * interval '1 second')
+  ORDER BY abs(extract('epoch' FROM tstamp - ts ))
   LIMIT 1;
 $$;
 
@@ -172,6 +273,78 @@ ALTER FUNCTION public.geometry_from_tstamp(ts timestamp with time zone, toleranc
 COMMENT ON FUNCTION public.geometry_from_tstamp(ts timestamp with time zone, tolerance numeric, OUT geometry public.geometry, OUT delta numeric) IS 'Get geometry from timestamp';
 
 
+--
+-- Name: interpolate_geometry_from_tstamp(timestamp with time zone, numeric); Type: FUNCTION; Schema: public; Owner: postgres
+--
+
+CREATE FUNCTION public.interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric) RETURNS public.geometry
+    LANGUAGE plpgsql
+    AS $$
+DECLARE
+    ts0 timestamptz;
+    ts1 timestamptz;
+    geom0 geometry;
+    geom1 geometry;
+    span numeric;
+    fraction numeric;
+BEGIN
+
+    SELECT tstamp, geometry
+    INTO ts0, geom0
+    FROM real_time_inputs
+    WHERE tstamp <= ts
+    ORDER BY tstamp DESC
+    LIMIT 1;
+
+    SELECT tstamp, geometry
+    INTO ts1, geom1
+    FROM real_time_inputs
+    WHERE tstamp >= ts
+    ORDER BY tstamp ASC
+    LIMIT 1;
+
+    IF geom0 IS NULL OR geom1 IS NULL THEN
+        RAISE NOTICE 'Interpolation failed (no straddling data)';
+        RETURN NULL;
+    END IF;
+
+    -- See if we got an exact match
+    IF ts0 = ts THEN
+        RETURN geom0;
+    ELSIF ts1 = ts THEN
+        RETURN geom1;
+    END IF;
+
+    span := extract('epoch' FROM ts1 - ts0);
+
+    IF span > maxspan THEN
+        RAISE NOTICE 'Interpolation timespan % outside maximum requested (%)', span, maxspan;
+        RETURN NULL;
+    END IF;
+
+    fraction := extract('epoch' FROM ts - ts0) / span;
+
+    IF fraction < 0 OR fraction > 1 THEN
+        RAISE NOTICE 'Requested timestamp % outside of interpolation span (fraction: %)', ts, fraction;
+        RETURN NULL;
+    END IF;
+
+    RETURN ST_LineInterpolatePoint(ST_MakeLine(geom0, geom1), fraction);
+
+END;
+$$;
+
+
+ALTER FUNCTION public.interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric) OWNER TO postgres;
+
+--
+-- Name: FUNCTION interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric); Type: COMMENT; Schema: public; Owner: postgres
+--
+
+COMMENT ON FUNCTION public.interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric) IS 'Interpolate a position over a given maximum timespan (in seconds)
+based on real-time inputs. Returns a POINT geometry.';
+
+
 --
 -- Name: notify(); Type: FUNCTION; Schema: public; Owner: postgres
 --
@@ -428,13 +601,6 @@ ALTER TABLE ONLY public.queue_items
     ADD CONSTRAINT queue_items_pkey PRIMARY KEY (item_id);
 
 
---
--- Name: meta_tstamp_idx; Type: INDEX; Schema: public; Owner: postgres
---
-
-CREATE INDEX meta_tstamp_idx ON public.real_time_inputs USING btree (((meta ->> 'tstamp'::text)) DESC);
-
-
 --
 -- Name: tstamp_idx; Type: INDEX; Schema: public; Owner: postgres
 --
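A sketch of exercising the new functions from Python, in the style of the repo's Datastore queries (connection string, timestamp and shot numbers are hypothetical):

    import psycopg2

    conn = psycopg2.connect("dbname=dougal")  # hypothetical connection string

    with conn.cursor() as cur:
        # Real-time metadata for a shot, via the (integer, integer) overload.
        cur.execute("SELECT public.event_meta(%s, %s);", (1001, 2345))
        meta = cur.fetchone()[0]

        # Interpolated position at a timestamp, allowing the two straddling
        # fixes to be at most 60 seconds apart.
        cur.execute(
            "SELECT public.interpolate_geometry_from_tstamp(%s, %s);",
            ("2024-01-01T12:00:00Z", 60),
        )
        point = cur.fetchone()[0]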
@@ -1,3 +1,5 @@
-INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.4"}')
+\connect dougal
+
+INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.2"}')
 ON CONFLICT (key) DO UPDATE
-SET value = public.info.value || '{"db_schema": "0.3.4"}' WHERE public.info.key = 'version';
+SET value = public.info.value || '{"db_schema": "0.4.2"}' WHERE public.info.key = 'version';
File diff suppressed because it is too large.
etc/db/upgrades/upgrade18-v0.3.5-label_in_sequence-function.sql (new file, 158 lines)
@@ -0,0 +1,158 @@
+-- Fix not being able to edit a time-based event.
+--
+-- New schema version: 0.3.5
+--
+-- ATTENTION:
+--
+-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
+--
+--
+-- NOTE: This upgrade affects all schemas in the database.
+-- NOTE: Each application starts a transaction, which must be committed
+--       or rolled back.
+--
+-- The function label_in_sequence(integer, text) was missing for the
+-- production schemas. This patch (re-)defines the function as well
+-- as the other functions that depend on it (otherwise it does not get
+-- picked up).
+--
+-- To apply, run as the dougal user:
+--
+--   psql <<EOF
+--   \i $THIS_FILE
+--   COMMIT;
+--   EOF
+--
+-- NOTE: It can be applied multiple times without ill effect.
+--
+
+BEGIN;
+
+CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
+BEGIN
+    RAISE NOTICE '%', notice;
+END;
+$$ LANGUAGE plpgsql;
+
+CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $$
+BEGIN
+
+    RAISE NOTICE 'Updating schema %', schema_name;
+    -- We need to set the search path because some of the trigger
+    -- functions reference other tables in survey schemas assuming
+    -- they are in the search path.
+    EXECUTE format('SET search_path TO %I,public', schema_name);
+
+
+    CREATE OR REPLACE FUNCTION label_in_sequence(_sequence integer, _label text) RETURNS event_log
+    LANGUAGE sql
+    AS $inner$
+        SELECT * FROM event_log WHERE sequence = _sequence AND _label = ANY(labels);
+    $inner$;
+
+    -- We need to redefine the functions / procedures that call label_in_sequence
+
+    CREATE OR REPLACE PROCEDURE handle_final_line_events(IN _seq integer, IN _label text, IN _column text)
+    LANGUAGE plpgsql
+    AS $inner$
+
+    DECLARE
+        _line final_lines_summary%ROWTYPE;
+        _column_value integer;
+        _tg_name text := 'final_line';
+        _event event_log%ROWTYPE;
+        event_id integer;
+    BEGIN
+
+        SELECT * INTO _line FROM final_lines_summary WHERE sequence = _seq;
+        _event := label_in_sequence(_seq, _label);
+        _column_value := row_to_json(_line)->>_column;
+
+        --RAISE NOTICE '% is %', _label, _event;
+        --RAISE NOTICE 'Line is %', _line;
+        --RAISE NOTICE '% is % (%)', _column, _column_value, _label;
+
+        IF _event IS NULL THEN
+            --RAISE NOTICE 'We will populate the event log from the sequence data';
+
+            INSERT INTO event_log (sequence, point, remarks, labels, meta)
+            VALUES (
+                -- The sequence
+                _seq,
+                -- The shotpoint
+                _column_value,
+                -- Remark. Something like "FSP <linename>"
+                format('%s %s', _label, (SELECT meta->>'lineName' FROM final_lines WHERE sequence = _seq)),
+                -- Label
+                ARRAY[_label],
+                -- Meta. Something like {"auto" : {"FSP" : "final_line"}}
+                json_build_object('auto', json_build_object(_label, _tg_name))
+            );
+
+        ELSE
+            --RAISE NOTICE 'We may populate the sequence meta from the event log';
+            --RAISE NOTICE 'Unless the event log was populated by us previously';
+            --RAISE NOTICE 'Populated by us previously? %', _event.meta->'auto'->>_label = _tg_name;
+
+            IF _event.meta->'auto'->>_label IS DISTINCT FROM _tg_name THEN
+
+                --RAISE NOTICE 'Adding % found in events log to final_line meta', _label;
+                UPDATE final_lines
+                SET meta = jsonb_set(meta, ARRAY[_label], to_jsonb(_event.point))
+                WHERE sequence = _seq;
+
+            END IF;
+
+        END IF;
+    END;
+    $inner$;
+
+    CREATE OR REPLACE PROCEDURE final_line_post_import(IN _seq integer)
+    LANGUAGE plpgsql
+    AS $inner$
+    BEGIN
+
+        CALL handle_final_line_events(_seq, 'FSP', 'fsp');
+        CALL handle_final_line_events(_seq, 'FGSP', 'fsp');
+        CALL handle_final_line_events(_seq, 'LGSP', 'lsp');
+        CALL handle_final_line_events(_seq, 'LSP', 'lsp');
+
+    END;
+    $inner$;
+
+
+END;
+$$ LANGUAGE plpgsql;
+
+CREATE OR REPLACE PROCEDURE pg_temp.upgrade_18 () AS $$
+DECLARE
+    row RECORD;
+BEGIN
+    FOR row IN
+        SELECT schema_name FROM information_schema.schemata
+        WHERE schema_name LIKE 'survey_%'
+        ORDER BY schema_name
+    LOOP
+        CALL pg_temp.upgrade_survey_schema(row.schema_name);
+    END LOOP;
+END;
+$$ LANGUAGE plpgsql;
+
+CALL pg_temp.upgrade_18();
+
+CALL show_notice('Cleaning up');
+DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
+DROP PROCEDURE pg_temp.upgrade_18 ();
+
+CALL show_notice('Updating db_schema version');
+INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.5"}')
+ON CONFLICT (key) DO UPDATE
+SET value = public.info.value || '{"db_schema": "0.3.5"}' WHERE public.info.key = 'version';
+
+
+CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
+DROP PROCEDURE show_notice (notice text);
+
+--
+--NOTE Run `COMMIT;` now if all went well
+--
162
etc/db/upgrades/upgrade19-v0.3.6-optimise-geometry-functions.sql
Normal file
162
etc/db/upgrades/upgrade19-v0.3.6-optimise-geometry-functions.sql
Normal file
@@ -0,0 +1,162 @@
|
||||
-- Fix not being able to edit a time-based event.
--
-- New schema version: 0.3.6
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This optimises geometry_from_tstamp() by many orders of magnitude
-- (issue #241). The redefinition of geometry_from_tstamp() necessitates
-- redefining dependent functions.
--
-- We also drop the index on real_time_inputs.meta->'tstamp' as it is no
-- longer used.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $$
BEGIN

RAISE NOTICE 'Updating schema %', schema_name;
-- We need to set the search path because some of the trigger
-- functions reference other tables in survey schemas assuming
-- they are in the search path.
EXECUTE format('SET search_path TO %I,public', schema_name);

CREATE OR REPLACE PROCEDURE augment_event_data ()
LANGUAGE sql
AS $inner$
-- Populate the timestamp of sequence / point events
UPDATE event_log_full
SET tstamp = tstamp_from_sequence_shot(sequence, point)
WHERE
tstamp IS NULL AND sequence IS NOT NULL AND point IS NOT NULL;

-- Populate the geometry of sequence / point events for which
-- there is raw_shots data.
UPDATE event_log_full
SET meta = meta ||
jsonb_build_object(
'geometry',
(
SELECT st_transform(geometry, 4326)::jsonb
FROM raw_shots rs
WHERE rs.sequence = event_log_full.sequence AND rs.point = event_log_full.point
)
)
WHERE
sequence IS NOT NULL AND point IS NOT NULL AND
NOT meta ? 'geometry';

-- Populate the geometry of time-based events
UPDATE event_log_full e
SET
meta = meta || jsonb_build_object('geometry',
(SELECT st_transform(g.geometry, 4326)::jsonb
FROM geometry_from_tstamp(e.tstamp, 3) g))
WHERE
tstamp IS NOT NULL AND
sequence IS NULL AND point IS NULL AND
NOT meta ? 'geometry';

-- Get rid of null geometries
UPDATE event_log_full
SET
meta = meta - 'geometry'
WHERE
jsonb_typeof(meta->'geometry') = 'null';

-- Simplify the GeoJSON when the CRS is EPSG:4326
UPDATE event_log_full
SET
meta = meta #- '{geometry, crs}'
WHERE
meta->'geometry'->'crs'->'properties'->>'name' = 'EPSG:4326';

$inner$;

COMMENT ON PROCEDURE augment_event_data()
IS 'Populate missing timestamps and geometries in event_log_full';

END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
BEGIN

CALL show_notice('Dropping index from real_time_inputs.meta->tstamp');
DROP INDEX IF EXISTS meta_tstamp_idx;

CALL show_notice('Creating function geometry_from_tstamp');
CREATE OR REPLACE FUNCTION public.geometry_from_tstamp(
IN ts timestamptz,
IN tolerance numeric,
OUT "geometry" geometry,
OUT "delta" numeric)
AS $inner$
SELECT
geometry,
extract('epoch' FROM tstamp - ts ) AS delta
FROM real_time_inputs
WHERE
geometry IS NOT NULL AND
tstamp BETWEEN (ts - tolerance * interval '1 second') AND (ts + tolerance * interval '1 second')
ORDER BY abs(extract('epoch' FROM tstamp - ts ))
LIMIT 1;
$inner$ LANGUAGE SQL;

COMMENT ON FUNCTION public.geometry_from_tstamp(timestamptz, numeric)
IS 'Get geometry from timestamp';
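
-- Usage sketch (timestamp purely illustrative): fetch the nearest position
-- logged within 3 s of a given instant, along with its offset in seconds.
--
--   SELECT geometry, delta
--   FROM public.geometry_from_tstamp('2021-06-01T12:00:00Z'::timestamptz, 3);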

FOR row IN
SELECT schema_name FROM information_schema.schemata
WHERE schema_name LIKE 'survey_%'
ORDER BY schema_name
LOOP
CALL pg_temp.upgrade_survey_schema(row.schema_name);
END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.6"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.6"}' WHERE public.info.key = 'version';


CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,254 @@
-- Update adjust_planner() for the new events schema.
--
-- New schema version: 0.3.7
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This updates the adjust_planner() procedure to take into account the
-- new events schema (the `event` view has been replaced by `event_log`).
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

RAISE NOTICE 'Updating schema %', schema_name;
-- We need to set the search path because some of the trigger
-- functions reference other tables in survey schemas assuming
-- they are in the search path.
EXECUTE format('SET search_path TO %I,public', schema_name);

CALL pg_temp.show_notice('Replacing adjust_planner() procedure');
CREATE OR REPLACE PROCEDURE adjust_planner()
LANGUAGE plpgsql
AS $$
DECLARE
_planner_config jsonb;
_planned_line planned_lines%ROWTYPE;
_lag interval;
_last_sequence sequences_summary%ROWTYPE;
_deltatime interval;
_shotinterval interval;
_tstamp timestamptz;
_incr integer;
BEGIN

SET CONSTRAINTS planned_lines_pkey DEFERRED;

SELECT data->'planner'
INTO _planner_config
FROM file_data
WHERE data ? 'planner';

SELECT *
INTO _last_sequence
FROM sequences_summary
ORDER BY sequence DESC
LIMIT 1;

SELECT *
INTO _planned_line
FROM planned_lines
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;

SELECT
COALESCE(
((lead(ts0) OVER (ORDER BY sequence)) - ts1),
make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer)
)
INTO _lag
FROM planned_lines
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;

_incr = sign(_last_sequence.lsp - _last_sequence.fsp);

RAISE NOTICE '_planner_config: %', _planner_config;
RAISE NOTICE '_last_sequence: %', _last_sequence;
RAISE NOTICE '_planned_line: %', _planned_line;
RAISE NOTICE '_incr: %', _incr;

-- Does the latest sequence match a planned sequence?
IF _planned_line IS NULL THEN -- No it doesn't
RAISE NOTICE 'Latest sequence shot does not match a planned sequence';
SELECT * INTO _planned_line FROM planned_lines ORDER BY sequence ASC LIMIT 1;
RAISE NOTICE '_planned_line: %', _planned_line;

IF _planned_line.sequence <= _last_sequence.sequence THEN
RAISE NOTICE 'Renumbering the planned sequences starting from %', _planned_line.sequence + 1;
-- Renumber the planned sequences starting from last shot sequence number + 1
UPDATE planned_lines
SET sequence = sequence + _last_sequence.sequence - _planned_line.sequence + 1;
END IF;

-- The correction to make to the first planned line's ts0 will be based on either the last
-- sequence's EOL + default line change time or the current time, whichever is later.
_deltatime := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1) + make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer), current_timestamp) - _planned_line.ts0;
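-- e.g. (illustrative figures): EOL at 12:00 plus a 180 min line change
-- gives 15:00; if the current time is already 16:00, the later of the
-- two (16:00) wins, and the plan is shifted by 16:00 - ts0.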

-- Is the first planned line's start time in the past? (±5 mins)
IF _planned_line.ts0 < (current_timestamp - make_interval(mins => 5)) THEN
RAISE NOTICE 'First planned line is in the past. Adjusting times by %', _deltatime;
-- Adjust the start / end time of the planned lines by assuming that we are at
-- `defaultLineChangeDuration` minutes away from SOL of the first planned line.
UPDATE planned_lines
SET
ts0 = ts0 + _deltatime,
ts1 = ts1 + _deltatime;
END IF;

ELSE -- Yes it does
RAISE NOTICE 'Latest sequence does match a planned sequence: %, %', _planned_line.sequence, _planned_line.line;

-- Is it online?
IF EXISTS(SELECT 1 FROM raw_lines_files WHERE sequence = _last_sequence.sequence AND hash = '*online*') THEN
-- Yes it is
RAISE NOTICE 'Sequence % is online', _last_sequence.sequence;

-- Let us get the SOL from the events log if we can
RAISE NOTICE 'Trying to set fsp, ts0 from events log FSP, FGSP';
WITH e AS (
SELECT * FROM event_log
WHERE
sequence = _last_sequence.sequence
AND ('FSP' = ANY(labels) OR 'FGSP' = ANY(labels))
ORDER BY tstamp LIMIT 1
)
UPDATE planned_lines
SET
fsp = COALESCE(e.point, fsp),
ts0 = COALESCE(e.tstamp, ts0)
FROM e
WHERE planned_lines.sequence = _last_sequence.sequence;

-- Shot interval
_shotinterval := (_last_sequence.ts1 - _last_sequence.ts0) / abs(_last_sequence.lsp - _last_sequence.fsp);
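-- e.g. (illustrative figures): 1000 shot intervals over 2500 s gives
-- a 2.5 s shot interval.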

RAISE NOTICE 'Estimating EOL from current shot interval: %', _shotinterval;

SELECT (abs(lsp-fsp) * _shotinterval + ts0) - ts1
INTO _deltatime
FROM planned_lines
WHERE sequence = _last_sequence.sequence;

---- Set ts1 for the current sequence
--UPDATE planned_lines
--SET
--ts1 = (abs(lsp-fsp) * _shotinterval) + ts0
--WHERE sequence = _last_sequence.sequence;

RAISE NOTICE 'Adjustment is %', _deltatime;

IF abs(EXTRACT(EPOCH FROM _deltatime)) < 8 THEN
RAISE NOTICE 'Adjustment too small (< 8 s), so not applying it';
RETURN;
END IF;

-- Adjust ts1 for the current sequence
UPDATE planned_lines
SET ts1 = ts1 + _deltatime
WHERE sequence = _last_sequence.sequence;

-- Now shift all sequences after
UPDATE planned_lines
SET ts0 = ts0 + _deltatime, ts1 = ts1 + _deltatime
WHERE sequence > _last_sequence.sequence;

RAISE NOTICE 'Deleting planned sequences before %', _planned_line.sequence;
-- Remove all previous planner entries.
DELETE
FROM planned_lines
WHERE sequence < _last_sequence.sequence;

ELSE
-- No it isn't
RAISE NOTICE 'Sequence % is offline', _last_sequence.sequence;

-- We were supposed to finish at _planned_line.ts1 but we finished at:
_tstamp := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1), current_timestamp);
-- WARNING Next line is for testing only
--_tstamp := COALESCE(_last_sequence.ts1_final, _last_sequence.ts1);
-- So we need to adjust timestamps by:
_deltatime := _tstamp - _planned_line.ts1;

RAISE NOTICE 'Planned end: %, actual end: % (%, %)', _planned_line.ts1, _tstamp, _planned_line.sequence, _last_sequence.sequence;
RAISE NOTICE 'Shifting times by % for sequences > %', _deltatime, _planned_line.sequence;
-- NOTE: This won't work if sequences are not, err… sequential.
-- NOTE: This has been known to happen in 2020.
UPDATE planned_lines
SET
ts0 = ts0 + _deltatime,
ts1 = ts1 + _deltatime
WHERE sequence > _planned_line.sequence;

RAISE NOTICE 'Deleting planned sequences up to %', _planned_line.sequence;
-- Remove all previous planner entries.
DELETE
FROM planned_lines
WHERE sequence <= _last_sequence.sequence;

END IF;

END IF;
END;
$$;


END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
BEGIN

FOR row IN
SELECT schema_name FROM information_schema.schemata
WHERE schema_name LIKE 'survey_%'
ORDER BY schema_name
LOOP
CALL pg_temp.upgrade_survey_schema(row.schema_name);
END LOOP;

END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.7"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.7"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
267
etc/db/upgrades/upgrade21-v0.3.8-add-event-data-functions.sql
Normal file
@@ -0,0 +1,267 @@
-- Add event_position() and event_meta() functions.
--
-- New schema version: 0.3.8
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This adds event_position() and event_meta() functions which are used
-- to retrieve position or metadata, respectively, given either a timestamp
-- or a sequence / point pair. Intended to be used in the context of #229.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

RAISE NOTICE 'Updating schema %', schema_name;
-- We need to set the search path because some of the trigger
-- functions reference other tables in survey schemas assuming
-- they are in the search path.
EXECUTE format('SET search_path TO %I,public', schema_name);

--
-- event_position(): Fetch event position
--

CREATE OR REPLACE FUNCTION event_position (
tstamp timestamptz, sequence integer, point integer, tolerance numeric
)
RETURNS geometry
AS $$
DECLARE
position geometry;
BEGIN

-- Try and get position by sequence / point first
IF sequence IS NOT NULL AND point IS NOT NULL THEN
-- Try and get the position from final_shots or raw_shots
SELECT COALESCE(f.geometry, r.geometry) geometry
INTO position
FROM raw_shots r LEFT JOIN final_shots f USING (sequence, point)
WHERE r.sequence = event_position.sequence AND r.point = event_position.point;

IF position IS NOT NULL THEN
RETURN position;
ELSIF tstamp IS NULL THEN
-- Get the timestamp for the sequence / point, if we can.
-- It will be used later in the function as we fall back
-- to timestamp based search.
-- We also adjust the tolerance as we're now dealing with
-- an exact timestamp.
SELECT COALESCE(f.tstamp, r.tstamp) tstamp, 0.002 tolerance
INTO tstamp, tolerance
FROM raw_shots r LEFT JOIN final_shots f USING (sequence, point)
WHERE r.sequence = event_position.sequence AND r.point = event_position.point;
END IF;
END IF;

-- If we got here, we'd better have a timestamp.
-- First attempt: get a position from final_shots, raw_shots. This may
-- be redundant if we got here because we had a sequence /
-- point without a position, but never mind.
SELECT COALESCE(f.geometry, r.geometry) geometry
INTO position
FROM raw_shots r LEFT JOIN final_shots f USING (sequence, point)
WHERE r.tstamp = event_position.tstamp OR f.tstamp = event_position.tstamp
LIMIT 1; -- Just to be sure

IF position IS NULL THEN
-- Ok, so everything else so far has failed, let's try and get this
-- from real time data. We skip the search via sequence / point and
-- go directly for timestamp.
SELECT geometry
INTO position
FROM geometry_from_tstamp(tstamp, tolerance);
END IF;

RETURN position;

END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION event_position (timestamptz, integer, integer, numeric) IS
'Return the position associated with a sequence / point in the current project or
with a given timestamp. The timestamp is searched for first in the shot tables
of the current prospect or, if not found, in the real-time data.

Returns a geometry.';

CREATE OR REPLACE FUNCTION event_position (
tstamp timestamptz, sequence integer, point integer
)
RETURNS geometry
AS $$
BEGIN
RETURN event_position(tstamp, sequence, point, 3);
END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION event_position (timestamptz, integer, integer) IS
'Overload of event_position with a default tolerance of three seconds.';


CREATE OR REPLACE FUNCTION event_position (
tstamp timestamptz
)
RETURNS geometry
AS $$
BEGIN
RETURN event_position(tstamp, NULL, NULL);
END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION event_position (timestamptz) IS
'Overload of event_position (timestamptz, integer, integer) for use when searching by timestamp.';

CREATE OR REPLACE FUNCTION event_position (
sequence integer, point integer
)
RETURNS geometry
AS $$
BEGIN
RETURN event_position(NULL, sequence, point);
END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION event_position (integer, integer) IS
'Overload of event_position (timestamptz, integer, integer) for use when searching by sequence / point.';
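
-- Usage sketches (the argument values are purely illustrative):
--
--   SELECT event_position(11, 2600);                -- by sequence / point
--   SELECT event_position('2021-06-01T12:00:00Z');  -- by timestamp
--   SELECT event_position('2021-06-01T12:00:00Z', NULL, NULL, 10);  -- wider tolerance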


END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
BEGIN

--
-- event_meta(): Fetch event metadata
--

CREATE OR REPLACE FUNCTION event_meta (
tstamp timestamptz, sequence integer, point integer
)
RETURNS jsonb
AS $$
DECLARE
result jsonb;
-- Tolerance is hard-coded, at least until a need to expose it arises.
tolerance numeric;
BEGIN
tolerance := 3; -- seconds

-- We search by timestamp if we can, as that's a lot quicker
IF tstamp IS NOT NULL THEN

SELECT meta
INTO result
FROM real_time_inputs rti
WHERE
rti.tstamp BETWEEN (event_meta.tstamp - tolerance * interval '1 second') AND (event_meta.tstamp + tolerance * interval '1 second')
ORDER BY abs(extract('epoch' FROM rti.tstamp - event_meta.tstamp ))
LIMIT 1;

ELSE

SELECT meta
INTO result
FROM real_time_inputs rti
WHERE
(meta->>'_sequence')::integer = event_meta.sequence AND
(meta->>'_point')::integer = event_meta.point
ORDER BY rti.tstamp DESC
LIMIT 1;

END IF;

RETURN result;

END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION event_meta (timestamptz, integer, integer) IS
'Return the real-time event metadata associated with a sequence / point in the current project or
with a given timestamp. The timestamp is searched for first in the shot tables
of the current prospect or, if not found, in the real-time data.

Returns a JSONB object.';


CREATE OR REPLACE FUNCTION event_meta (
tstamp timestamptz
)
RETURNS jsonb
AS $$
BEGIN
RETURN event_meta(tstamp, NULL, NULL);
END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION event_meta (timestamptz) IS
'Overload of event_meta (timestamptz, integer, integer) for use when searching by timestamp.';

CREATE OR REPLACE FUNCTION event_meta (
sequence integer, point integer
)
RETURNS jsonb
AS $$
BEGIN
RETURN event_meta(NULL, sequence, point);
END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION event_meta (integer, integer) IS
'Overload of event_meta (timestamptz, integer, integer) for use when searching by sequence / point.';
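
-- Usage sketches (the argument values are purely illustrative):
--
--   SELECT event_meta(11, 2600);                -- by sequence / point
--   SELECT event_meta('2021-06-01T12:00:00Z');  -- by timestamp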

FOR row IN
SELECT schema_name FROM information_schema.schemata
WHERE schema_name LIKE 'survey_%'
ORDER BY schema_name
LOOP
CALL pg_temp.upgrade_survey_schema(row.schema_name);
END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.8"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.8"}' WHERE public.info.key = 'version';


CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,229 @@
-- Add placeholder replacement functions for event remarks.
--
-- New schema version: 0.3.9
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This defines a replace_placeholders() function, taking as arguments
-- a text string and either a timestamp or a sequence / point pair. It
-- uses the latter arguments to find metadata from which it can extract
-- relevant information and replace it into the text string wherever the
-- appropriate placeholders appear. For instance, given a call such as
-- replace_placeholders('The position is @POS@', NULL, 11, 2600) it will
-- replace '@POS@' with the position of point 2600 in sequence 11, if it
-- exists (or leave the placeholder untouched otherwise).
--
-- A scan_placeholders() procedure is also defined, which calls the above
-- function on the entire event log.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

RAISE NOTICE 'Updating schema %', schema_name;
-- We need to set the search path because some of the trigger
-- functions reference other tables in survey schemas assuming
-- they are in the search path.
EXECUTE format('SET search_path TO %I,public', schema_name);

CREATE OR REPLACE FUNCTION replace_placeholders (
text_in text, tstamp timestamptz, sequence integer, point integer
)
RETURNS text
AS $$
DECLARE
position geometry;
metadata jsonb;
text_out text;

json_query text;
json_result jsonb;
expect_recursion boolean := false;
BEGIN

text_out := text_in;

-- We only get a position if we are going to need it…
IF regexp_match(text_out, '@DMS@|@POS@|@DEG@') IS NOT NULL THEN
position := ST_Transform(event_position(tstamp, sequence, point), 4326);
END IF;

-- …and likewise with the metadata.
IF regexp_match(text_out, '@BSP@|@WD@|@CMG@|@EN@|@GRID@|@(\$\..*?)@@') IS NOT NULL THEN
metadata := event_meta(tstamp, sequence, point);
END IF;

-- We shortcut the evaluation if neither of the above regexps matched
IF position IS NULL AND metadata IS NULL THEN
RETURN text_out;
END IF;

IF position('@DMS@' IN text_out) != 0 THEN
text_out := replace(text_out, '@DMS@', ST_AsLatLonText(position));
END IF;

IF position('@POS@' IN text_out) != 0 THEN
text_out := replace(text_out, '@POS@', replace(ST_AsLatLonText(position, 'D.DDDDDD'), ' ', ', '));
END IF;

IF position('@DEG@' IN text_out) != 0 THEN
text_out := replace(text_out, '@DEG@', replace(ST_AsLatLonText(position, 'D.DDDDDD'), ' ', ', '));
END IF;

IF position('@EN@' IN text_out) != 0 THEN
IF metadata ? 'easting' AND metadata ? 'northing' THEN
text_out := replace(text_out, '@EN@', (metadata->>'easting') || ', ' || (metadata->>'northing'));
END IF;
END IF;

IF position('@GRID@' IN text_out) != 0 THEN
IF metadata ? 'easting' AND metadata ? 'northing' THEN
text_out := replace(text_out, '@GRID@', (metadata->>'easting') || ', ' || (metadata->>'northing'));
END IF;
END IF;

IF position('@CMG@' IN text_out) != 0 THEN
IF metadata ? 'bearing' THEN
text_out := replace(text_out, '@CMG@', metadata->>'bearing');
END IF;
END IF;

IF position('@BSP@' IN text_out) != 0 THEN
IF metadata ? 'speed' THEN
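-- 'speed' is assumed to be in m/s; multiplying by 3600 / 1852 converts
-- it to knots.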
text_out := replace(text_out, '@BSP@', round((metadata->>'speed')::numeric * 3600 / 1852, 1)::text);
END IF;
END IF;

IF position('@WD@' IN text_out) != 0 THEN
IF metadata ? 'waterDepth' THEN
text_out := replace(text_out, '@WD@', metadata->>'waterDepth');
END IF;
END IF;

json_query := (regexp_match(text_out, '@(\$\..*?)@@'))[1];
IF json_query IS NOT NULL THEN
json_result := jsonb_path_query_array(metadata, json_query::jsonpath);
IF jsonb_array_length(json_result) = 1 THEN
text_out := replace(text_out, '@'||json_query||'@@', json_result->>0);
ELSE
text_out := replace(text_out, '@'||json_query||'@@', json_result::text);
END IF;
-- There might be multiple JSONPath queries, so we may have to recurse
expect_recursion := true;
END IF;

IF expect_recursion IS TRUE AND text_in != text_out THEN
--RAISE NOTICE 'Recursing %', text_out;
-- We don't know if we have found all the JSONPath expressions,
-- so we do another pass.
RETURN replace_placeholders(text_out, tstamp, sequence, point);
ELSE
RETURN text_out;
END IF;

END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION replace_placeholders (text, timestamptz, integer, integer) IS
'Replace certain placeholder strings in the input text with data obtained from shot or real-time data.';
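
-- Usage sketch (sequence / point values purely illustrative):
--
--   SELECT replace_placeholders('SOL at @POS@, speed @BSP@ kn', NULL, 11, 2600);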


CREATE OR REPLACE PROCEDURE scan_placeholders ()
LANGUAGE sql
AS $$
-- We update non read-only events via the event_log view to leave a trace
-- of the fact that placeholders were replaced (and when).
-- Note that this will not replace placeholders of old edits.
UPDATE event_log
SET remarks = replace_placeholders(remarks, tstamp, sequence, point)
FROM (
SELECT id
FROM event_log e
WHERE
(meta->'readonly')::boolean IS NOT TRUE AND (
regexp_match(remarks, '@DMS@|@POS@|@DEG@') IS NOT NULL OR
regexp_match(remarks, '@BSP@|@WD@|@CMG@|@EN@|@GRID@|@(\$\..*?)@@') IS NOT NULL
)
) t
WHERE event_log.id = t.id;

-- And then we update read-only events directly on the event_log_full table
-- (as of this version of the schema we're prevented from updating read-only
-- events via event_log anyway).
UPDATE event_log_full
SET remarks = replace_placeholders(remarks, tstamp, sequence, point)
FROM (
SELECT uid
FROM event_log_full e
WHERE
(meta->'readonly')::boolean IS TRUE AND (
regexp_match(remarks, '@DMS@|@POS@|@DEG@') IS NOT NULL OR
regexp_match(remarks, '@BSP@|@WD@|@CMG@|@EN@|@GRID@|@(\$\..*?)@@') IS NOT NULL
)
) t
WHERE event_log_full.uid = t.uid;
$$;

COMMENT ON PROCEDURE scan_placeholders () IS
'Run replace_placeholders() on the entire event log.';
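
-- Usage sketch:
--
--   CALL scan_placeholders();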

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
BEGIN

FOR row IN
SELECT schema_name FROM information_schema.schemata
WHERE schema_name LIKE 'survey_%'
ORDER BY schema_name
LOOP
CALL pg_temp.upgrade_survey_schema(row.schema_name);
END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.9"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.9"}' WHERE public.info.key = 'version';


CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,127 @@
-- Add an interpolate_geometry_from_tstamp() function.
--
-- New schema version: 0.3.10
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects only the public schema.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This defines an interpolate_geometry_from_tstamp() function, taking a timestamp
-- and a maximum timespan in seconds. It will then interpolate a position
-- at the exact timestamp based on data from real_time_inputs, provided
-- that the effective interpolation timespan does not exceed the maximum
-- requested.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
BEGIN

CALL pg_temp.show_notice('Defining interpolate_geometry_from_tstamp()');

CREATE OR REPLACE FUNCTION public.interpolate_geometry_from_tstamp(
IN ts timestamptz,
IN maxspan numeric
)
RETURNS geometry
AS $$
DECLARE
ts0 timestamptz;
ts1 timestamptz;
geom0 geometry;
geom1 geometry;
span numeric;
fraction numeric;
BEGIN

SELECT tstamp, geometry
INTO ts0, geom0
FROM real_time_inputs
WHERE tstamp <= ts
ORDER BY tstamp DESC
LIMIT 1;

SELECT tstamp, geometry
INTO ts1, geom1
FROM real_time_inputs
WHERE tstamp >= ts
ORDER BY tstamp ASC
LIMIT 1;

IF geom0 IS NULL OR geom1 IS NULL THEN
RAISE NOTICE 'Interpolation failed (no straddling data)';
RETURN NULL;
END IF;

-- See if we got an exact match
IF ts0 = ts THEN
RETURN geom0;
ELSIF ts1 = ts THEN
RETURN geom1;
END IF;

span := extract('epoch' FROM ts1 - ts0);

IF span > maxspan THEN
RAISE NOTICE 'Interpolation timespan % outside maximum requested (%)', span, maxspan;
RETURN NULL;
END IF;

fraction := extract('epoch' FROM ts - ts0) / span;
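-- e.g. (illustrative figures): fixes at 12:00:00 and 12:00:10 with
-- ts = 12:00:04 give span = 10 and fraction = 0.4.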

IF fraction < 0 OR fraction > 1 THEN
RAISE NOTICE 'Requested timestamp % outside of interpolation span (fraction: %)', ts, fraction;
RETURN NULL;
END IF;

RETURN ST_LineInterpolatePoint(St_MakeLine(geom0, geom1), fraction);

END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION public.interpolate_geometry_from_tstamp(timestamptz, numeric) IS
'Interpolate a position over a given maximum timespan (in seconds)
based on real-time inputs. Returns a POINT geometry.';
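
-- Usage sketch (timestamp purely illustrative): interpolate within a
-- maximum 60 s window.
--
--   SELECT ST_AsText(public.interpolate_geometry_from_tstamp('2021-06-01T12:00:04Z', 60));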


END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.10"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.10"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,149 @@
-- Use interpolation in augment_event_data().
--
-- New schema version: 0.3.11
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This redefines augment_event_data() to use interpolation rather than
-- nearest neighbour. It now takes an argument indicating the maximum
-- allowed interpolation timespan. An overload with a default of ten
-- minutes is also provided, as an in situ replacement for the previous
-- version.
--
-- The ten minute default is based on Triggerfish headers behaviour seen
-- on crew 248 during soft starts.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

RAISE NOTICE 'Updating schema %', schema_name;
-- We need to set the search path because some of the trigger
-- functions reference other tables in survey schemas assuming
-- they are in the search path.
EXECUTE format('SET search_path TO %I,public', schema_name);

CREATE OR REPLACE PROCEDURE augment_event_data (maxspan numeric)
LANGUAGE sql
AS $$
-- Populate the timestamp of sequence / point events
UPDATE event_log_full
SET tstamp = tstamp_from_sequence_shot(sequence, point)
WHERE
tstamp IS NULL AND sequence IS NOT NULL AND point IS NOT NULL;

-- Populate the geometry of sequence / point events for which
-- there is raw_shots data.
UPDATE event_log_full
SET meta = meta ||
jsonb_build_object(
'geometry',
(
SELECT st_transform(geometry, 4326)::jsonb
FROM raw_shots rs
WHERE rs.sequence = event_log_full.sequence AND rs.point = event_log_full.point
)
)
WHERE
sequence IS NOT NULL AND point IS NOT NULL AND
NOT meta ? 'geometry';

-- Populate the geometry of time-based events
UPDATE event_log_full e
SET
meta = meta || jsonb_build_object('geometry',
(SELECT st_transform(g.geometry, 4326)::jsonb
FROM interpolate_geometry_from_tstamp(e.tstamp, maxspan) g))
WHERE
tstamp IS NOT NULL AND
sequence IS NULL AND point IS NULL AND
NOT meta ? 'geometry';

-- Get rid of null geometries
UPDATE event_log_full
SET
meta = meta - 'geometry'
WHERE
jsonb_typeof(meta->'geometry') = 'null';

-- Simplify the GeoJSON when the CRS is EPSG:4326
UPDATE event_log_full
SET
meta = meta #- '{geometry, crs}'
WHERE
meta->'geometry'->'crs'->'properties'->>'name' = 'EPSG:4326';

$$;

COMMENT ON PROCEDURE augment_event_data(numeric)
IS 'Populate missing timestamps and geometries in event_log_full';

CREATE OR REPLACE PROCEDURE augment_event_data ()
LANGUAGE sql
AS $$
CALL augment_event_data(600);
$$;

COMMENT ON PROCEDURE augment_event_data()
IS 'Overload of augment_event_data(maxspan numeric) with a maxspan value of 600 seconds.';
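
-- Usage sketches (the 300 s value is purely illustrative):
--
--   CALL augment_event_data();     -- default 600 s maximum span
--   CALL augment_event_data(300);  -- stricter five-minute span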

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
BEGIN

FOR row IN
SELECT schema_name FROM information_schema.schemata
WHERE schema_name LIKE 'survey_%'
ORDER BY schema_name
LOOP
CALL pg_temp.upgrade_survey_schema(row.schema_name);
END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.11"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.11"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,193 @@
-- Add a midnight_shots view and log_midnight_shots() procedure.
--
-- New schema version: 0.3.12
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This defines a midnight_shots view and a log_midnight_shots() procedure
-- (with some overloads). The view returns all points straddling midnight
-- UTC and belonging to the same sequence (so last shot of the day and
-- first shot of the next day).
--
-- The procedure inserts the corresponding events (optionally constrained
-- by an earliest and a latest date) in the event log, unless the events
-- already exist.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

RAISE NOTICE 'Updating schema %', schema_name;
-- We need to set the search path because some of the trigger
-- functions reference other tables in survey schemas assuming
-- they are in the search path.
EXECUTE format('SET search_path TO %I,public', schema_name);

CREATE OR REPLACE VIEW midnight_shots AS
WITH straddlers AS (
-- Get sequence numbers straddling midnight UTC
SELECT sequence
FROM final_shots
GROUP BY sequence
HAVING min(date(tstamp)) != max(date(tstamp))
),
ts AS (
-- Get earliest and latest timestamps for each day
-- for each of the above sequences.
-- This will return the timestamps for:
-- FSP, LDSP, FDSP, LSP.
SELECT
fs.sequence,
min(fs.tstamp) AS ts0,
max(fs.tstamp) AS ts1
FROM final_shots fs INNER JOIN straddlers USING (sequence)
GROUP BY fs.sequence, (date(fs.tstamp))
ORDER BY fs.sequence, date(fs.tstamp)
),
spts AS (
-- Filter out FSP, LSP from the above.
-- NOTE: This *should* in theory be able to cope with
-- a sequence longer than 24 hours (so with more than
-- one LDSP, FDSP) but that hasn't been tested.
SELECT DISTINCT
sequence,
min(ts1) OVER (PARTITION BY sequence) ldsp,
max(ts0) OVER (PARTITION BY sequence) fdsp
FROM ts
ORDER BY sequence
), evt AS (
SELECT
fs.tstamp,
fs.sequence,
point,
'Last shotpoint of the day' remarks,
'{LDSP}'::text[] labels
FROM final_shots fs
INNER JOIN spts ON fs.sequence = spts.sequence AND fs.tstamp = spts.ldsp
UNION SELECT
fs.tstamp,
fs.sequence,
point,
'First shotpoint of the day' remarks,
'{FDSP}'::text[] labels
FROM final_shots fs
INNER JOIN spts ON fs.sequence = spts.sequence AND fs.tstamp = spts.fdsp
ORDER BY tstamp
)
SELECT * FROM evt;
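
-- Usage sketch: inspect the candidate midnight events.
--
--   SELECT * FROM midnight_shots;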


CREATE OR REPLACE PROCEDURE log_midnight_shots (dt0 date, dt1 date)
LANGUAGE sql
AS $$
INSERT INTO event_log (sequence, point, remarks, labels, meta)
SELECT
sequence, point, remarks, labels,
'{"auto": true, "insertedBy": "log_midnight_shots"}'::jsonb
FROM midnight_shots ms
WHERE
(dt0 IS NULL OR ms.tstamp >= dt0) AND
(dt1 IS NULL OR ms.tstamp <= dt1) AND
NOT EXISTS (
SELECT 1
FROM event_log el
WHERE ms.sequence = el.sequence AND ms.point = el.point AND el.labels @> ms.labels
);

-- Delete any midnight shots that might have been inserted in the log
-- but are no longer relevant according to the final_shots data.
-- We operate on event_log, so the deletion is traceable.
DELETE
FROM event_log
WHERE id IN (
SELECT id
FROM event_log el
LEFT JOIN midnight_shots ms USING (sequence, point)
WHERE
'{LDSP,FDSP}'::text[] && el.labels -- &&: Do the arrays overlap?
AND ms.sequence IS NULL
);
$$;

COMMENT ON PROCEDURE log_midnight_shots (date, date)
IS 'Add midnight shots between two dates dt0 and dt1 to the event_log, unless the events already exist.';


CREATE OR REPLACE PROCEDURE log_midnight_shots (dt0 date)
LANGUAGE sql
AS $$
CALL log_midnight_shots(dt0, NULL);
$$;

COMMENT ON PROCEDURE log_midnight_shots (date)
IS 'Overload taking only a dt0 (adds events on that date or after).';

CREATE OR REPLACE PROCEDURE log_midnight_shots ()
LANGUAGE sql
AS $$
CALL log_midnight_shots(NULL, NULL);
$$;

COMMENT ON PROCEDURE log_midnight_shots ()
IS 'Overload taking no arguments (adds all missing events).';
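
-- Usage sketches (dates purely illustrative):
--
--   CALL log_midnight_shots();                            -- whole survey
--   CALL log_midnight_shots('2021-06-01');                -- from a date onwards
--   CALL log_midnight_shots('2021-06-01', '2021-06-30');  -- bounded range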

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
BEGIN

FOR row IN
SELECT schema_name FROM information_schema.schemata
WHERE schema_name LIKE 'survey_%'
ORDER BY schema_name
LOOP
CALL pg_temp.upgrade_survey_schema(row.schema_name);
END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.12"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.12"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
162
etc/db/upgrades/upgrade26-v0.3.13-fix-missing-shots-summary.sql
Normal file
@@ -0,0 +1,162 @@
-- Fix wrong number of missing shots in summary views
--
-- New schema version: 0.3.13
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- Fixes a bug in the `final_lines_summary` and `raw_lines_summary` views
-- which results in the number of missing shots being miscounted on jobs
-- using three sources.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

RAISE NOTICE 'Updating schema %', schema_name;
-- We need to set the search path because some of the trigger
-- functions reference other tables in survey schemas assuming
-- they are in the search path.
EXECUTE format('SET search_path TO %I,public', schema_name);


CREATE OR REPLACE VIEW raw_lines_summary AS
WITH summary AS (
SELECT DISTINCT rs.sequence,
first_value(rs.point) OVER w AS fsp,
last_value(rs.point) OVER w AS lsp,
first_value(rs.tstamp) OVER w AS ts0,
last_value(rs.tstamp) OVER w AS ts1,
count(rs.point) OVER w AS num_points,
count(pp.point) OVER w AS num_preplots,
public.st_distance(first_value(rs.geometry) OVER w, last_value(rs.geometry) OVER w) AS length,
((public.st_azimuth(first_value(rs.geometry) OVER w, last_value(rs.geometry) OVER w) * (180)::double precision) / pi()) AS azimuth
FROM (raw_shots rs
LEFT JOIN preplot_points pp USING (line, point))
WINDOW w AS (PARTITION BY rs.sequence ORDER BY rs.tstamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
)
SELECT rl.sequence,
rl.line,
s.fsp,
s.lsp,
s.ts0,
s.ts1,
(s.ts1 - s.ts0) AS duration,
s.num_points,
s.num_preplots,
(SELECT count(*) AS count
FROM missing_sequence_raw_points
WHERE missing_sequence_raw_points.sequence = s.sequence) AS missing_shots,
s.length,
s.azimuth,
rl.remarks,
rl.ntbp,
rl.meta
FROM (summary s
JOIN raw_lines rl USING (sequence));


CREATE OR REPLACE VIEW final_lines_summary AS
WITH summary AS (
SELECT DISTINCT fs.sequence,
first_value(fs.point) OVER w AS fsp,
last_value(fs.point) OVER w AS lsp,
first_value(fs.tstamp) OVER w AS ts0,
last_value(fs.tstamp) OVER w AS ts1,
count(fs.point) OVER w AS num_points,
public.st_distance(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) AS length,
((public.st_azimuth(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) * (180)::double precision) / pi()) AS azimuth
FROM final_shots fs
WINDOW w AS (PARTITION BY fs.sequence ORDER BY fs.tstamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
)
SELECT fl.sequence,
fl.line,
s.fsp,
s.lsp,
s.ts0,
s.ts1,
(s.ts1 - s.ts0) AS duration,
s.num_points,
( SELECT count(*) AS count
FROM missing_sequence_final_points
WHERE missing_sequence_final_points.sequence = s.sequence) AS missing_shots,
s.length,
s.azimuth,
fl.remarks,
fl.meta
FROM (summary s
JOIN final_lines fl USING (sequence));

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
current_db_version TEXT;
BEGIN

SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
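
-- NOTE: the version checks below compare strings lexicographically,
-- which is only reliable while the compared versions have the same
-- shape (e.g. '0.3.12' vs '0.3.13').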

IF current_db_version >= '0.3.13' THEN
RAISE EXCEPTION
USING MESSAGE='Patch already applied';
END IF;

IF current_db_version != '0.3.12' THEN
RAISE EXCEPTION
USING MESSAGE='Invalid database version: ' || current_db_version,
HINT='Ensure all previous patches have been applied.';
END IF;

FOR row IN
SELECT schema_name FROM information_schema.schemata
WHERE schema_name LIKE 'survey_%'
ORDER BY schema_name
LOOP
CALL pg_temp.upgrade_survey_schema(row.schema_name);
END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.13"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.13"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,122 @@
-- Add a project_configuration() function.
--
-- New schema version: 0.4.0
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This adapts the schema to the change in how project configurations are
-- handled (https://gitlab.com/wgp/dougal/software/-/merge_requests/29)
-- by creating a project_configuration() function which returns the
-- current project's configuration data.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

RAISE NOTICE 'Updating schema %', schema_name;
-- We need to set the search path because some of the trigger
-- functions reference other tables in survey schemas assuming
-- they are in the search path.
EXECUTE format('SET search_path TO %I,public', schema_name);

CREATE OR REPLACE FUNCTION project_configuration()
RETURNS jsonb
LANGUAGE plpgsql
AS $$
DECLARE
schema_name text;
configuration jsonb;
BEGIN

SELECT nspname
INTO schema_name
FROM pg_namespace
WHERE oid = (
SELECT pronamespace
FROM pg_proc
WHERE oid = 'project_configuration'::regproc::oid
);

SELECT meta
INTO configuration
FROM public.projects
WHERE schema = schema_name;

RETURN configuration;
END
$$;
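
-- Usage sketch: fetch the planner section of the current project's
-- configuration.
--
--   SELECT project_configuration()->'planner';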

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
current_db_version TEXT;
BEGIN

SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

IF current_db_version >= '0.4.0' THEN
RAISE EXCEPTION
USING MESSAGE='Patch already applied';
END IF;

IF current_db_version != '0.3.12' AND current_db_version != '0.3.13' THEN
RAISE EXCEPTION
USING MESSAGE='Invalid database version: ' || current_db_version,
HINT='Ensure all previous patches have been applied.';
END IF;

FOR row IN
SELECT schema_name FROM information_schema.schemata
WHERE schema_name LIKE 'survey_%'
ORDER BY schema_name
LOOP
CALL pg_temp.upgrade_survey_schema(row.schema_name);
END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.0"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.0"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,264 @@
-- Make adjust_planner() use project_configuration().
|
||||
--
|
||||
-- New schema version: 0.4.1
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This modifies adjust_planner() to use project_configuration()
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
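
-- A hand-run sketch (the schema name is hypothetical) for exercising the
-- new configuration lookup once this patch is applied and committed:
--
--   SET search_path TO survey_example,public;
--   CALL adjust_planner();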

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);


  CREATE OR REPLACE PROCEDURE adjust_planner()
  LANGUAGE plpgsql
  AS $$
  DECLARE
    _planner_config jsonb;
    _planned_line planned_lines%ROWTYPE;
    _lag interval;
    _last_sequence sequences_summary%ROWTYPE;
    _deltatime interval;
    _shotinterval interval;
    _tstamp timestamptz;
    _incr integer;
  BEGIN

    SET CONSTRAINTS planned_lines_pkey DEFERRED;

    SELECT project_configuration()->'planner'
    INTO _planner_config;

    SELECT *
    INTO _last_sequence
    FROM sequences_summary
    ORDER BY sequence DESC
    LIMIT 1;

    SELECT *
    INTO _planned_line
    FROM planned_lines
    WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;

    SELECT
      COALESCE(
        ((lead(ts0) OVER (ORDER BY sequence)) - ts1),
        make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer)
      )
    INTO _lag
    FROM planned_lines
    WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;

    _incr = sign(_last_sequence.lsp - _last_sequence.fsp);

    RAISE NOTICE '_planner_config: %', _planner_config;
    RAISE NOTICE '_last_sequence: %', _last_sequence;
    RAISE NOTICE '_planned_line: %', _planned_line;
    RAISE NOTICE '_incr: %', _incr;

    -- Does the latest sequence match a planned sequence?
    IF _planned_line IS NULL THEN -- No it doesn't
      RAISE NOTICE 'Latest sequence shot does not match a planned sequence';
      SELECT * INTO _planned_line FROM planned_lines ORDER BY sequence ASC LIMIT 1;
      RAISE NOTICE '_planned_line: %', _planned_line;

      IF _planned_line.sequence <= _last_sequence.sequence THEN
        RAISE NOTICE 'Renumbering the planned sequences starting from %', _planned_line.sequence + 1;
        -- Renumber the planned sequences starting from last shot sequence number + 1
        UPDATE planned_lines
        SET sequence = sequence + _last_sequence.sequence - _planned_line.sequence + 1;
      END IF;

      -- The correction to make to the first planned line's ts0 will be based on either the last
      -- sequence's EOL + default line change time or the current time, whichever is later.
      _deltatime := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1) + make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer), current_timestamp) - _planned_line.ts0;

      -- Is the start time of the first planned line in the past? (±5 mins)
      IF _planned_line.ts0 < (current_timestamp - make_interval(mins => 5)) THEN
        RAISE NOTICE 'First planned line is in the past. Adjusting times by %', _deltatime;
        -- Adjust the start / end time of the planned lines by assuming that we are at
        -- `defaultLineChangeDuration` minutes away from SOL of the first planned line.
        UPDATE planned_lines
        SET
          ts0 = ts0 + _deltatime,
          ts1 = ts1 + _deltatime;
      END IF;

    ELSE -- Yes it does
      RAISE NOTICE 'Latest sequence does match a planned sequence: %, %', _planned_line.sequence, _planned_line.line;

      -- Is it online?
      IF EXISTS(SELECT 1 FROM raw_lines_files WHERE sequence = _last_sequence.sequence AND hash = '*online*') THEN
        -- Yes it is
        RAISE NOTICE 'Sequence % is online', _last_sequence.sequence;

        -- Let us get the SOL from the events log if we can
        RAISE NOTICE 'Trying to set fsp, ts0 from events log FSP, FGSP';
        WITH e AS (
          SELECT * FROM event_log
          WHERE
            sequence = _last_sequence.sequence
            AND ('FSP' = ANY(labels) OR 'FGSP' = ANY(labels))
          ORDER BY tstamp LIMIT 1
        )
        UPDATE planned_lines
        SET
          fsp = COALESCE(e.point, fsp),
          ts0 = COALESCE(e.tstamp, ts0)
        FROM e
        WHERE planned_lines.sequence = _last_sequence.sequence;

        -- Shot interval
        _shotinterval := (_last_sequence.ts1 - _last_sequence.ts0) / abs(_last_sequence.lsp - _last_sequence.fsp);

        RAISE NOTICE 'Estimating EOL from current shot interval: %', _shotinterval;

        SELECT (abs(lsp-fsp) * _shotinterval + ts0) - ts1
        INTO _deltatime
        FROM planned_lines
        WHERE sequence = _last_sequence.sequence;

        ---- Set ts1 for the current sequence
        --UPDATE planned_lines
        --SET
        --ts1 = (abs(lsp-fsp) * _shotinterval) + ts0
        --WHERE sequence = _last_sequence.sequence;

        RAISE NOTICE 'Adjustment is %', _deltatime;

        IF abs(EXTRACT(EPOCH FROM _deltatime)) < 8 THEN
          RAISE NOTICE 'Adjustment too small (< 8 s), so not applying it';
          RETURN;
        END IF;

        -- Adjust ts1 for the current sequence
        UPDATE planned_lines
        SET ts1 = ts1 + _deltatime
        WHERE sequence = _last_sequence.sequence;

        -- Now shift all sequences after
        UPDATE planned_lines
        SET ts0 = ts0 + _deltatime, ts1 = ts1 + _deltatime
        WHERE sequence > _last_sequence.sequence;

        RAISE NOTICE 'Deleting planned sequences before %', _planned_line.sequence;
        -- Remove all previous planner entries.
        DELETE
        FROM planned_lines
        WHERE sequence < _last_sequence.sequence;

      ELSE
        -- No it isn't
        RAISE NOTICE 'Sequence % is offline', _last_sequence.sequence;

        -- We were supposed to finish at _planned_line.ts1 but we finished at:
        _tstamp := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1), current_timestamp);
        -- WARNING Next line is for testing only
        --_tstamp := COALESCE(_last_sequence.ts1_final, _last_sequence.ts1);
        -- So we need to adjust timestamps by:
        _deltatime := _tstamp - _planned_line.ts1;

        RAISE NOTICE 'Planned end: %, actual end: % (%, %)', _planned_line.ts1, _tstamp, _planned_line.sequence, _last_sequence.sequence;
        RAISE NOTICE 'Shifting times by % for sequences > %', _deltatime, _planned_line.sequence;
        -- NOTE: This won't work if sequences are not, err… sequential.
        -- NOTE: This has been known to happen in 2020.
        UPDATE planned_lines
        SET
          ts0 = ts0 + _deltatime,
          ts1 = ts1 + _deltatime
        WHERE sequence > _planned_line.sequence;

        RAISE NOTICE 'Deleting planned sequences up to %', _planned_line.sequence;
        -- Remove all previous planner entries.
        DELETE
        FROM planned_lines
        WHERE sequence <= _last_sequence.sequence;

      END IF;

    END IF;
  END;
  $$;


END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.1' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.0' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
          HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.1"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.1"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--

@@ -0,0 +1,98 @@
-- Fix wrong number of missing shots in summary views
--
-- New schema version: 0.4.2
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This modifies binning_parameters() to use project_configuration()
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION binning_parameters() RETURNS jsonb
  LANGUAGE sql STABLE LEAKPROOF PARALLEL SAFE
  AS $$
    SELECT project_configuration()->'binning' binning;
  $$;

END;
$outer$ LANGUAGE plpgsql;
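
-- A quick check sketch (the schema name is hypothetical): after the upgrade,
-- the binning section of the project metadata should come back via the
-- rewritten function:
--
--   SET search_path TO survey_example,public;
--   SELECT binning_parameters();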

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.2' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.1' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
          HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.2"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.2"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
164
etc/db/upgrades/upgrade30-v0.4.3-large-notification-payloads.sql
Normal file
@@ -0,0 +1,164 @@
-- Support notification payloads larger than Postgres' NOTIFY limit.
--
-- New schema version: 0.4.3
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This creates a new table where large notification payloads are stored
-- temporarily and from which they might be recalled by the notification
-- listeners. It also creates a purge_notifications() procedure used to
-- clean up old notifications from the notifications log and finally,
-- modifies notify() to support these changes. When a large payload is
-- encountered, the payload is stored in the notify_payloads table and
-- a trimmed down version containing a payload_id is sent to listeners
-- instead. Listeners can then query notify_payloads to retrieve the full
-- payloads. It is the application layer's responsibility to delete old
-- notifications.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--
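
-- A minimal listener-side sketch (the id value 42 is hypothetical): when a
-- received notification carries a payload_id instead of the full payload,
-- the payload can be recalled from the table and old entries purged:
--
--   SELECT payload FROM public.notify_payloads WHERE id = 42;
--   CALL public.purge_notifications(300);  -- drop payloads older than 300 s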

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_schema () AS $outer$
BEGIN

  RAISE NOTICE 'Updating public schema';
  -- This upgrade touches the public schema only, so pin the search
  -- path to it.
  SET search_path TO public;

  CREATE TABLE IF NOT EXISTS public.notify_payloads (
    id SERIAL,
    tstamp timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP,
    payload text NOT NULL DEFAULT '',
    PRIMARY KEY (id)
  );

  CREATE INDEX IF NOT EXISTS notify_payload_tstamp ON notify_payloads (tstamp);

  CREATE OR REPLACE FUNCTION public.notify() RETURNS trigger
  LANGUAGE plpgsql
  AS $$
  DECLARE
    channel text := TG_ARGV[0];
    pid text;
    payload text;
    notification text;
    payload_id integer;
  BEGIN

    SELECT projects.pid INTO pid FROM projects WHERE schema = TG_TABLE_SCHEMA;

    payload := json_build_object(
      'tstamp', CURRENT_TIMESTAMP,
      'operation', TG_OP,
      'schema', TG_TABLE_SCHEMA,
      'table', TG_TABLE_NAME,
      'old', row_to_json(OLD),
      'new', row_to_json(NEW),
      'pid', pid
    )::text;

    IF octet_length(payload) < 1000 THEN
      PERFORM pg_notify(channel, payload);
    ELSE
      -- The payload is too large to send directly. Store it in
      -- notify_payloads, keyed by an autogenerated ID, and send a trimmed
      -- notification carrying only that payload_id; it is then up to the
      -- listeners to fetch the original payload if interested. Older
      -- payloads must be expired (see purge_notifications below) in the
      -- interest of conserving space.

      INSERT INTO notify_payloads (payload) VALUES (payload) RETURNING id INTO payload_id;

      notification := json_build_object(
        'tstamp', CURRENT_TIMESTAMP,
        'operation', TG_OP,
        'schema', TG_TABLE_SCHEMA,
        'table', TG_TABLE_NAME,
        'pid', pid,
        'payload_id', payload_id
      )::text;

      PERFORM pg_notify(channel, notification);
      RAISE INFO 'Payload over limit';
    END IF;
    RETURN NULL;
  END;
  $$;

  -- NOTE: OR REPLACE keeps the script re-runnable, as promised in the header.
  CREATE OR REPLACE PROCEDURE public.purge_notifications (age_seconds numeric DEFAULT 120) AS $$
    DELETE FROM notify_payloads WHERE EXTRACT(epoch FROM CURRENT_TIMESTAMP - tstamp) > age_seconds;
  $$ LANGUAGE sql;


END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.3' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.2' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
          HINT='Ensure all previous patches have been applied.';
  END IF;

  -- This upgrade modifies the `public` schema only, not individual
  -- project schemas.
  CALL pg_temp.upgrade_schema();

END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_schema ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.3"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.3"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--

@@ -0,0 +1,104 @@
-- Add event_log_changes function
--
-- New schema version: 0.4.4
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This adds a function event_log_changes which returns the subset of
-- events from event_log_full which have been modified on or after a
-- given timestamp.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--
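
-- Usage sketch (the timestamp is an arbitrary example): fetch the events
-- whose validity changed after a given instant, oldest first:
--
--   SELECT * FROM event_log_changes('2023-01-01T00:00:00Z'::timestamptz);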

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION event_log_changes(ts0 timestamptz)
  RETURNS SETOF event_log_full
  LANGUAGE sql
  AS $$
    SELECT *
    FROM event_log_full
    WHERE lower(validity) > ts0 OR (upper(validity) IS NOT NULL AND upper(validity) > ts0)
    ORDER BY lower(validity);
  $$;

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.4' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.3' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
          HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.4"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.4"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -4,21 +4,21 @@
<head>
  <title>{{DglProjectId}} Line Log Report – {%if Sequences.length > 1 %}Multiple sequences{% else %}Sequence {{Sequences[0].SequenceNumber}}{% endif %}</title>
  <style>

    @media print {
      body, html, table {
        font-size: 10px !important;
      }

      a {
        text-decoration: none;
      }

      tr.aside {
        font-size: 8px !important;
      }
    }

    html {
      font-size: 16px;
      text-rendering: optimizeLegibility;
@@ -180,7 +180,7 @@ footer {
      font-size: smaller;
      border-top: thin solid;
      min-height: 25px;

      position: fixed;
      bottom: 0;
      background: white;
@@ -246,13 +246,24 @@ footer {
      {% if Begin.Reshoot %}Reshoot{% endif -%}

      <div class="comments">

        {% if Sequence.DglSequenceComments %}
        {% if Sequence.DglSequenceComments[0] %}
        <h3>Acquisition Comments</h3>
        <div class="comment">
          {{ Sequence.DglSequenceComments[0] | markdown }}
        </div>
        {% endif %}
        {% if Sequence.DglSequenceComments[1] %}
        <h3>Processing Comments</h3>
        <div class="comment">
          {{ Sequence.DglSequenceComments[1] | markdown }}
        </div>
        {% endif %}
        {% else %}
        <h3>Sequence comments</h3>

        {% for Comment in Sequence.DglSequenceComments %}
        <div class="comment">{{ Comment | markdown }}</div>
        {% endfor %}

        {% if not Sequence.DglSequenceComments %}<div class="nocomment">(Nil)</div>{% endif %}
        <div class="nocomment">(Nil)</div>
        {% endif %}
      </div>

      <div class="events">

@@ -165,7 +165,7 @@
      .filter(gun => Math.abs(gun[firetime]-gun[aimpoint]) >= parameters.gunTimingWarning && Math.abs(gun[firetime]-gun[aimpoint]) <= parameters.gunTiming)
      .forEach(gun => {
        const value = Math.abs(gun[firetime]-gun[aimpoint]);
        result.push(`Delta error: source ${gun[2]}, string ${gun[0]}, gun ${gun[1]}: ${parameters.gunTimingWarning} ≤ ${value.toFixed(2)} ≤ ${parameters.gunTiming}`);
        result.push(`Delta warning: source ${gun[2]}, string ${gun[0]}, gun ${gun[1]}: ${parameters.gunTimingWarning} ≤ ${value.toFixed(2)} ≤ ${parameters.gunTiming}`);
      });
    }
    if (result.length) {
46
lib/www/client/source/package-lock.json
generated
@@ -9,7 +9,7 @@
    "version": "0.0.0",
    "license": "UNLICENSED",
    "dependencies": {
      "@mdi/font": "^5.6.55",
      "@mdi/font": "^7.2.96",
      "core-js": "^3.6.5",
      "d3": "^7.0.1",
      "jwt-decode": "^3.0.0",
@@ -33,7 +33,7 @@
      "@vue/cli-plugin-router": "~4.4.0",
      "@vue/cli-plugin-vuex": "~4.4.0",
      "@vue/cli-service": "^4.5.13",
      "sass": "1.32",
      "sass": "~1.32",
      "sass-loader": "^8.0.0",
      "stylus": "^0.54.8",
      "stylus-loader": "^3.0.2",
@@ -1763,9 +1763,9 @@
      }
    },
    "node_modules/@mdi/font": {
      "version": "5.9.55",
      "resolved": "https://registry.npmjs.org/@mdi/font/-/font-5.9.55.tgz",
      "integrity": "sha512-jswRF6q3eq8NWpWiqct6q+6Fg/I7nUhrxYJfiEM8JJpap0wVJLQdbKtyS65GdlK7S7Ytnx3TTi/bmw+tBhkGmg=="
      "version": "7.2.96",
      "resolved": "https://registry.npmjs.org/@mdi/font/-/font-7.2.96.tgz",
      "integrity": "sha512-e//lmkmpFUMZKhmCY9zdjRe4zNXfbOIJnn6xveHbaV2kSw5aJ5dLXUxcRt1Gxfi7ZYpFLUWlkG2MGSFAiqAu7w=="
    },
    "node_modules/@mrmlnc/readdir-enhanced": {
      "version": "2.2.1",
@@ -3844,14 +3844,24 @@
      }
    },
    "node_modules/caniuse-lite": {
      "version": "1.0.30001317",
      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001317.tgz",
      "integrity": "sha512-xIZLh8gBm4dqNX0gkzrBeyI86J2eCjWzYAs40q88smG844YIrN4tVQl/RhquHvKEKImWWFIVh1Lxe5n1G/N+GQ==",
      "version": "1.0.30001476",
      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001476.tgz",
      "integrity": "sha512-JmpktFppVSvyUN4gsLS0bShY2L9ZUslHLE72vgemBkS43JD2fOvKTKs+GtRwuxrtRGnwJFW0ye7kWRRlLJS9vQ==",
      "dev": true,
      "funding": {
        "type": "opencollective",
        "url": "https://opencollective.com/browserslist"
      }
      "funding": [
        {
          "type": "opencollective",
          "url": "https://opencollective.com/browserslist"
        },
        {
          "type": "tidelift",
          "url": "https://tidelift.com/funding/github/npm/caniuse-lite"
        },
        {
          "type": "github",
          "url": "https://github.com/sponsors/ai"
        }
      ]
    },
    "node_modules/case-sensitive-paths-webpack-plugin": {
      "version": "2.4.0",
@@ -16432,9 +16442,9 @@
      }
    },
    "@mdi/font": {
      "version": "5.9.55",
      "resolved": "https://registry.npmjs.org/@mdi/font/-/font-5.9.55.tgz",
      "integrity": "sha512-jswRF6q3eq8NWpWiqct6q+6Fg/I7nUhrxYJfiEM8JJpap0wVJLQdbKtyS65GdlK7S7Ytnx3TTi/bmw+tBhkGmg=="
      "version": "7.2.96",
      "resolved": "https://registry.npmjs.org/@mdi/font/-/font-7.2.96.tgz",
      "integrity": "sha512-e//lmkmpFUMZKhmCY9zdjRe4zNXfbOIJnn6xveHbaV2kSw5aJ5dLXUxcRt1Gxfi7ZYpFLUWlkG2MGSFAiqAu7w=="
    },
    "@mrmlnc/readdir-enhanced": {
      "version": "2.2.1",
@@ -18175,9 +18185,9 @@
      }
    },
    "caniuse-lite": {
      "version": "1.0.30001317",
      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001317.tgz",
      "integrity": "sha512-xIZLh8gBm4dqNX0gkzrBeyI86J2eCjWzYAs40q88smG844YIrN4tVQl/RhquHvKEKImWWFIVh1Lxe5n1G/N+GQ==",
      "version": "1.0.30001476",
      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001476.tgz",
      "integrity": "sha512-JmpktFppVSvyUN4gsLS0bShY2L9ZUslHLE72vgemBkS43JD2fOvKTKs+GtRwuxrtRGnwJFW0ye7kWRRlLJS9vQ==",
      "dev": true
    },
    "case-sensitive-paths-webpack-plugin": {

@@ -3,11 +3,11 @@
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "serve": "vue-cli-service serve",
    "serve": "vue-cli-service serve --host=0.0.0.0",
    "build": "vue-cli-service build"
  },
  "dependencies": {
    "@mdi/font": "^5.6.55",
    "@mdi/font": "^7.2.96",
    "core-js": "^3.6.5",
    "d3": "^7.0.1",
    "jwt-decode": "^3.0.0",

@@ -188,9 +188,9 @@ export default {
    labelToItem (k) {
      return {
        text: k,
        icon: this.labels[k].view?.icon,
        colour: this.labels[k].view?.colour,
        title: this.labels[k].view?.description
        icon: this.labels?.[k]?.view?.icon,
        colour: this.labels?.[k]?.view?.colour,
        title: this.labels?.[k]?.view?.description
      };
    },

@@ -44,7 +44,7 @@
  <template v-slot:activator="{ on, attrs }">
    <v-text-field
      v-model="tsDate"
      :disabled="!!(sequence || point || entrySequence || entryPoint)"
      :disabled="!!(entrySequence || entryPoint)"
      label="Date"
      suffix="UTC"
      prepend-icon="mdi-calendar"
@@ -64,7 +64,7 @@
  <v-col>
    <v-text-field
      v-model="tsTime"
      :disabled="!!(sequence || point || entrySequence || entryPoint)"
      :disabled="!!(entrySequence || entryPoint)"
      label="Time"
      suffix="UTC"
      prepend-icon="mdi-clock-outline"
@@ -133,6 +133,7 @@
      item-text="text"
      return-object
      label="Remarks"
      hint="Placeholders: @DMS@, @DEG@, @EN@, @WD@, @BSP@, @CMG@, …"
      prepend-icon="mdi-text-box-outline"
      append-outer-icon="mdi-magnify"
      @click:append-outer="(e) => remarksMenu = e"
@@ -255,6 +256,15 @@
    >
      Cancel
    </v-btn>
    <v-btn v-if="!id && (entrySequence || entryPoint)"
      color="info"
      text
      title="Enter an event by time"
      @click="timed"
    >
      <v-icon left small>mdi-clock-outline</v-icon>
      Timed
    </v-btn>
    <v-spacer></v-spacer>
    <v-btn
      :disabled="!canSave"
@@ -631,6 +641,14 @@ export default {
      }
    },

    timed () {
      const tstamp = (new Date()).toISOString();
      this.entrySequence = null;
      this.entryPoint = null;
      this.tsDate = tstamp.substr(0, 10);
      this.tsTime = tstamp.substr(11, 8);
    },

    close () {
      this.entryLabels = this.selectedLabels.map(this.labelToItem)
      this.$emit("input", false);

@@ -2,8 +2,8 @@
  <div class="line-status" v-if="sequences.length == 0">
    <slot name="empty"></slot>
  </div>
  <div class="line-status" v-else-if="sequenceHref">
    <router-link v-for="sequence in sequences" :key="sequence.sequence"
  <div class="line-status" v-else-if="sequenceHref || plannedSequenceHref || pendingReshootHref">
    <router-link v-for="sequence in sequences" :key="sequence.sequence" v-if="sequenceHref"
      class="sequence"
      :class="sequence.status"
      :style="style(sequence)"
@@ -11,15 +11,41 @@
      :to="sequenceHref(sequence)"
    >
    </router-link>
    <router-link v-for="sequence in plannedSequences" :key="sequence.sequence" v-if="plannedSequenceHref"
      class="sequence planned"
      :style="style(sequence)"
      :title="title(sequence, 'planned')"
      :to="plannedSequenceHref(sequence)"
    >
    </router-link>
    <router-link v-for="(line, key) in pendingReshoots" :key="key" v-if="pendingReshootHref"
      class="sequence reshoot"
      :style="style(line)"
      :title="title(line, 'reshoot')"
      :to="pendingReshootHref(line)"
    >
    </router-link>
  </div>
  <div class="line-status" v-else>
    <div v-for="sequence in sequences"
    <div v-for="sequence in sequences" :key="sequence.sequence"
      class="sequence"
      :class="sequence.status"
      :style="style(sequence)"
      :title="title(sequence)"
    >
    </div>
    <div v-for="sequence in plannedSequences" :key="sequence.sequence"
      class="sequence planned"
      :style="style(sequence)"
      :title="title(sequence, 'planned')"
    >
    </div>
    <div v-for="(line, key) in pendingReshoots" :key="key"
      class="sequence reshoot"
      :style="style(line)"
      :title="title(line, 'reshoot')"
    >
    </div>
  </div>
</template>

@@ -48,6 +74,8 @@
    background-color blue
  &.planned
    background-color magenta
  &.reshoot
    background repeating-linear-gradient(-45deg, rgba(255,0,255,0.302), brown 5px, rgba(247, 247, 247, 0.1) 5px, rgba(242, 241, 241, 0.08) 10px), repeating-linear-gradient(45deg, rgba(255,0,255,0.302), brown 5px, rgba(247, 247, 247, 0.1) 5px, rgba(242, 241, 241, 0.08) 10px)
</style>

<script>
@@ -58,7 +86,11 @@ export default {
  props: {
    preplot: Object,
    sequences: Array,
    "sequence-href": Function
    "sequence-href": Function,
    "planned-sequences": Array,
    "planned-sequence-href": Function,
    "pending-reshoots": Array,
    "pending-reshoot-href": Function
  },

  methods: {
@@ -68,13 +100,13 @@ export default {
      ? s.fsp_final
      : s.status == "ntbp"
        ? (s.fsp_final || s.fsp)
        : s.fsp; /* status == "raw" */
        : s.fsp; /* status == "raw" or planned sequence or pending reshoot */

    const lsp = s.status == "final"
      ? s.lsp_final
      : s.status == "ntbp"
        ? (s.lsp_final || s.lsp)
        : s.lsp; /* status == "raw" */
        : s.lsp; /* status == "raw" or planned sequence or pending reshoot */

    const pp0 = Math.min(this.preplot.fsp, this.preplot.lsp);
    const pp1 = Math.max(this.preplot.fsp, this.preplot.lsp);
@@ -91,20 +123,24 @@ export default {
      return values;
    },

    title (s) {
      const status = s.status == "final"
        ? "Final"
        : s.status == "raw"
          ? "Acquired"
          : s.status == "ntbp"
            ? "NTBP"
            : s.status == "planned"
              ? "Planned"
              : s.status;
    title (s, type) {
      if (s.status || type == "planned") {
        const status = s.status == "final"
          ? "Final"
          : s.status == "raw"
            ? "Acquired"
            : s.status == "ntbp"
              ? "NTBP"
              : type == "planned"
                ? "Planned"
                : s.status;

      const remarks = "\n"+[s.remarks, s.remarks_final].join("\n").trim()
        const remarks = "\n"+[s.remarks, s.remarks_final].join("\n").trim()

      return `Sequence ${s.sequence} – ${status} (${s.fsp_final || s.fsp}−${s.lsp_final || s.lsp})${remarks}`;
        return `Sequence ${s.sequence} – ${status} (${s.fsp_final || s.fsp}−${s.lsp_final || s.lsp})${remarks}`;
      } else if (type == "reshoot") {
        return `Pending reshoot (${s.fsp}‒${s.lsp})${s.remarks? "\n"+s.remarks : ""}`;
      }
    }
  }

135
lib/www/client/source/src/components/qc-acceptance.vue
Normal file
@@ -0,0 +1,135 @@
<template>

  <v-hover v-slot:default="{hover}" v-if="!isEmpty(item)">
    <span>
      <v-btn v-if="!isAccepted(item)"
        :class="{'text--disabled': !hover}"
        icon
        small
        color="primary"
        :title="isMultiple(item) ? 'Accept all' : 'Accept'"
        @click.stop="accept(item)">
        <v-icon small :color="isAccepted(item) ? 'green' : ''">
          {{ isMultiple(item) ? 'mdi-check-all' : 'mdi-check' }}
        </v-icon>
      </v-btn>
      <v-btn v-if="someAccepted(item)"
        :class="{'text--disabled': !hover}"
        icon
        small
        color="primary"
        :title="isMultiple(item) ? 'Restore all' : 'Restore'"
        @click.stop="unaccept(item)">
        <v-icon small>mdi-restore</v-icon>
      </v-btn>
    </span>
  </v-hover>

</template>

<script>

export default {
  name: 'DougalQcAcceptance',

  props: {
    item: { type: Object }
  },

  methods: {

    isAccepted (item) {
      if (item._children) {
        return item._children.every(child => this.isAccepted(child));
      }

      if (item.labels) {
        return item.labels.includes("QCAccepted");
      }

      return false;
    },

    someAccepted (item) {
      if (item._children) {
        return item._children.some(child => this.someAccepted(child));
      }

      if (item.labels) {
        return item.labels.includes("QCAccepted");
      }

      return false;
    },

    isEmpty (item) {
      return item._children?.length === 0;
    },

    isMultiple (item) {
      return item._children?.length;
    },

    // Collect the points under `item` that the given action applies to:
    // "accept" gathers not-yet-accepted points, "unaccept" gathers
    // already-accepted ones.
    action (action, item) {
      const items = [];

      const iterate = (item) => {
        if (item._kind == "point") {

          if (this.isAccepted(item)) {
            if (action == "unaccept") {
              items.push(item);
            }
          } else {
            if (action == "accept") {
              items.push(item);
            }
          }

        } else if (item._kind == "sequence" || item._kind == "test") {

          if (item._children) {

            for (const child of item._children) {
              iterate(child);
            }

          }

          if (item._shots) {

            // Iterate the shots themselves (iterating _children here
            // again would have double-counted them).
            for (const child of item._shots) {
              iterate(child);
            }

          }

        }

      }

      iterate(item);
      return items;
    },

    accept (item) {
      const items = this.action('accept', item);
      if (items.length) {
        this.$emit('accept', items);
      }
    },

    unaccept (item) {
      const items = this.action('unaccept', item);
      if (items.length) {
        this.$emit('unaccept', items);
      }
    }

  }

}

</script>
@@ -5,6 +5,11 @@ import api from './modules/api'
import user from './modules/user'
import snack from './modules/snack'
import project from './modules/project'
import event from './modules/event'
import label from './modules/label'
import sequence from './modules/sequence'
import plan from './modules/plan'
import line from './modules/line'
import notify from './modules/notify'

Vue.use(Vuex)
@@ -15,6 +20,11 @@ export default new Vuex.Store({
    user,
    snack,
    project,
    event,
    label,
    sequence,
    plan,
    line,
    notify
  }
})

@@ -16,7 +16,7 @@ async function api ({state, commit, dispatch}, [resource, init = {}, cb]) {
  const url = /^https?:\/\//i.test(resource) ? resource : (state.apiUrl + resource);
  const res = await fetch(url, init);
  if (typeof cb === 'function') {
    cb(null, res);
    await cb(null, res);
  }
  if (res.ok) {

@@ -35,7 +35,14 @@ async function api ({state, commit, dispatch}, [resource, init = {}, cb]) {
      throw err;
    }
  } else {
    await dispatch('showSnack', [res.statusText, "warning"]);
    let message = res.statusText;
    // Prefer the message from a JSON error body when one is provided.
    // (Optional chaining guards against a missing Content-Type header.)
    if (res.headers.get("Content-Type")?.match(/^application\/json/i)) {
      const body = await res.json();
      if (body.message) {
        message = body.message;
      }
    }
    await dispatch('showSnack', [message, "warning"]);
  }
} catch (err) {
  if (err && err.name == "AbortError") return;

129
lib/www/client/source/src/store/modules/event/actions.js
Normal file
@@ -0,0 +1,129 @@

/** Fetch events from server
 */
async function refreshEvents ({commit, dispatch, state, rootState}, [modifiedAfter] = []) {

  if (!modifiedAfter) {
    modifiedAfter = state.timestamp;
  }

  if (state.loading) {
    commit('abortEventsLoading');
  }

  commit('setEventsLoading');
  const pid = rootState.project.projectId;
  const url = modifiedAfter
    ? `/project/${pid}/event/changes/${(new Date(modifiedAfter)).toISOString()}?unique=t`
    : `/project/${pid}/event`;
  const init = {
    signal: state.loading.signal
  };
  const res = await dispatch('api', [url, init]);

  if (res) {
    if (modifiedAfter) {
      commit('setModifiedEvents', res);
    } else {
      commit('setEvents', res);
    }
    commit('setEventsTimestamp');
  }
  commit('clearEventsLoading');

}

/** Return a subset of events from state.events
 */
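// Note: the filtering, sorting and paging below happen client-side over the
// frozen state.events array; the options argument looks like a Vuetify
// data-table options object (sortBy, sortDesc, itemsPerPage, page), though
// that pairing is an assumption, not something enforced here.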
async function getEvents ({commit, dispatch, state}, [projectId, {sequence, date0, date1, sortBy, sortDesc, itemsPerPage, page, text, label}]) {
  let filteredEvents = [...state.events];

  if (sortBy) {

    sortBy.forEach( (key, idx) => {
      filteredEvents.sort( (el0, el1) => {
        const a = el0?.[key];
        const b = el1?.[key];
        if (a < b) {
          return -1;
        } else if (a > b) {
          return 1;
        } else if (a == b) {
          return 0;
        } else if (a && !b) {
          return 1;
        } else if (!a && b) {
          return -1;
        } else {
          return 0;
        }
      });
      if (sortDesc && sortDesc[idx] === true) {
        filteredEvents.reverse();
      }
    });

  }

  if (sequence) {
    filteredEvents = filteredEvents.filter( event => event.sequence == sequence );
  }

  if (date0 && date1) {
    filteredEvents = filteredEvents.filter( event =>
      event.tstamp.substr(0, 10) >= date0 && event.tstamp.substr(0, 10) <= date1
    );
  } else if (date0) {
    filteredEvents = filteredEvents.filter( event => event.tstamp.substr(0, 10) == date0 );
  }

  if (text) {
    const tstampFilter = (value, search, item) => {
      return textFilter(value, search, item);
    };

    const numberFilter = (value, search, item) => {
      return value == search;
    };

    const textFilter = (value, search, item) => {
      return String(value).toLowerCase().includes(search.toLowerCase());
    };

    const searchFunctions = {
      tstamp: tstampFilter,
      sequence: numberFilter,
      point: numberFilter,
      remarks: textFilter,
      // Guard against events without labels.
      labels: (value, search, item) => !!value && value.some(label => textFilter(label, search, item))
    };

    filteredEvents = filteredEvents.filter ( event => {
      for (let key in searchFunctions) {
        const fn = searchFunctions[key];
        if (fn(event[key], text, event)) {
          return true;
        }
      }
      return false;
    });
  }

  if (label) {
    filteredEvents = filteredEvents.filter( event => event.labels?.includes(label) );
  }

  const count = filteredEvents.length;

  if (itemsPerPage && itemsPerPage > 0) {
    const offset = (page > 0)
      ? (page-1) * itemsPerPage
      : 0;

    filteredEvents = filteredEvents.slice(offset, offset+itemsPerPage);
  }

  return {events: filteredEvents, count};
}

export default { refreshEvents, getEvents };
14
lib/www/client/source/src/store/modules/event/getters.js
Normal file
@@ -0,0 +1,14 @@

function events (state) {
  return state.events;
}

function eventCount (state) {
  return state.events?.length ?? 0;
}

function eventsLoading (state) {
  return !!state.loading;
}

export default { events, eventCount, eventsLoading };
6
lib/www/client/source/src/store/modules/event/index.js
Normal file
@@ -0,0 +1,6 @@
import state from './state'
import getters from './getters'
import actions from './actions'
import mutations from './mutations'

export default { state, getters, actions, mutations };
73
lib/www/client/source/src/store/modules/event/mutations.js
Normal file
@@ -0,0 +1,73 @@

function setEvents (state, events) {
  // We don't need or want the events array to be reactive, since
  // it can be tens of thousands of items long.
  state.events = Object.freeze(events);
}

/** Selectively replace / insert / delete events
 * from state.events.
 *
 * modifiedEvents is the result of
 * /api/project/:project/event/changes?unique=t
 */
function setModifiedEvents (state, modifiedEvents) {
  const events = [...state.events];
  for (let evt of modifiedEvents) {
    const idx = events.findIndex(i => i.id == evt.id);
    if (idx != -1) {
      if (evt.is_deleted) {
        events.splice(idx, 1);
      } else {
        delete evt.is_deleted;
        events.splice(idx, 1, evt);
      }
    } else {
      if (!evt.is_deleted) {
        delete evt.is_deleted;
        events.unshift(evt);
      }
    }
  }
  setEvents(state, events);
}

function setEventsLoading (state, abortController = new AbortController()) {
  state.loading = abortController;
}

function clearEventsLoading (state) {
  state.loading = null;
}

function setEventsTimestamp (state, timestamp = new Date()) {
  if (timestamp === true) {
    const tstamp = state.events
      .map( event => event.modified_on )
      .reduce( (acc, cur) => acc > cur ? acc : cur );
    state.timestamp = tstamp ? new Date(tstamp) : new Date();
  } else {
    state.timestamp = timestamp;
  }
}

function setEventsETag (state, etag) {
  state.etag = etag;
}

function abortEventsLoading (state) {
  if (state.loading) {
    state.loading.abort();
  }
  state.loading = null;
}

export default {
  setEvents,
  setModifiedEvents,
  setEventsLoading,
  clearEventsLoading,
  abortEventsLoading,
  setEventsTimestamp,
  setEventsETag
};
8
lib/www/client/source/src/store/modules/event/state.js
Normal file
@@ -0,0 +1,8 @@
const state = () => ({
  events: Object.freeze([]),
  loading: null,
  timestamp: null,
  etag: null,
});

export default state;
106
lib/www/client/source/src/store/modules/label/actions.js
Normal file
@@ -0,0 +1,106 @@

/** Fetch labels from server
 */
async function refreshLabels ({commit, dispatch, state, rootState}) {

  if (state.loading) {
    commit('abortLabelsLoading');
  }

  commit('setLabelsLoading');
  const pid = rootState.project.projectId;
  const url = `/project/${pid}/label`;
  const init = {
    signal: state.loading.signal
  };
  const res = await dispatch('api', [url, init]);

  if (res) {
    commit('setLabels', res);
    commit('setLabelsTimestamp');
  }
  commit('clearLabelsLoading');
}

/** Return a subset of labels from state.labels.
 *
 * Note that, unlike other actions in the get* family,
 * the return value is not isomorphic to the state.
 *
 * While state.labels is an object, getLabels() returns
 * an array with each item having the shape:
 *
 *   { label: "labelName", view: {…}, model: {…} }
 *
 * This is intended to be useful, for instance, for a table
 * of labels.
 */
async function getLabels ({commit, dispatch, state}, [projectId, {sortBy, sortDesc, itemsPerPage, page, text, label}]) {

  let filteredLabels = Object.entries(state.labels).map(i => {
    return {
      label: i[0],
      ...i[1]
    }
  });

  if (sortBy) {

    sortBy.forEach( (key, idx) => {
      filteredLabels.sort( (el0, el1) => {
        // The entries were mapped to {label, view, model} objects above,
        // so sort on those properties rather than on entry pairs.
        const a = key == "label" ? el0.label : el0.view?.[key];
        const b = key == "label" ? el1.label : el1.view?.[key];
        if (a < b) {
          return -1;
        } else if (a > b) {
          return 1;
        } else if (a == b) {
          return 0;
        } else if (a && !b) {
          return 1;
        } else if (!a && b) {
          return -1;
        } else {
          return 0;
        }
      });
      if (sortDesc && sortDesc[idx] === true) {
        filteredLabels.reverse();
      }
    });

  }

  if (label) {
    // Use a distinct callback name so the `label` argument is not shadowed.
    filteredLabels = filteredLabels.filter( item => item.label == label );
  }

  if (text) {
    const textFilter = (value, search, item) => {
      return String(value).toLowerCase().includes(search.toLowerCase());
    };

    // Match on the label name or its description; textFilter returns a
    // boolean, so combine with || rather than ??.
    filteredLabels = filteredLabels.filter ( item => {
      return textFilter(item.label, text, item) || textFilter(item.view?.description, text, item);
    });
  }

  const count = filteredLabels.length;

  if (itemsPerPage && itemsPerPage > 0) {
    const offset = (page > 0)
      ? (page-1) * itemsPerPage
      : 0;

    filteredLabels = filteredLabels.slice(offset, offset+itemsPerPage);
  }

  return {labels: filteredLabels, count};
}

export default { refreshLabels, getLabels };
22
lib/www/client/source/src/store/modules/label/getters.js
Normal file
@@ -0,0 +1,22 @@

function labels (state) {
  return state.labels;
}

/** Return labels that can be added by users.
 *
 * As opposed to system labels.
 */
function userLabels (state) {
  return Object.fromEntries(Object.entries(state.labels).filter(i => i[1].model.user));
}

function labelCount (state) {
  // state.labels is an object keyed by label name, so count its keys.
  return Object.keys(state.labels ?? {}).length;
}

function labelsLoading (state) {
  return !!state.loading;
}

export default { labels, userLabels, labelCount, labelsLoading };
6
lib/www/client/source/src/store/modules/label/index.js
Normal file
@@ -0,0 +1,6 @@
import state from './state'
import getters from './getters'
import actions from './actions'
import mutations from './mutations'

export default { state, getters, actions, mutations };
49
lib/www/client/source/src/store/modules/label/mutations.js
Normal file
@@ -0,0 +1,49 @@

function setLabels (state, labels) {
  // We don't need or want the labels collection to be reactive, since
  // it can grow large.
  state.labels = Object.freeze(labels);
}

function setLabelsLoading (state, abortController = new AbortController()) {
  state.loading = abortController;
}

// This assumes that we know any transactions have finished or we
// don't care about aborting.
function clearLabelsLoading (state) {
  state.loading = null;
}

function setLabelsTimestamp (state, timestamp = new Date()) {
  // NOTE: There is no `modified_on` property in the labels
  // result or in the database schema, but we could add
  // one.
  if (timestamp === true) {
    const tstamp = state.labels
      .map( i => i.modified_on )
      .reduce( (acc, cur) => acc > cur ? acc : cur );
    state.timestamp = tstamp ? new Date(tstamp) : new Date();
  } else {
    state.timestamp = timestamp;
  }
}

function setLabelsETag (state, etag) {
  state.etag = etag;
}

function abortLabelsLoading (state) {
  if (state.loading) {
    state.loading.abort();
  }
  state.loading = null;
}

export default {
  setLabels,
  setLabelsLoading,
  clearLabelsLoading,
  abortLabelsLoading, // committed by refreshLabels, so it must be registered
  setLabelsTimestamp,
  setLabelsETag
};
8
lib/www/client/source/src/store/modules/label/state.js
Normal file
@@ -0,0 +1,8 @@
const state = () => ({
  labels: Object.freeze([]),
  loading: null,
  timestamp: null,
  etag: null,
});

export default state;
117
lib/www/client/source/src/store/modules/line/actions.js
Normal file
@@ -0,0 +1,117 @@

/** Fetch lines from server
 */
async function refreshLines ({commit, dispatch, state, rootState}) {

  if (state.loading) {
    commit('abortLinesLoading');
  }

  commit('setLinesLoading');
  const pid = rootState.project.projectId;
  const url = `/project/${pid}/line`;
  const init = {
    signal: state.loading.signal
  };
  const res = await dispatch('api', [url, init]);

  if (res) {
    commit('setLines', res);
    commit('setLinesTimestamp');
  }
  commit('clearLinesLoading');
}

/** Return a subset of lines from state.lines
 */
async function getLines ({commit, dispatch, state}, [projectId, {line, fsp, lsp, incr, sortBy, sortDesc, itemsPerPage, page, text}]) {
  let filteredLines = [...state.lines];

  if (sortBy) {

    sortBy.forEach( (key, idx) => {
      filteredLines.sort( (el0, el1) => {
        const a = el0?.[key];
        const b = el1?.[key];
        if (a < b) {
          return -1;
        } else if (a > b) {
          return 1;
        } else if (a == b) {
          return 0;
        } else if (a && !b) {
          return 1;
        } else if (!a && b) {
          return -1;
        } else {
          return 0;
        }
      });
      if (sortDesc && sortDesc[idx] === true) {
        filteredLines.reverse();
      }
    });

  }

  if (line) {
    // Use a distinct callback name so the `line` argument is not shadowed.
    filteredLines = filteredLines.filter( item => item.line == line );
  }

  if (fsp) {
    filteredLines = filteredLines.filter( item => item.fsp == fsp );
  }

  if (lsp) {
    filteredLines = filteredLines.filter( item => item.lsp == lsp );
  }

  if (text) {
    const numberFilter = (value, search, item) => {
      return value == search;
    };

    const textFilter = (value, search, item) => {
      return String(value).toLowerCase().includes(search.toLowerCase());
    };

    const incrFilter = (value, search, item) => {
      const inc = /^(incr(ement)?|↑|\+)/i;
      const dec = /^(decr(ement)?|↓|-)/i;
      return (inc.test(search) && value) || (dec.test(search) && !value)
    }

    const searchFunctions = {
      line: numberFilter,
      fsp: numberFilter,
      lsp: numberFilter,
      remarks: textFilter,
      incr: incrFilter,
      ntba: (value, search, item) => text.toLowerCase() == "ntba" && value
    };

    filteredLines = filteredLines.filter ( item => {
      for (let key in searchFunctions) {
        const fn = searchFunctions[key];
        if (fn(item[key], text, item)) {
          return true;
        }
      }
      return false;
    });
  }

  const count = filteredLines.length;

  if (itemsPerPage && itemsPerPage > 0) {
    const offset = (page > 0)
      ? (page-1) * itemsPerPage
      : 0;

    filteredLines = filteredLines.slice(offset, offset+itemsPerPage);
  }

  return {lines: filteredLines, count};
}

export default { refreshLines, getLines };
14
lib/www/client/source/src/store/modules/line/getters.js
Normal file
@@ -0,0 +1,14 @@

function lines (state) {
  return state.lines;
}

function lineCount (state) {
  return state.lines?.length ?? 0;
}

function linesLoading (state) {
  return !!state.loading;
}

export default { lines, lineCount, linesLoading };
6
lib/www/client/source/src/store/modules/line/index.js
Normal file
@@ -0,0 +1,6 @@
import state from './state'
import getters from './getters'
import actions from './actions'
import mutations from './mutations'

export default { state, getters, actions, mutations };
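For context, a sketch of how such a module might be registered in the root store; the store file and the Vue 2 / Vuex 3 wiring here are assumptions, the diff itself only shows the modules:

    import Vue from 'vue';
    import Vuex from 'vuex';
    import line from './modules/line';
    import plan from './modules/plan';
    import sequence from './modules/sequence';

    Vue.use(Vuex);

    // The modules are not namespaced, so actions such as refreshLines and
    // getLines are dispatched globally, matching dispatch('api', ...) above.
    export default new Vuex.Store({
      modules: { line, plan, sequence }
    });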
49
lib/www/client/source/src/store/modules/line/mutations.js
Normal file
@@ -0,0 +1,49 @@
function setLines (state, lines) {
  // We don't need or want the events array to be reactive, since
  // it can be tens of thousands of items long.
  state.lines = Object.freeze(lines);
}

function setLinesLoading (state, abortController = new AbortController()) {
  state.loading = abortController;
}

// This assumes that we know any transactions have finished or we
// don't care about aborting.
function clearLinesLoading (state) {
  state.loading = null;
}

function setLinesTimestamp (state, timestamp = new Date()) {
  // NOTE: There is no `modified_on` property in the lines
  // result or in the database schema, but we could perhaps add
  // one.
  if (timestamp === true) {
    const tstamp = state.lines
      .map( event => event.modified_on )
      .reduce( (acc, cur) => acc > cur ? acc : cur );
    state.timestamp = tstamp ? new Date(tstamp) : new Date();
  } else {
    state.timestamp = timestamp;
  }
}

function setLinesETag (state, etag) {
  state.etag = etag;
}

function abortLinesLoading (state) {
  if (state.loading) {
    state.loading.abort();
  }
  state.loading = null;
}

export default {
  setLines,
  setLinesLoading,
  clearLinesLoading,
  setLinesTimestamp,
  setLinesETag,
  abortLinesLoading
};
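The loading slot doubles as the AbortController for the in-flight request; committing abortLinesLoading cancels the fetch through its signal. A minimal sketch of the pattern in isolation (the plain fetch below is an assumption standing in for the store's `api` action):

    const controller = new AbortController();

    // Pass the signal to fetch; a later controller.abort() rejects the
    // promise with an AbortError instead of letting a stale response land.
    fetch('/api/project/demo/line', { signal: controller.signal })
      .then(res => res.json())
      .catch(err => {
        if (err.name === 'AbortError') {
          // Superseded by a newer refresh; safe to ignore.
        } else {
          throw err;
        }
      });

    controller.abort();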
8
lib/www/client/source/src/store/modules/line/state.js
Normal file
@@ -0,0 +1,8 @@
const state = () => ({
  lines: Object.freeze([]),
  loading: null,
  timestamp: null,
  etag: null,
});

export default state;
114
lib/www/client/source/src/store/modules/plan/actions.js
Normal file
@@ -0,0 +1,114 @@
/** Fetch sequences from server
 */
async function refreshPlan ({commit, dispatch, state, rootState}) {

  if (state.loading) {
    commit('abortPlanLoading');
  }

  commit('setPlanLoading');
  const pid = rootState.project.projectId;
  const url = `/project/${pid}/plan`;
  const init = {
    signal: state.loading.signal
  };
  const res = await dispatch('api', [url, init]);

  if (res) {
    commit('setPlan', res);
    commit('setPlanTimestamp');
  }
  commit('clearPlanLoading');
}

/** Return a subset of sequences from state.sequences
 */
async function getPlannedSequences ({commit, dispatch, state}, [projectId, {sequence, date0, date1, sortBy, sortDesc, itemsPerPage, page, text}]) {
  let filteredPlannedSequences = [...state.sequences];

  if (sortBy) {

    sortBy.forEach( (key, idx) => {
      filteredPlannedSequences.sort( (el0, el1) => {
        const a = el0?.[key];
        const b = el1?.[key];
        if (a < b) {
          return -1;
        } else if (a > b) {
          return 1;
        } else if (a == b) {
          return 0;
        } else if (a && !b) {
          return 1;
        } else if (!a && b) {
          return -1;
        } else {
          return 0;
        }
      });
      if (sortDesc && sortDesc[idx] === true) {
        filteredPlannedSequences.reverse();
      }
    });

  }

  if (sequence) {
    filteredPlannedSequences = filteredPlannedSequences.filter( item => item.sequence == sequence );
  }

  if (date0 && date1) {
    // ts0/ts1 are Date objects here (see transform() in mutations.js)
    filteredPlannedSequences = filteredPlannedSequences.filter( item =>
      item.ts0.toISOString().substr(0, 10) >= date0 && item.ts1.toISOString().substr(0, 10) <= date1
    );
  } else if (date0) {
    filteredPlannedSequences = filteredPlannedSequences.filter( item =>
      item.ts0.toISOString().substr(0, 10) == date0 || item.ts1.toISOString().substr(0, 10) == date0
    );
  }

  if (text) {
    const tstampFilter = (value, search, item) => {
      return textFilter(value.toISOString(), search, item);
    };

    const numberFilter = (value, search, item) => {
      return value == search;
    };

    const textFilter = (value, search, item) => {
      return String(value).toLowerCase().includes(search.toLowerCase());
    };

    const searchFunctions = {
      sequence: numberFilter,
      line: numberFilter,
      remarks: textFilter,
      ts0: tstampFilter,
      ts1: tstampFilter
    };

    filteredPlannedSequences = filteredPlannedSequences.filter( item => {
      for (let key in searchFunctions) {
        const fn = searchFunctions[key];
        if (fn(item[key], text, item)) {
          return true;
        }
      }
      return false;
    });
  }

  const count = filteredPlannedSequences.length;

  if (itemsPerPage && itemsPerPage > 0) {
    const offset = (page > 0)
      ? (page-1) * itemsPerPage
      : 0;

    filteredPlannedSequences = filteredPlannedSequences.slice(offset, offset+itemsPerPage);
  }

  return {sequences: filteredPlannedSequences, count};
}

export default { refreshPlan, getPlannedSequences };
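To make the date comparison concrete: because toISOString() always yields a fixed-width UTC timestamp, the YYYY-MM-DD prefix can be compared lexicographically (dates below are illustrative):

    const ts0 = new Date("2023-05-01T22:00:00Z");
    ts0.toISOString().substr(0, 10);   // "2023-05-01"
    "2023-05-01" >= "2023-04-30";      // true: string order matches date order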
18
lib/www/client/source/src/store/modules/plan/getters.js
Normal file
@@ -0,0 +1,18 @@
function planRemarks (state) {
  return state.remarks;
}

function plannedSequences (state) {
  return state.sequences;
}

function plannedSequenceCount (state) {
  return state.sequences?.length ?? 0;
}

function plannedSequencesLoading (state) {
  return !!state.loading;
}

export default { planRemarks, plannedSequences, plannedSequenceCount, plannedSequencesLoading };
6
lib/www/client/source/src/store/modules/plan/index.js
Normal file
@@ -0,0 +1,6 @@
import state from './state'
import getters from './getters'
import actions from './actions'
import mutations from './mutations'

export default { state, getters, actions, mutations };
59
lib/www/client/source/src/store/modules/plan/mutations.js
Normal file
@@ -0,0 +1,59 @@
function transform (item) {
  item.ts0 = new Date(item.ts0);
  item.ts1 = new Date(item.ts1);
  return item;
}

// ATTENTION: This relies on the new planner endpoint
// as per issue #281.

function setPlan (state, plan) {
  // We don't need or want the planned sequences array to be reactive
  state.sequences = Object.freeze(plan.sequences.map(transform));
  state.remarks = plan.remarks;
}

function setPlanLoading (state, abortController = new AbortController()) {
  state.loading = abortController;
}

// This assumes that we know any transactions have finished or we
// don't care about aborting.
function clearPlanLoading (state) {
  state.loading = null;
}

function setPlanTimestamp (state, timestamp = new Date()) {
  // NOTE: There is no `modified_on` property in the plan
  // result or in the database schema, but we should probably add
  // one.
  if (timestamp === true) {
    const tstamp = state.sequences
      .map( item => item.modified_on )
      .reduce( (acc, cur) => acc > cur ? acc : cur );
    state.timestamp = tstamp ? new Date(tstamp) : new Date();
  } else {
    state.timestamp = timestamp;
  }
}

function setPlanETag (state, etag) {
  state.etag = etag;
}

function abortPlanLoading (state) {
  if (state.loading) {
    state.loading.abort();
  }
  state.loading = null;
}

export default {
  setPlan,
  setPlanLoading,
  clearPlanLoading,
  setPlanTimestamp,
  setPlanETag,
  abortPlanLoading
};
9
lib/www/client/source/src/store/modules/plan/state.js
Normal file
@@ -0,0 +1,9 @@
const state = () => ({
  sequences: Object.freeze([]),
  remarks: null,
  loading: null,
  timestamp: null,
  etag: null,
});

export default state;
@@ -1,13 +1,19 @@
 async function getProject ({commit, dispatch}, projectId) {
-  const res = await dispatch('api', [`/project/${String(projectId).toLowerCase()}`]);
+  const res = await dispatch('api', [`/project/${String(projectId).toLowerCase()}/configuration`]);
   if (res) {
     commit('setProjectName', res.name);
-    commit('setProjectId', res.pid);
+    commit('setProjectId', res.id?.toLowerCase());
     commit('setProjectSchema', res.schema);
+    commit('setProjectConfiguration', res);
+    const recentProjects = JSON.parse(localStorage.getItem("recentProjects") || "[]")
+    recentProjects.unshift(res);
+    localStorage.setItem("recentProjects", JSON.stringify(recentProjects.slice(0, 3)));
   } else {
     commit('setProjectName', null);
     commit('setProjectId', null);
     commit('setProjectSchema', null);
+    commit('setProjectConfiguration', {});
   }
 }
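The recent-projects list persists across sessions in localStorage; a sketch of reading it back elsewhere (the reading site is an assumption, this hunk only writes the key):

    // Most recently opened project first; getProject keeps at most three entries.
    const recentProjects = JSON.parse(localStorage.getItem("recentProjects") || "[]");
    const recentNames = recentProjects.map(p => p.name);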
@@ -0,0 +1,18 @@
function projectId (state) {
  return state.projectId;
}

function projectName (state) {
  return state.projectName;
}

function projectSchema (state) {
  return state.projectSchema;
}

function projectConfiguration (state) {
  return state.projectConfiguration;
}

export default { projectId, projectName, projectSchema, projectConfiguration };
@@ -11,4 +11,8 @@ function setProjectSchema (state, schema) {
   state.projectSchema = schema;
 }

-export default { setProjectId, setProjectName, setProjectSchema };
+function setProjectConfiguration (state, configuration) {
+  state.projectConfiguration = Object.freeze(configuration);
+}
+
+export default { setProjectId, setProjectName, setProjectSchema, setProjectConfiguration };
@@ -1,7 +1,8 @@
 const state = () => ({
   projectId: null,
   projectName: null,
-  projectSchema: null
+  projectSchema: null,
+  projectConfiguration: {}
 });

 export default state;
122
lib/www/client/source/src/store/modules/sequence/actions.js
Normal file
@@ -0,0 +1,122 @@
/** Fetch sequences from server
 */
async function refreshSequences ({commit, dispatch, state, rootState}) {

  if (state.loading) {
    commit('abortSequencesLoading');
  }

  commit('setSequencesLoading');
  const pid = rootState.project.projectId;
  const url = `/project/${pid}/sequence?files=true`;
  const init = {
    signal: state.loading.signal
  };
  const res = await dispatch('api', [url, init]);

  if (res) {
    commit('setSequences', res);
    commit('setSequencesTimestamp');
  }
  commit('clearSequencesLoading');
}

/** Return a subset of sequences from state.sequences
 */
async function getSequences ({commit, dispatch, state}, [projectId, {sequence, date0, date1, sortBy, sortDesc, itemsPerPage, page, text}]) {
  let filteredSequences = [...state.sequences];

  if (sortBy) {

    sortBy.forEach( (key, idx) => {
      filteredSequences.sort( (el0, el1) => {
        const a = el0?.[key];
        const b = el1?.[key];
        if (a < b) {
          return -1;
        } else if (a > b) {
          return 1;
        } else if (a == b) {
          return 0;
        } else if (a && !b) {
          return 1;
        } else if (!a && b) {
          return -1;
        } else {
          return 0;
        }
      });
      if (sortDesc && sortDesc[idx] === true) {
        filteredSequences.reverse();
      }
    });

  }

  if (sequence) {
    filteredSequences = filteredSequences.filter( item => item.sequence == sequence );
  }

  if (date0 && date1) {
    filteredSequences = filteredSequences.filter( item =>
      (item.ts0_final ?? item.ts0)?.substr(0, 10) >= date0 &&
      (item.ts1_final ?? item.ts1)?.substr(0, 10) <= date1
    );
  } else if (date0) {
    filteredSequences = filteredSequences.filter( item =>
      (item.ts0_final ?? item.ts0)?.substr(0, 10) == date0 ||
      (item.ts1_final ?? item.ts1)?.substr(0, 10) == date0
    );
  }

  if (text) {
    const tstampFilter = (value, search, item) => {
      return search?.length >= 5 && textFilter(value, search, item);
    };

    const numberFilter = (value, search, item) => {
      return value == search;
    };

    const textFilter = (value, search, item) => {
      return String(value).toLowerCase().includes(search.toLowerCase());
    };

    const searchFunctions = {
      ts0: tstampFilter,
      ts1: tstampFilter,
      ts0_final: tstampFilter,
      ts1_final: tstampFilter,
      sequence: numberFilter,
      line: numberFilter,
      fsp: numberFilter,
      lsp: numberFilter,
      fsp_final: numberFilter,
      lsp_final: numberFilter,
      remarks: textFilter,
      remarks_final: textFilter
    };

    filteredSequences = filteredSequences.filter( item => {
      for (let key in searchFunctions) {
        const fn = searchFunctions[key];
        if (fn(item[key], text, item)) {
          return true;
        }
      }
      return false;
    });
  }

  const count = filteredSequences.length;

  if (itemsPerPage && itemsPerPage > 0) {
    const offset = (page > 0)
      ? (page-1) * itemsPerPage
      : 0;

    filteredSequences = filteredSequences.slice(offset, offset+itemsPerPage);
  }

  return {sequences: filteredSequences, count};
}

export default { refreshSequences, getSequences };
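A worked check of the paging arithmetic shared by all three modules (values illustrative; `rows` stands in for the filtered array):

    const itemsPerPage = 25, page = 3;
    const offset = (page - 1) * itemsPerPage;                    // 50
    const pageRows = rows.slice(offset, offset + itemsPerPage);  // rows 51 to 75
    // `count` is taken before slicing, so the table footer still
    // reports the full filtered total.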
14
lib/www/client/source/src/store/modules/sequence/getters.js
Normal file
@@ -0,0 +1,14 @@
function sequences (state) {
  return state.sequences;
}

function sequenceCount (state) {
  return state.sequences?.length ?? 0;
}

function sequencesLoading (state) {
  return !!state.loading;
}

export default { sequences, sequenceCount, sequencesLoading };
@@ -0,0 +1,6 @@
import state from './state'
import getters from './getters'
import actions from './actions'
import mutations from './mutations'

export default { state, getters, actions, mutations };
@@ -0,0 +1,49 @@
function setSequences (state, sequences) {
  // We don't need or want the events array to be reactive, since
  // it can be tens of thousands of items long.
  state.sequences = Object.freeze(sequences);
}

function setSequencesLoading (state, abortController = new AbortController()) {
  state.loading = abortController;
}

// This assumes that we know any transactions have finished or we
// don't care about aborting.
function clearSequencesLoading (state) {
  state.loading = null;
}

function setSequencesTimestamp (state, timestamp = new Date()) {
  // NOTE: There is no `modified_on` property in the sequences
  // result or in the database schema, but we should probably add
  // one.
  if (timestamp === true) {
    const tstamp = state.sequences
      .map( event => event.modified_on )
      .reduce( (acc, cur) => acc > cur ? acc : cur );
    state.timestamp = tstamp ? new Date(tstamp) : new Date();
  } else {
    state.timestamp = timestamp;
  }
}

function setSequencesETag (state, etag) {
  state.etag = etag;
}

function abortSequencesLoading (state) {
  if (state.loading) {
    state.loading.abort();
  }
  state.loading = null;
}

export default {
  setSequences,
  setSequencesLoading,
  clearSequencesLoading,
  setSequencesTimestamp,
  setSequencesETag,
  abortSequencesLoading
};
@@ -0,0 +1,8 @@
const state = () => ({
  sequences: Object.freeze([]),
  loading: null,
  timestamp: null,
  etag: null,
});

export default state;
@@ -39,6 +39,12 @@
           {{ $refs.calendar.title }}
         </v-toolbar-title>
         <v-spacer></v-spacer>
+        <v-btn v-if="categoriesAvailable"
+          small
+          class="mx-4"
+          v-model="useCategories"
+          @click="useCategories = !useCategories"
+        >Labels {{useCategories ? "On" : "Off"}}</v-btn>
         <v-menu bottom right>
           <template v-slot:activator="{ on, attrs }">
             <v-btn
@@ -72,16 +78,23 @@
       <v-calendar
         ref="calendar"
         v-model="focus"
-        :events="events"
+        :events="items"
         :event-color="getEventColour"
         color="primary"
-        :type="type"
+        :type="view"
         :locale-first-day-of-year="4"
         :weekdays="weekdays"
         :show-week="true"
+        :category-days="categoryDays"
+        :categories="categories"
         @click:date="showLogForDate"
         @click:event="showLogForEvent"
-      ></v-calendar>
+        @change="setSpan"
+      >
+        <template v-slot:event="{ event }">
+          <div style="height:100%;overflow:scroll;" v-html="event.name"></div>
+        </template>
+      </v-calendar>
     </v-sheet>
   </div>
 </template>
@@ -97,8 +110,9 @@ export default {
       weekdays: [1, 2, 3, 4, 5, 6, 0],
       type: "week",
       focus: "",
-      events: [
-      ],
+      items: [],
+      useCategories: false,
+      span: {},
       options: {
         sortBy: "sequence"
       }
@@ -117,28 +131,126 @@ export default {
       return labels[this.type];
     },

-    ...mapGetters(['loading'])
+    view () {
+      return this.useCategories ? "category" : this.type;
+    },
+
+    categoriesAvailable () {
+      return this.type == "day" || this.type == "4day";
+    },
+
+    categoryDays () {
+      if (this.useCategories) {
+        const days = {
+          month: 30,
+          week: 7,
+          "4day": 4,
+          day: 1
+        };
+
+        return days[this.type];
+      }
+    },
+
+    visibleItems () {
+      return this.items.filter(i => {
+        const end = i.end ?? i.start;
+        if (i.start > this.span.end) {
+          return false;
+        }
+        if (end < this.span.start) {
+          return false;
+        }
+        return true;
+      });
+    },
+
+    categories () {
+      return [...new Set(this.visibleItems.map(i => i.category ?? "General"))];
+    },
+
+    ...mapGetters(['sequencesLoading', 'sequences', 'events'])
   },

+  watch: {
+
+    sequences () {
+      const isFirstLoad = !this.items.length;
+
+      this.getEvents();
+
+      if (isFirstLoad) {
+        this.setLast();
+      }
+    },
+
+    events () {
+      const isFirstLoad = !this.items.length;
+
+      this.getEvents();
+
+      if (isFirstLoad) {
+        this.setLast();
+      }
+    },
+
+    type () {
+      this.getEvents();
+    },
+
+    categoriesAvailable (value) {
+      if (!value) {
+        this.useCategories = false;
+      }
+    }
+
+  },
+
   methods: {

     async getEvents () {
-      const query = new URLSearchParams(this.options);
-      const url = `/project/${this.$route.params.project}/sequence?${query.toString()}`;
-
-      const finalSequences = await this.api([url]) || [];
-      this.events = finalSequences.map(s => {
+      const sequences = this.sequences.map(s => {
         const e = {};
         //e.start = s.ts0.substring(0,10)+" "+s.ts0.substring(11,19)
         //e.end = s.ts1.substring(0,10)+" "+s.ts1.substring(11,19)
+        e.routerLink = { name: "logBySequence", params: { sequence: s.sequence } };
         e.start = new Date(s.ts0);
         e.end = new Date(s.ts1);
         e.timed = true;
         e.colour = "orange";
-        e.name = `Sequence ${s.sequence}`;
+        e.name = `<b>Sequence ${s.sequence}</b><br/>Line ${s.line}<br/><abbr title="Shotpoints">SP</abbr> ${s.fgsp ?? s.fsp}‒${s.lgsp ?? s.lsp}`;
+        e.category = "Sequence";
         return e;
       });
+
+      const lineChanges = this.events.filter(i => i.meta?.["*ReportLineChangeTime*"]?.value && i.meta?.["*ReportLineChangeTime*"]?.type != "excess").map(i => {
+        const e = {};
+        const duration = i.meta?.["*ReportLineChangeTime*"]?.value;
+        e.end = new Date(i.tstamp);
+        e.start = new Date(e.end - duration);
+        e.timed = true;
+        e.colour = "pink";
+        e.name = "Line change";
+        e.category = "Production";
+        return e;
+      });
+
+      const excludedLabels = [ "FSP", "FGSP", "LSP", "LGSP", "QC" ];
+      const otherEvents = this.events.filter(i => !excludedLabels.some(l => i.labels.includes(l))).map(i => {
+        const e = {};
+        e.start = new Date(i.tstamp);
+        e.colour = "brown";
+        e.timed = true;
+        e.name = this.$options.filters.markdownInline(i.remarks);
+        e.category = i.labels[0];
+        return e;
+      });
+
+      this.items = [...sequences];
+
+      if (this.type == "day" || this.type == "4day") {
+        this.items.push(...lineChanges, ...otherEvents);
+      }
     },

     getEventColour (event) {
@@ -150,11 +262,15 @@ export default {
     },

     setFirst () {
-      this.focus = this.events[this.events.length-1].start;
+      if (this.items.length) {
+        this.focus = this.items[this.items.length-1].start;
+      }
     },

     setLast () {
-      this.focus = this.events[0].start;
+      if (this.items.length) {
+        this.focus = this.items[0].start;
+      }
     },

     prev () {
@@ -175,6 +291,13 @@ export default {
       }
     },

+    setSpan (span) {
+      this.span = {
+        start: new Date(span.start.date),
+        end: new Date((new Date(span.end.date)).valueOf() + 86400000)
+      };
+    },
+
     ...mapActions(["api"])
@@ -182,9 +305,7 @@ export default {

   async mounted () {
     await this.getEvents();
-    if (this.events.length) {
-      this.setLast();
-    }
+    this.setLast();
   }
 }
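For reference, the shape of the objects fed to v-calendar above, as assembled by getEvents (field values here are illustrative):

    // One calendar item built by getEvents()
    const item = {
      name: "<b>Sequence 12</b><br/>Line 1043",   // HTML, rendered via the event slot's v-html
      start: new Date("2023-05-01T22:10:00Z"),
      end: new Date("2023-05-02T03:45:00Z"),
      timed: true,
      colour: "orange",                           // read by getEventColour
      category: "Sequence",                       // used when the category view is enabled
      routerLink: { name: "logBySequence", params: { sequence: 12 } }
    };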
@@ -11,6 +11,7 @@
           label="Filter"
           single-line
+          clearable
           hint="Filter by line number, first or last shotpoint or remarks. Use ‘incr’ or ‘+’ / ‘decr’ or ‘-’ to show only incrementing / decrementing lines"
         ></v-text-field>
       </v-toolbar>
     </v-card-title>
@@ -106,12 +107,14 @@
     <v-data-table
       :headers="headers"
       :items="items"
-      item-key="line"
       :items-per-page.sync="itemsPerPage"
+      :server-items-length="lineCount"
+      item-key="line"
       :search="filter"
-      :loading="loading"
-      :fixed-header="true"
-      :footer-props='{itemsPerPageOptions: [ 10, 25, 50, 100, 500, -1 ]}'
+      :loading="linesLoading"
+      :options.sync="options"
+      fixed-header
+      :footer-props='{itemsPerPageOptions: [ 10, 25, 50, 100, 500, -1 ], showFirstLastPage: true}'
       :item-class="itemClass"
       :show-select="selectOn"
       v-model="selectedRows"
@@ -124,6 +127,10 @@
         :preplot="item"
         :sequences="sequences.filter(s => s.line == item.line)"
         :sequence-href="(s) => `/projects/${$route.params.project}/log/sequence/${s.sequence}`"
+        :planned-sequences="plannedSequences.filter(s => s.line == item.line)"
+        :planned-sequence-href="() => `/projects/${$route.params.project}/plan`"
+        :pending-reshoots="null"
+        :pending-reshoot-href="null"
       >
         <template v-slot:empty>
           <div v-if="!item.ntba" class="sequence" title="Virgin"></div>
@@ -161,7 +168,7 @@
           icon
           small
           title="Edit"
-          :disabled="loading"
+          :disabled="linesLoading"
           @click="editItem(item, 'remarks')"
         >
           <v-icon small>mdi-square-edit-outline</v-icon>
@@ -251,9 +258,10 @@ export default {
       items: [],
       selectOn: false,
       selectedRows: [],
-      filter: null,
-      num_lines: null,
-      sequences: [],
+      filter: "",
+      options: {},
+      lineCount: null,
+      //sequences: [],
       activeItem: null,
       edit: null, // {line, key, value}
       queuedReload: false,
@@ -273,11 +281,22 @@ export default {
   },

   computed: {
-    ...mapGetters(['user', 'writeaccess', 'loading', 'serverEvent'])
+    ...mapGetters(['user', 'writeaccess', 'linesLoading', 'lines', 'sequences', 'plannedSequences'])
   },

   watch: {

+    options: {
+      handler () {
+        this.fetchLines();
+      },
+      deep: true
+    },
+
+    async lines () {
+      await this.fetchLines();
+    },
+
     async edit (newVal, oldVal) {
       if (newVal === null && oldVal !== null) {
         const item = this.items.find(i => i.line == oldVal.line);
@@ -296,39 +315,9 @@ export default {
       }
     },

-    async serverEvent (event) {
-      if (event.payload.pid == this.$route.params.project) {
-        if (event.channel == "preplot_lines" || event.channel == "preplot_points") {
-          if (!this.loading && !this.queuedReload) {
-            // Do not force a non-cached response if refreshing as a result
-            // of an event notification. We will assume that the server has
-            // already had time to update the cache by the time our request
-            // gets back to it.
-            this.getLines();
-          } else {
-            this.queuedReload = true;
-          }
-        } else if ([ "planned_lines", "raw_lines", "final_lines" ].includes(event.channel)) {
-          if (!this.loading && !this.queuedReload) {
-            this.getSequences();
-          } else {
-            this.queuedReload = true;
-          }
-        }
-      }
-    },
-
-    queuedReload (newVal, oldVal) {
-      if (newVal && !oldVal && !this.loading) {
-        this.getLines();
-        this.getSequences();
-      }
-    },
-
-    loading (newVal, oldVal) {
-      if (!newVal && oldVal && this.queuedReload) {
-        this.getLines();
-        this.getSequences();
+    filter (newVal, oldVal) {
+      if (newVal?.toLowerCase() != oldVal?.toLowerCase()) {
+        this.fetchLines();
       }
     },
@@ -468,43 +457,28 @@ export default {
       }
     },

-    async getNumLines () {
-      const projectInfo = await this.api([`/project/${this.$route.params.project}`]);
-      this.num_lines = projectInfo.lines;
-    },
-
-    async getLines () {
-
-      const url = `/project/${this.$route.params.project}/line`;
-
-      this.queuedReload = false;
-      this.items = await this.api([url]) || [];
-
-    },
-
-    async getSequences () {
-      const urlS = `/project/${this.$route.params.project}/sequence`;
-      this.sequences = await this.api([urlS]) || [];
-
-      const urlP = `/project/${this.$route.params.project}/plan`;
-      const planned = await this.api([urlP]) || [];
-      planned.forEach(i => i.status = "planned");
-      this.sequences.push(...planned);
-    },
-
     setActiveItem (item) {
       this.activeItem = this.activeItem == item
         ? null
        : item;
     },

-    ...mapActions(["api"])
+    async fetchLines (opts = {}) {
+      const options = {
+        text: this.filter,
+        ...this.options
+      };
+      const res = await this.getLines([this.$route.params.project, options]);
+      this.items = res.lines;
+      this.lineCount = res.count;
+    },
+
+    ...mapActions(["api", "getLines"])
   },

   mounted () {
-    this.getLines();
-    this.getNumLines();
-    this.getSequences();
+    this.fetchLines();

     // Initialise stylesheet
     const el = document.createElement("style");
@@ -36,7 +36,6 @@
     <dougal-event-edit v-if="writeaccess"
       v-model="eventDialog"
       v-bind="editedEvent"
-      :loading="loading"
       :available-labels="userLabels"
       :preset-remarks="presetRemarks"
       @new="newEvent"
@@ -73,6 +72,10 @@
         :href="`/api/project/${$route.params.project}/event/-/${$route.params.sequence}?mime=application%2Fjson`"
         title="Download as a generic JSON file"
       >JSON</v-list-item>
+      <v-list-item
+        :href="`/api/project/${$route.params.project}/event/-/${$route.params.sequence}?mime=text%2Fcsv`"
+        title="Download as Comma Separated Values file"
+      >CSV</v-list-item>
       <v-list-item
         :href="`/api/project/${$route.params.project}/event/-/${$route.params.sequence}?mime=text%2Fhtml`"
         title="Download as an HTML formatted file"
@@ -90,7 +93,21 @@
         append-icon="mdi-magnify"
         label="Filter"
         single-line
-        hide-details></v-text-field>
+        clearable
+        hide-details>
+        <template v-slot:prepend-inner>
+          <v-chip v-if="labelSearch"
+            class="mr-1"
+            small
+            close
+            @click:close="labelSearch=null"
+            :color="labels[labelSearch] && labels[labelSearch].view.colour"
+            :title="labels[labelSearch] && labels[labelSearch].view.description"
+            :dark="labels[labelSearch] && labels[labelSearch].view.dark"
+            :light="labels[labelSearch] && labels[labelSearch].view.light"
+          >{{labelSearch}}</v-chip>
+        </template>
+      </v-text-field>
     </v-toolbar>
   </v-card-title>
   <v-card-text>
@@ -182,136 +199,46 @@
         <v-list-item-icon><v-icon>mdi-delete</v-icon></v-list-item-icon>
         <v-list-item-title class="error--text">Delete all comments</v-list-item-title>
       </v-list-item>

+      <!-- BEGIN This section only applies to QC events -->
+      <template v-if="contextMenuItem.meta.qc_id">
+
+        <v-divider></v-divider>
+
+        <!-- Mark QC accepted -->
+        <v-list-item @click="() => acceptQc(contextMenuItem)" v-if="!isAcceptedQc(contextMenuItem)">
+          <v-list-item-icon><v-icon>mdi-check</v-icon></v-list-item-icon>
+          <v-list-item-title>Mark QC accepted</v-list-item-title>
+        </v-list-item>
+        <!-- Unmark QC accepted -->
+        <v-list-item @click="() => acceptQc(contextMenuItem, false)" v-else>
+          <v-list-item-icon><v-icon>mdi-restore</v-icon></v-list-item-icon>
+          <v-list-item-title>Unmark QC accepted</v-list-item-title>
+        </v-list-item>
+
+      </template>
+      <!-- END This section only applies to QC events -->
+
     </v-list>

   </v-menu>
   <!-- END Context menu for log entries -->

-  <v-container fluid class="pa-0 pb-2" v-if="sequenceData">
-    <v-row no-gutters class="d-flex flex-column flex-sm-row">
-      <v-col cols="6" class="d-flex flex-column">
-        <v-card outlined tile class="flex-grow-1">
-          <v-card-subtitle>
-            Acquisition remarks
-            <template v-if="writeaccess">
-              <template v-if="edit && edit.sequence == sequenceData.sequence && edit.key == 'remarks'">
-                <v-btn
-                  class="ml-3"
-                  icon
-                  small
-                  title="Cancel edit"
-                  :disabled="loading"
-                  @click="edit.value = sequenceData.remarks; edit = null"
-                >
-                  <v-icon small>mdi-close</v-icon>
-                </v-btn>
-                <v-btn v-if="edit.value != sequenceData.remarks"
-                  icon
-                  small
-                  title="Save edits"
-                  :disabled="loading"
-                  @click="edit = null"
-                >
-                  <v-icon small>mdi-content-save-edit-outline</v-icon>
-                </v-btn>
-              </template>
-              <v-btn v-else-if="edit === null"
-                class="ml-3"
-                icon
-                small
-                title="Edit"
-                :disabled="loading"
-                @click="editItem(sequenceData, 'remarks')"
-              >
-                <v-icon small>mdi-square-edit-outline</v-icon>
-              </v-btn>
-            </template>
-          </v-card-subtitle>
-          <v-card-text v-if="edit && edit.sequence == sequenceData.sequence && edit.key == 'remarks'">
-            <v-textarea
-              class="markdown"
-              autofocus
-              placeholder="Enter your text here"
-              :disabled="loading"
-              v-model="edit.value"
-            >
-            </v-textarea>
-          </v-card-text>
-          <v-card-text v-else v-html="$options.filters.markdown(sequenceData.remarks || '')">
-          </v-card-text>
-        </v-card>
-      </v-col>
-
-      <v-col cols="6" class="d-flex flex-column">
-        <v-card outlined tile class="flex-grow-1">
-          <v-card-subtitle>
-            Processing remarks
-            <template v-if="writeaccess">
-              <template v-if="edit && edit.sequence == sequenceData.sequence && edit.key == 'remarks_final'">
-                <v-btn
-                  class="ml-3"
-                  icon
-                  small
-                  title="Cancel edit"
-                  :disabled="loading"
-                  @click="edit.value = sequenceData.remarks_final; edit = null"
-                >
-                  <v-icon small>mdi-close</v-icon>
-                </v-btn>
-                <v-btn v-if="edit.value != sequenceData.remarks_final"
-                  icon
-                  small
-                  title="Save edits"
-                  :disabled="loading"
-                  @click="edit = null"
-                >
-                  <v-icon small>mdi-content-save-edit-outline</v-icon>
-                </v-btn>
-              </template>
-              <v-btn v-else-if="edit === null"
-                class="ml-3"
-                icon
-                small
-                title="Edit"
-                :disabled="loading"
-                @click="editItem(sequenceData, 'remarks_final')"
-              >
-                <v-icon small>mdi-square-edit-outline</v-icon>
-              </v-btn>
-            </template>
-          </v-card-subtitle>
-          <v-card-text v-if="edit && edit.sequence == sequenceData.sequence && edit.key == 'remarks_final'">
-            <v-textarea
-              class="markdown"
-              autofocus
-              placeholder="Enter your text here"
-              :disabled="loading"
-              v-model="edit.value"
-            >
-            </v-textarea>
-          </v-card-text>
-          <v-card-text v-else v-html="$options.filters.markdown(sequenceData.remarks_final || '')">
-          </v-card-text>
-        </v-card>
-      </v-col>
-    </v-row>
-  </v-container>

   <v-data-table
     dense
     :headers="headers"
     :items="rows"
     :items-per-page.sync="itemsPerPage"
+    :server-items-length="eventCount"
     item-key="key"
-    :item-class="(item) => (activeItem == item) ? 'align-top blue accent-1 elevation-3' : 'align-top'"
+    :item-class="itemClass"
     sort-by="tstamp"
     :sort-desc="true"
     :search="filter"
-    :custom-filter="searchTable"
-    :loading="loading"
+    :loading="eventsLoading"
+    :options.sync="options"
     fixed-header
-    :footer-props='{itemsPerPageOptions: [ 10, 25, 50, 100, 500, -1 ]}'
-    :show-first-last-page="true"
+    :footer-props='{itemsPerPageOptions: [ 10, 25, 50, 100, 500, -1 ], showFirstLastPage: true}'
     @click:row="setActiveItem"
     @contextmenu:row="contextMenu"
   >
@@ -337,12 +264,12 @@
             :dark="labels[label] && labels[label].view.dark"
             :light="labels[label] && labels[label].view.light"
             :key="label"
-            :href="$route.path+'?label='+encodeURIComponent(label)"
+            @click="labelSearch=label"
           >{{label}}</v-chip>
         </span>
-        <dougal-event-edit-history v-if="entry.has_edits"
+        <dougal-event-edit-history v-if="entry.has_edits && writeaccess"
           :id="entry.id"
-          :disabled="loading"
+          :disabled="eventsLoading"
           :labels="labels"
         ></dougal-event-edit-history>
         <span v-if="entry.meta.readonly"
@@ -359,6 +286,29 @@

     </template>

+    <template v-slot:footer.prepend>
+      <v-checkbox v-for="label in filterableLabels"
+        :key="label"
+        class="mr-3"
+        v-model="shownLabels"
+        :value="label"
+        :title="`Show ${label} events`"
+        dense
+        hide-details
+      >
+        <template v-slot:label>
+          <v-chip
+            x-small
+            :color="labels[label] && labels[label].view.colour"
+            :title="labels[label] && labels[label].view.description"
+            :dark="labels[label] && labels[label].view.dark"
+            :light="labels[label] && labels[label].view.light"
+          >{{label}}
+          </v-chip>
+        </template>
+      </v-checkbox>
+    </template>
+
   </v-data-table>
   </v-card-text>
 </v-card>
@@ -450,14 +400,14 @@ export default {
         }
       ],
       items: [],
-      labels: {},
+      options: {},
       filter: "",
+      filterableLabels: [ "QC", "QCAccepted" ],
+      shownLabels: [ "QC", "QCAccepted" ],
       eventCount: null,
       eventDialog: false,
       eventLabelsDialog: false,
       defaultEventTimestamp: null,
-      presetRemarks: null,
       remarksMenu: null,
       remarksMenuItem: null,
       editedEvent: {},
@@ -465,9 +415,6 @@ export default {
       queuedReload: false,
      itemsPerPage: 25,

-      sequenceData: null,
-      edit: null, // { sequence, key, value }
-
       // Row highlighter
       activeItem: null,
@@ -482,7 +429,16 @@ export default {
   computed: {
     rows () {
       const rows = {};
-      this.items.forEach(i => {
+      this.items
+        .filter(i => {
+          for (const label of this.filterableLabels) {
+            if (!this.shownLabels.includes(label) && i.labels.includes(label)) {
+              return false;
+            }
+          }
+          return true;
+        })
+        .forEach(i => {
         const key = (i.sequence && i.point) ? (i.sequence+"@"+i.point) : i.tstamp;
         if (!rows[key]) {
           rows[key] = {
@@ -501,17 +457,6 @@ export default {
       return Object.values(rows);
     },

-    userLabels () {
-      const filtered = {};
-      for (const key in this.labels) {
-        if (this.labels[key].model.user) {
-          filtered[key] = this.labels[key];
-        }
-      }
-      return filtered;
-
-    },
-
     popularLabels () {
       const tuples = this.items.flatMap( i => i.labels )
         .filter( l => (this.labels[l]??{})?.model?.user )
@@ -523,6 +468,10 @@ export default {
         .sort( (a, b) => b[1]-a[1] );
     },

+    presetRemarks () {
+      return this.projectConfiguration?.events?.presetRemarks ?? [];
+    },
+
     defaultSequence () {
       if (this.$route.params.sequence) {
         return Number(this.$route.params.sequence.split(";").pop());
@@ -531,39 +480,24 @@ export default {
       }
     },

-    ...mapGetters(['user', 'writeaccess', 'loading', 'online', 'sequence', 'line', 'point', 'position', 'timestamp', 'lineName', 'serverEvent']),
+    ...mapGetters(['user', 'writeaccess', 'eventsLoading', 'online', 'sequence', 'line', 'point', 'position', 'timestamp', 'lineName', 'serverEvent', 'events', 'labels', 'userLabels']),
     ...mapState({projectSchema: state => state.project.projectSchema})

   },

   watch: {

-    async edit (newVal, oldVal) {
-      if (newVal === null && oldVal !== null) {
-        const item = oldVal.sequence == this.sequenceData.sequence
-          ? this.sequenceData
-          : null;
-        if (item && item[oldVal.key] != oldVal.value) {
-          if (await this.saveItem(oldVal)) {
-            item[oldVal.key] = oldVal.value;
-          } else {
-            this.edit = oldVal;
-          }
-        }
-      }
-    },
-
-    defaultSequence (sequenceNumber) {
-      this.getSequenceData();
-    },
-
     options: {
-      handler () {
-        //this.getEvents();
+      async handler () {
+        await this.fetchEvents();
       },
       deep: true
     },

+    async events () {
+      console.log("Events changed");
+      await this.fetchEvents();
+    },
+
     eventDialog (val) {
       if (val) {
         // If not online
@@ -571,36 +505,14 @@ export default {
       }
     },

-    async serverEvent (event) {
-      if (event.channel == "event" && event.payload.schema == this.projectSchema) {
-        if (!this.loading && !this.queuedReload) {
-          // Do not force a non-cached response if refreshing as a result
-          // of an event notification. We will assume that the server has
-          // already had time to update the cache by the time our request
-          // gets back to it.
-          this.getEvents();
-        } else {
-          this.queuedReload = true;
-        }
-      } else if ((event.channel == "final_lines" || event.channel == "raw_lines") &&
-                 event.payload.schema == this.projectSchema &&
-                 this.sequenceData?.sequence &&
-                 (this.sequenceData.sequence == event.payload.old.sequence ||
-                  this.sequenceData.sequence == event.payload.new.sequence)) {
-        this.getSequenceData();
+    filter (newVal, oldVal) {
+      if (newVal?.toLowerCase() != oldVal?.toLowerCase()) {
+        this.fetchEvents();
       }
     },

-    queuedReload (newVal, oldVal) {
-      if (newVal && !oldVal && !this.loading) {
-        this.getEvents();
-      }
-    },
-
-    loading (newVal, oldVal) {
-      if (!newVal && oldVal && this.queuedReload) {
-        this.getEvents();
-      }
+    labelSearch () {
+      this.fetchEvents();
     },

     itemsPerPage (newVal, oldVal) {
@@ -645,88 +557,27 @@ export default {
       return !this.deletableEntries(item);
     },

-    async getEventCount () {
-      //this.eventCount = await this.api([`/project/${this.$route.params.project}/event/?count`]);
-      this.eventCount = null;
-    },
-
-    async getEvents (opts = {}) {
-
-      const query = new URLSearchParams(this.options);
-      if (this.options.itemsPerPage < 0) {
-        query.delete("itemsPerPage");
-      }
-
-      if (this.$route.params.sequence) {
-        query.set("sequence", this.$route.params.sequence);
-      }
-
-      if (this.$route.params.date0) {
-        query.set("date0", this.$route.params.date0);
-      }
-
-      if (this.$route.params.date1) {
-        query.set("date1", this.$route.params.date1);
-      }
-
-      const url = `/project/${this.$route.params.project}/event?${query.toString()}`;
-
-      this.queuedReload = false;
-      this.items = await this.api([url, opts]) || [];
-
-    },
-
-    async getLabelDefinitions () {
-      const url = `/project/${this.$route.params.project}/label`;
-
-      const labelSet = {};
-      const labels = await this.api([url]) || [];
-      labels.forEach( l => labelSet[l.name] = l.data );
-      this.labels = labelSet;
-    },
-
-    async getPresetRemarks () {
-      const url = `/project/${this.$route.params.project}/configuration/events/presetRemarks`;
-
-      this.presetRemarks = await this.api([url]);
-    },
-
-    async getSequenceData () {
-      if (this.defaultSequence) {
-        const url = `/project/${this.$route.params.project}/sequence?sequence=${this.defaultSequence}`;
-        const res = await this.api([url]);
-        this.sequenceData = res[0];
-      } else {
-        this.sequenceData = null;
-      }
-    },
-
-    editItem (item, key) {
-      this.edit = {
-        sequence: item.sequence,
-        key,
-        value: item[key]
-      }
-    },
-
-    async saveItem (edit) {
-      if (!edit) return;
-
-      try {
-        const url = `/project/${this.$route.params.project}/sequence/${edit.sequence}`;
-        const init = {
-          method: "PATCH",
-          body: {
-            [edit.key]: edit.value
-          }
-        };
-
-        let res;
-        await this.api([url, init, (e, r) => res = r]);
-        return res && res.ok;
-      } catch (err) {
-        return false;
-      }
+    itemClass (item) {
+      if (this.activeItem == item) {
+        return 'align-top blue accent-1 elevation-3';
+      } else if (item.sequence && item.point && item.tstamp) {
+        return this.$vuetify.theme.isDark
+          ? 'align-top blue-grey darken-4'
+          : 'align-top blue-grey lighten-5';
+      } else {
+        return 'align-top';
+      }
+    },
+
+    async fetchEvents (opts = {}) {
+      const options = {
+        text: this.filter,
+        label: this.labelSearch,
+        ...this.options
+      };
+      const res = await this.getEvents([this.$route.params.project, options]);
+      this.items = res.events;
+      this.eventCount = res.count;
     },

     newItem (from = {}) {
@@ -800,7 +651,7 @@ export default {
       if (!err && res.ok) {
         this.showSnack(["Event saved", "success"]);
         this.queuedReload = true;
-        this.getEvents({cache: "reload"});
+        this.fetchEvents({cache: "reload"});
       }
     }
@@ -818,7 +669,7 @@ export default {
       if (!err && res.ok) {
         this.showSnack(["Event saved", "success"]);
         this.queuedReload = true;
-        this.getEvents({cache: "reload"});
+        this.fetchEvents({cache: "reload"});
       }
     }
@@ -865,7 +716,7 @@ export default {
       if (!err && res.ok) {
         this.showSnack([`${ids.length} events deleted`, "red"]);
         this.queuedReload = true;
-        this.getEvents({cache: "reload"});
+        this.fetchEvents({cache: "reload"});
       }
     }
@@ -881,7 +732,7 @@ export default {
       if (!err && res.ok) {
         this.showSnack(["Event deleted", "red"]);
         this.queuedReload = true;
-        this.getEvents({cache: "reload"});
+        this.fetchEvents({cache: "reload"});
      }
     }
@@ -915,19 +766,6 @@ export default {

     },

-    searchTable (value, search, item) {
-      if (!value && !search) return true;
-      const s = search.toLowerCase();
-      if (typeof value === 'string') {
-        return value.toLowerCase().includes(s);
-      } else if (typeof value === 'number') {
-        return value == search;
-      } else {
-        return item.items.some( i => i.remarks.toLowerCase().includes(s) ) ||
-               item.items.some( i => i.labels.some( l => l.toLowerCase().includes(s) ));
-      }
-    },
-
     viewOnMap(item) {
       if (item?.meta && item.meta?.geometry?.type == "Point") {
         const [ lon, lat ] = item.meta.geometry.coordinates;
@@ -937,6 +775,23 @@ export default {
       }
     },

+    isAcceptedQc (item) {
+      return item.labels.includes('QCAccepted');
+    },
+
+    async acceptQc (item, accept = true) {
+
+      const url = accept
+        ? `/project/${this.$route.params.project}/qc/results/accept`
+        : `/project/${this.$route.params.project}/qc/results/unaccept`;
+
+      await this.api([url, {
+        method: "POST",
+        body: [ item.id ]
+      }]);
+
+    },
+
     setActiveItem (item) {
       // Disable setting the active item for now,
       // it's kind of annoying.
@@ -949,15 +804,11 @@ export default {
       */
     },

-    ...mapActions(["api", "showSnack"])
+    ...mapActions(["api", "showSnack", "refreshEvents", "getEvents"])
   },

   async mounted () {
-    await this.getLabelDefinitions();
-    this.getEventCount();
-    this.getEvents();
-    this.getPresetRemarks();
-    this.getSequenceData();
+    this.fetchEvents();

     window.addEventListener('keyup', this.handleKeyboardEvent);
   },
@@ -374,7 +374,9 @@ export default {
         }
       }
     ],
-    hashMarker: null
+    labels: {},
+    hashMarker: null,
+    references: {}
   };
 },
@@ -438,17 +440,16 @@ export default {
       const data = await this.api([url, {headers: {"Accept": "application/geo+json"}}]);
       if (data) {

-        function colour(feature) {
-          if (feature && feature.properties && feature.properties.type) {
-            if (feature.properties.type == "qc") {
-              return feature.properties.labels.includes("QCAccepted")
-                ? "lightgray"
-                : "gray";
-            } else if (feature.properties.type == "midnight shot") {
-              return "cyan";
-            } else {
-              return "orange";
-            }
+        const colour = (feature) => {
+          if (feature.properties.meta?.qc_id) {
+            return feature.properties.labels.includes("QCAccepted")
+              ? "lightgray"
+              : "green";
+          } else if (feature.properties.type == "midnight shot") { // FIXME
+            // The above will no longer work. See #223.
+            return "cyan";
+          } else if (feature.properties.labels?.length) {
+            return this.labels?.[feature.properties.labels[0]]?.view?.colour ?? "orange";
           }
+          return "brown";
         }
@@ -474,7 +475,7 @@ export default {
         bounds._northEast.lng,
         bounds._northEast.lat
       ].map(i => i.toFixed(bboxScale)).join(",");
-      const limit = 10000;
+      const limit = 10000; // Empirical value

       const query = new URLSearchParams({bbox, limit});
@@ -511,7 +512,9 @@ export default {
       }

       l.layer.clearLayers();
-      if (layer instanceof L.Layer || (layer.features && layer.features.length < limit) || ("length" in layer && layer.length < limit)) {
+      //if (layer instanceof L.Layer || (layer.features && layer.features.length < limit) || ("length" in layer && layer.length < limit)) {
+      if (layer instanceof L.Layer || ((layer.features?.length ?? layer?.length) < limit)) {

         if (l.layer.addData) {
           l.layer.addData(layer);
         } else if (l.layer.addLayer) {
@@ -519,8 +522,12 @@ export default {
         }

         l.layer.lastRequestURL = url;
+      } else if (!layer.features) {
+        console.log(`Layer ${url} is empty`);
       } else {
-        console.warn("Too much data from", url);
+        console.warn(`Too much data from ${url} (${layer.features?.length ?? layer.length} ≥ ${limit} features)`);
+
+        this.showSnack([`Layer ${l.layer.options.userLayerName ? "‘"+l.layer.options.userLayerName+"’ " : ""}is too large: ${layer.features?.length ?? layer.length} features; maximum is ${limit}`, "error"]);
       }
     })
     .finally( () => {
@@ -671,7 +678,143 @@ export default {
       }
     },

-    ...mapActions(["api"])
+    async getLabelDefinitions () {
+      const url = `/project/${this.$route.params.project}/label`;
+
+      this.labels = await this.api([url]) || [];
+    },
+
+    removeUserLayers () {
+      map.eachLayer( layer => {
+        if (layer.options.userLayer === true) {
+          console.log("Removing", layer);
+          layer.eachLayer( sublayer => {
+            const idx = this.layerRefreshConfig.findIndex(i => i.layer == layer);
+            if (idx != -1) {
+              this.layerRefreshConfig.splice(idx, 1);
+            }
+          });
+          map.removeLayer(layer);
+          this.references.layerControl.removeLayer(layer);
+        }
+      });
+    },
+
+    async addUserLayers (userLayers) {
+
+      const options = {
+        userLayer: true,
+        style (feature) {
+          const style = {
+            stroke: undefined,
+            color: "grey",
+            weight: 2,
+            opacity: 0.5,
+            lineCap: undefined,
+            lineJoin: undefined,
+            dashArray: undefined,
+            dashOffset: undefined,
+            fill: undefined,
+            fillColor: "lightgrey",
+            fillOpacity: 0.5,
+            fillRule: undefined
+          };
+
+          for (let key in style) {
+            switch (key) {
+              case "color":
+                style[key] = feature.properties?.colour ?? feature.properties?.color ?? style[key];
+                break;
+              case "fillColor":
+                style[key] = feature.properties?.fillColour ?? feature.properties?.fillColor ?? style[key];
+                break;
+              default:
+                style[key] = feature.properties?.[key] ?? style[key];
+            }
+
+            if (typeof style[key] === "undefined") {
+              delete style[key];
+            }
+          }
+
+          return style;
+        }
+      };
+
+      const userLayerGroups = {};
+      userLayers.forEach(layer => {
+        if (!(layer.name in userLayerGroups)) {
+          userLayerGroups[layer.name] = [];
+        }
+        userLayerGroups[layer.name].push(layer);
+      });
+
+      for (let userLayerName in userLayerGroups) {
+        const userLayerGroup = userLayerGroups[userLayerName];
+
+        const layer = L.featureGroup(null, {userLayer: true, userLayerGroup: true, userLayerName});
+        userLayerGroup.forEach(l => {
+          const sublayer = L.geoJSON(null, {...options, userLayerName});
+          layer.addLayer(sublayer);
+          sublayer.on('add', ({target}) => {
+            this.refreshLayers([target])
+          });
+
+          if (l.tooltip) {
+            sublayer.bindTooltip((layer) => {
+              return layer?.feature?.properties?.[l.tooltip] ?? userLayerName;
+            });
+          }
+
+          if (l.popup) {
+            if (l.popup === true) {
+              sublayer.bindPopup((layer) => {
+                const p = layer?.feature?.properties;
+                let t = "";
+                if (p) {
+                  t += "<table>";
+                  for (let [k, v] of Object.entries(p)) {
+                    t += `<tr><td><b>${k}: </b></td><td>${v}</td></tr>`;
+                  }
+                  t += "</table>";
+                  return t;
+                }
+                return userLayerName;
+              });
+            } else {
+              sublayer.bindPopup((layer) => {
+                return layer?.feature?.properties?.[l.popup] ?? userLayerName;
+              });
+            }
+          }
+
+          const refreshConfig = {
+            layer: sublayer,
+            url: (query = "") => {
+              return `/files/${l.path}`;
+            }
+          };
+
+          this.layerRefreshConfig.push(refreshConfig);
+        });
+
+        layer.on('add', ({target}) => {
+          this.refreshLayers(target.getLayers())
+        });
+        this.references.layerControl.addOverlay(layer, `<span title="User layer" style="text-decoration: dotted underline;">${userLayerName}</span>`);
+      }
+    },
+
+    async fetchUserLayers () {
+      const url = `/project/${this.$route.params.project}/gis/layer`;
+      const userLayers = await this.api([url]) || [];
+
+      this.removeUserLayers();
+      this.addUserLayers(userLayers);
+    },
+
+    ...mapActions(["api", "showSnack"])

   },
@@ -704,8 +847,10 @@ export default {
       }
     };

-    layers["Events (QC)"] = L.realtime(this.getEvents(i => i.properties.type == "qc"), eventsOptions());
-    layers["Events (Other)"] = L.realtime(this.getEvents(i => i.properties.type != "qc"), eventsOptions());
+    this.getLabelDefinitions(); // No await
+
+    layers["Events (QC)"] = L.realtime(this.getEvents(i => i.properties.meta?.qc_id), eventsOptions());
+    layers["Events (Other)"] = L.realtime(this.getEvents(i => !i.properties.meta?.qc_id), eventsOptions());

     layers["Events (Other)"].on('update', function (e) {
       //console.log("Events (Other) update event", e);
@@ -739,7 +884,7 @@ export default {
     }

     if (init.activeLayers) {
-      init.activeLayers.forEach(l => layers[l].addTo(map));
+      init.activeLayers.forEach(l => layers[l]?.addTo(map));
     } else {
       layers.OpenSeaMap.addTo(map);
       layers.Preplots.addTo(map);
@@ -748,6 +893,9 @@ export default {
     const layerControl = L.control.layers(tileMaps, layers).addTo(map);
     const scaleControl = L.control.scale().addTo(map);

+    this.references.layerControl = layerControl;
+    this.references.scaleControl = scaleControl;
+
     if (init.position) {
       map.setView(init.position.slice(1), init.position[0]);
     } else {
@@ -775,10 +923,13 @@ export default {
     map.on('layeradd', this.updateURL);
     map.on('layerremove', this.updateURL);

-
     this.layerRefreshConfig.forEach( l => {
       l.layer.on('add', ({target}) => this.refreshLayers([target]));
     });

+    this.fetchUserLayers();
+
     if (init.position) {
       this.refreshLayers();
     } else {
@@ -44,6 +44,7 @@
|
||||
label="Filter"
|
||||
single-line
|
||||
clearable
|
||||
hint="Filter by sequence, line, first or last shotpoints, remarks or start/end time"
|
||||
></v-text-field>
|
||||
</v-toolbar>
|
||||
</v-card-title>
|
||||
@@ -109,17 +110,24 @@
|
||||
:headers="headers"
|
||||
:items="items"
|
||||
:items-per-page.sync="itemsPerPage"
|
||||
:server-items-length="sequenceCount"
|
||||
item-key="sequence"
|
||||
:search="filter"
|
||||
:loading="loading"
|
||||
:fixed-header="true"
|
||||
:loading="plannedSequencesLoading"
|
||||
fixed-header
|
||||
no-data-text="No planned lines. Add lines via the context menu from either the Lines or Sequences view."
|
||||
:item-class="(item) => (activeItem == item && !edit) ? 'blue accent-1 elevation-3' : ''"
|
||||
:footer-props="{showFirstLastPage: true}"
|
||||
@click:row="setActiveItem"
|
||||
@contextmenu:row="contextMenu"
|
||||
>
|
||||
|
||||
<template v-slot:item.srss="{item}">
|
||||
<v-icon small :title="srssInfo(item)">{{srssIcon(item)}}</v-icon>
|
||||
<span style="white-space: nowrap;">
|
||||
<v-icon small :title="srssInfo(item)">{{srssIcon(item)}}</v-icon>
|
||||
/
|
||||
<v-icon small :title="wxInfo(item)" v-if="item.meta.wx">{{wxIcon(item)}}</v-icon>
|
||||
</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.sequence="{item, value}">
|
||||
@@ -271,7 +279,7 @@
|
||||
icon
|
||||
small
|
||||
title="Edit"
|
||||
:disabled="loading"
|
||||
:disabled="plannedSequencesLoading"
|
||||
@click="editItem(item, 'remarks')"
|
||||
>
|
||||
<v-icon small>mdi-square-edit-outline</v-icon>
|
||||
@@ -413,7 +421,8 @@ export default {
|
||||
remarks: null,
|
||||
editRemarks: false,
|
||||
filter: null,
|
||||
num_lines: null,
|
||||
options: {},
|
||||
sequenceCount: null,
|
||||
activeItem: null,
|
||||
edit: null, // {sequence, key, value}
|
||||
queuedReload: false,
|
||||
@@ -422,6 +431,123 @@ export default {
|
||||
plannerConfig: null,
|
||||
shiftAll: false, // Shift all sequences checkbox
|
||||
|
||||
// Weather API
|
||||
wxData: null,
|
||||
weathercode: {
|
||||
0: {
|
||||
description: "Clear sky",
|
||||
icon: "mdi-weather-sunny"
|
||||
},
|
||||
1: {
|
||||
description: "Mainly clear",
|
||||
icon: "mdi-weather-sunny"
|
||||
},
|
||||
2: {
|
||||
description: "Partly cloudy",
|
||||
icon: "mdi-weather-partly-cloudy"
|
||||
},
|
||||
3: {
|
||||
description: "Overcast",
|
||||
icon: "mdi-weather-cloudy"
|
||||
},
|
||||
45: {
|
||||
description: "Fog",
|
||||
icon: "mde-weather-fog"
|
||||
},
|
||||
48: {
|
||||
description: "Depositing rime fog",
|
||||
icon: "mdi-weather-fog"
|
||||
},
|
||||
51: {
|
||||
description: "Light drizzle",
|
||||
icon: "mdi-weather-partly-rainy"
|
||||
},
|
||||
53: {
|
||||
description: "Moderate drizzle",
|
||||
icon: "mdi-weather-partly-rainy"
|
||||
},
|
||||
55: {
|
||||
description: "Dense drizzle",
|
||||
icon: "mdi-weather-rainy"
|
||||
},
|
||||
56: {
|
||||
description: "Light freezing drizzle",
|
||||
icon: "mdi-weather-partly-snowy-rainy"
|
||||
},
|
||||
57: {
|
||||
description: "Freezing drizzle",
|
||||
icon: "mdi-weather-partly-snowy-rainy"
|
||||
},
|
||||
61: {
|
||||
description: "Light rain",
|
||||
icon: "mdi-weather-rainy"
|
||||
},
|
||||
63: {
|
||||
description: "Moderate rain",
|
||||
icon: "mdi-weather-rainy"
|
||||
},
|
||||
65: {
|
||||
description: "Heavy rain",
|
||||
icon: "mdi-weather-pouring"
|
||||
},
|
||||
66: {
|
||||
description: "Light freezing rain",
|
||||
icon: "mdi-loading"
|
||||
},
|
||||
67: {
|
||||
description: "Freezing rain",
|
||||
icon: "mdi-loading"
|
||||
},
|
||||
71: {
|
||||
description: "Light snow",
|
||||
icon: "mdi-loading"
|
||||
},
|
||||
73: {
|
||||
description: "Moderate snow",
|
||||
icon: "mdi-loading"
|
||||
},
|
||||
75: {
|
||||
description: "Heavy snow",
|
||||
icon: "mdi-loading"
|
||||
},
|
||||
77: {
|
||||
description: "Snow grains",
|
||||
icon: "mdi-loading"
|
||||
},
|
||||
80: {
|
||||
description: "Light rain showers",
|
||||
icon: "mdi-loading"
|
||||
},
|
||||
81: {
|
||||
description: "Moderate rain showers",
|
||||
icon: "mdi-loading"
|
||||
},
|
||||
82: {
|
||||
description: "Violent rain showers",
|
||||
icon: "mdi-loading"
|
||||
},
|
||||
85: {
|
||||
description: "Light snow showers",
|
||||
icon: "mdi-loading"
|
||||
},
|
||||
86: {
|
||||
description: "Snow showers",
|
||||
icon: "mdi-loading"
|
||||
},
|
||||
95: {
|
||||
description: "Thunderstorm",
|
||||
icon: "mdi-loading"
|
||||
},
|
||||
96: {
|
||||
description: "Hailstorm",
|
||||
icon: "mdi-loading"
|
||||
},
|
||||
99: {
|
||||
description: "Heavy hailstorm",
|
||||
icon: "mdi-loading"
|
||||
},
|
||||
},
|
||||
|
||||
// Context menu stuff
|
||||
contextMenuShow: false,
|
||||
contextMenuX: 0,
|
||||
@@ -431,11 +557,22 @@ export default {
|
||||
},
|
||||
|
||||
computed: {
|
||||
...mapGetters(['user', 'writeaccess', 'loading', 'serverEvent'])
|
||||
...mapGetters(['user', 'writeaccess', 'plannedSequencesLoading', 'plannedSequences', 'planRemarks'])
|
||||
},
|
||||
|
||||
watch: {
|
||||
|
||||
options: {
|
||||
handler () {
|
||||
this.fetchPlannedSequences();
|
||||
},
|
||||
deep: true
|
||||
},
|
||||
|
||||
async plannedSequences () {
|
||||
await this.fetchPlannedSequences();
|
||||
},
|
||||
|
||||
async edit (newVal, oldVal) {
|
||||
if (newVal === null && oldVal !== null) {
|
||||
const item = this.items.find(i => i.sequence == oldVal.sequence);
|
||||
@@ -466,41 +603,9 @@ export default {
|
||||
}
|
||||
},
|
||||
|
||||
async serverEvent (event) {
|
||||
if (event.channel == "planned_lines" && event.payload.pid == this.$route.params.project) {
|
||||
|
||||
// Ignore non-ops
|
||||
/*
|
||||
if (event.payload.old === null && event.payload.new === null) {
|
||||
return;
|
||||
}
|
||||
*/
|
||||
|
||||
if (!this.loading && !this.queuedReload) {
|
||||
// Do not force a non-cached response if refreshing as a result
|
||||
// of an event notification. We will assume that the server has
|
||||
// already had time to update the cache by the time our request
|
||||
// gets back to it.
|
||||
this.getPlannedLines();
|
||||
} else {
|
||||
this.queuedReload = true;
|
||||
}
|
||||
} else if (event.channel == "info" && event.payload.pid == this.$route.params.project) {
|
||||
if (event.payload?.new?.key == "plan" && ("remarks" in (event.payload?.new?.value || {}))) {
|
||||
this.remarks = event.payload?.new.value.remarks;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
queuedReload (newVal, oldVal) {
|
||||
if (newVal && !oldVal && !this.loading) {
|
||||
this.getPlannedLines();
|
||||
}
|
||||
},
|
||||
|
||||
loading (newVal, oldVal) {
|
||||
if (!newVal && oldVal && this.queuedReload) {
|
||||
this.getPlannedLines();
|
||||
filter (newVal, oldVal) {
|
||||
if (newVal?.toLowerCase() != oldVal?.toLowerCase()) {
|
||||
this.fetchPlannedSequences();
|
||||
}
|
||||
},
|
||||
|
||||
@@ -630,6 +735,113 @@ export default {
|
||||
return text.join("\n");
|
||||
},
|
||||
|
||||
wxInfo (line) {
|
||||
|
||||
function atm(key) {
|
||||
return line.meta?.wx?.atmospheric?.hourly[key];
|
||||
}
|
||||
|
||||
function mar(key) {
|
||||
return line.meta?.wx?.marine?.hourly[key];
|
||||
}
|
||||
|
||||
const code = atm("weathercode");
|
||||
|
||||
const description = this.weathercode[code]?.description ?? `WMO code ${code}`;
|
||||
const wind_speed = Math.round(atm("windspeed_10m"));
|
||||
const wind_direction = String(Math.round(atm("winddirection_10m"))).padStart(3, "0");
|
||||
const pressure = Math.round(atm("surface_pressure"));
|
||||
const temperature = Math.round(atm("temperature_2m"));
|
||||
const humidity = atm("relativehumidity_2m");
|
||||
const precipitation = atm("precipitation");
|
||||
const precipitation_probability = atm("precipitation_probability");
|
||||
const precipitation_str = precipitation_probability
|
||||
? `\nPrecipitation ${precipitation} mm (prob. ${precipitation_probability}%)`
|
||||
: ""
|
||||
|
||||
const wave_height = mar("wave_height").toFixed(1);
|
||||
const wave_direction = mar("wave_direction");
|
||||
const wave_period = mar("wave_period");
|
||||
|
||||
return `${description}\n${temperature}° C\n${pressure} hPa\nWind ${wind_speed} kt ${wind_direction}°\nRelative humidity ${humidity}%${precipitation_str}\nWaves ${wave_height} m ${wave_direction}° @ ${wave_period} s`;
|
||||
},
|
||||
|
||||
wxIcon (line) {
|
||||
const code = line.meta?.wx?.atmospheric?.hourly?.weathercode;
|
||||
|
||||
return this.weathercode[code]?.icon ?? "mdi-help";
|
||||
|
||||
},
|
||||
|
||||
async wxQuery (line) {
|
||||
function midpoint(line) {
|
||||
// WARNING Fails if across the antimeridian
|
||||
const longitude = (line.geometry.coordinates[0][0] + line.geometry.coordinates[1][0])/2;
|
||||
const latitude = (line.geometry.coordinates[0][1] + line.geometry.coordinates[1][1])/2;
|
||||
return [ longitude, latitude ];
|
||||
}
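// NOTE: a wrap-aware variant — a sketch only, not part of this commit —
// could take the shortest signed arc between the two longitudes instead
// of the plain average (179° and -179° should average to ±180°, not 0°):
function midpointWrapSafe (line) {
const [lon0, lat0] = line.geometry.coordinates[0];
const [lon1, lat1] = line.geometry.coordinates[1];
const dlon = ((lon1 - lon0 + 540) % 360) - 180; // shortest signed arc between the two longitudes
let lon = lon0 + dlon / 2;
if (lon > 180) lon -= 360; // normalise back into [-180, 180]
if (lon < -180) lon += 360;
return [ lon, (lat0 + lat1) / 2 ];
}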
|
||||
|
||||
function extract (fcst) {
|
||||
const τ = (line.ts0.valueOf() + line.ts1.valueOf()) / 2000;
|
||||
const [idx, ε] = fcst?.hourly?.time?.reduce( (acc, cur, idx) => {
|
||||
const δ = Math.abs(cur - τ);
|
||||
const retval = acc
|
||||
? acc[1] < δ
|
||||
? acc
|
||||
: [ idx, δ ]
|
||||
: [ idx, δ ];
|
||||
|
||||
return retval;
|
||||
});
|
||||
|
||||
if (idx != null) { // idx may legitimately be 0 (the first hour)
for (let key in fcst?.hourly) {
fcst.hourly[key] = fcst.hourly[key][idx]; // collapse each hourly series to the value nearest the line midpoint time
}
}
|
||||
|
||||
return fcst;
|
||||
}
|
||||
|
||||
async function fetch_atmospheric (opts) {
|
||||
const { longitude, latitude, dt0, dt1 } = opts;
|
||||
|
||||
const url = `https://api.open-meteo.com/v1/forecast?latitude=${latitude}&longitude=${longitude}&hourly=temperature_2m,relativehumidity_2m,precipitation_probability,precipitation,weathercode,pressure_msl,surface_pressure,windspeed_10m,winddirection_10m&daily=uv_index_max&windspeed_unit=kn&timeformat=unixtime&timezone=GMT&start_date=${dt0}&end_date=${dt1}&format=json`;
|
||||
const init = {};
|
||||
const res = await fetch (url, init);
|
||||
if (res?.ok) {
|
||||
const data = await res.json();
|
||||
|
||||
return extract(data);
|
||||
}
|
||||
}
|
||||
|
||||
async function fetch_marine (opts) {
|
||||
const { longitude, latitude, dt0, dt1 } = opts;
|
||||
const url = `https://marine-api.open-meteo.com/v1/marine?latitude=${latitude}&longitude=${longitude}&hourly=wave_height,wave_direction,wave_period&timeformat=unixtime&timezone=GMT&start_date=${dt0}&end_date=${dt1}&format=json`;
|
||||
|
||||
const init = {};
|
||||
const res = await fetch (url, init);
|
||||
if (res?.ok) {
|
||||
const data = await res.json();
|
||||
|
||||
return extract(data);
|
||||
}
|
||||
}
|
||||
|
||||
if (line) {
|
||||
const [ longitude, latitude ] = midpoint(line);
|
||||
const dt0 = line.ts0.toISOString().substr(0, 10);
|
||||
const dt1 = line.ts1.toISOString().substr(0, 10);
|
||||
|
||||
return {
|
||||
atmospheric: await fetch_atmospheric({longitude, latitude, dt0, dt1}),
|
||||
marine: await fetch_marine({longitude, latitude, dt0, dt1})
|
||||
};
|
||||
}
|
||||
},
|
||||
|
||||
lagAfter (item) {
|
||||
const pos = this.items.indexOf(item)+1;
|
||||
if (pos != 0) {
|
||||
@@ -662,7 +874,6 @@ export default {
|
||||
const url = `/project/${this.$route.params.project}/plan/${this.contextMenuItem.sequence}`;
|
||||
const init = {method: "DELETE"};
|
||||
await this.api([url, init]);
|
||||
await this.getPlannedLines();
|
||||
},
|
||||
|
||||
editItem (item, key, value) {
|
||||
@@ -714,18 +925,6 @@ export default {
|
||||
}
|
||||
},
|
||||
|
||||
async getPlannedLines () {
|
||||
|
||||
const url = `/project/${this.$route.params.project}/plan`;
|
||||
|
||||
this.queuedReload = false;
|
||||
this.items = await this.api([url]) || [];
|
||||
for (const item of this.items) {
|
||||
item.ts0 = new Date(item.ts0);
|
||||
item.ts1 = new Date(item.ts1);
|
||||
}
|
||||
},
|
||||
|
||||
async getPlannerConfig () {
|
||||
const url = `/project/${this.$route.params.project}/configuration/planner`;
|
||||
this.plannerConfig = await this.api([url]) || {
|
||||
@@ -736,14 +935,15 @@ export default {
|
||||
}
|
||||
},
|
||||
|
||||
async getPlannerRemarks () {
|
||||
const url = `/project/${this.$route.params.project}/info/plan/remarks`;
|
||||
this.remarks = await this.api([url]) || "";
|
||||
},
|
||||
|
||||
async getSequences () {
|
||||
const url = `/project/${this.$route.params.project}/sequence`;
|
||||
this.sequences = await this.api([url]) || [];
|
||||
async fetchPlannedSequences (opts = {}) {
|
||||
const options = {
|
||||
text: this.filter,
|
||||
...this.options
|
||||
};
|
||||
const res = await this.getPlannedSequences([this.$route.params.project, options]);
|
||||
this.items = res.sequences;
|
||||
this.sequenceCount = res.count;
|
||||
this.remarks = this.planRemarks;
|
||||
},
|
||||
|
||||
setActiveItem (item) {
|
||||
@@ -752,13 +952,12 @@ export default {
|
||||
: item;
|
||||
},
|
||||
|
||||
...mapActions(["api", "showSnack"])
|
||||
...mapActions(["api", "showSnack", "getPlannedSequences"])
|
||||
},
|
||||
|
||||
async mounted () {
|
||||
await this.getPlannerConfig();
|
||||
this.getPlannedLines();
|
||||
this.getPlannerRemarks();
|
||||
await this.fetchPlannedSequences();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,18 +1,25 @@
|
||||
<template>
|
||||
<v-container fluid fill-height class="ma-0 pa-0">
|
||||
<v-row no-gutters align="stretch" class="fill-height">
|
||||
<v-col cols="12">
|
||||
<v-col cols="12" v-if="projectFound">
|
||||
<!-- Show component here according to selected route -->
|
||||
<keep-alive>
|
||||
<router-view :key="$route.path"></router-view>
|
||||
</keep-alive>
|
||||
</v-col>
|
||||
<v-col cols="12" v-else>
|
||||
<v-card>
|
||||
<v-card-text>
|
||||
Project does not exist.
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</v-col>
|
||||
</v-row>
|
||||
</v-container>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { mapActions } from 'vuex'
|
||||
import { mapActions, mapGetters } from 'vuex'
|
||||
|
||||
export default {
|
||||
name: 'Project',
|
||||
@@ -24,12 +31,53 @@ export default {
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
projectFound () {
|
||||
return this.loading || this.projectId;
|
||||
},
|
||||
|
||||
...mapGetters(["loading", "projectId", "projectSchema", "serverEvent"])
|
||||
},
|
||||
|
||||
watch: {
|
||||
async serverEvent (event) {
|
||||
if (event.channel == "project" && event.payload?.operation == "DELETE" && event.payload?.schema == "public") {
|
||||
// Project potentially deleted
|
||||
await this.getProject(this.$route.params.project);
|
||||
} else if (event.payload?.schema == this.projectSchema) {
|
||||
if (event.channel == "event") {
|
||||
this.refreshEvents();
|
||||
} else if (event.channel == "planned_lines") {
|
||||
this.refreshPlan();
|
||||
} else if (["raw_lines", "final_lines", "final_shots"].includes(event.channel)) {
|
||||
this.refreshSequences();
|
||||
} else if (["preplot_lines", "preplot_points"].includes(event.channel)) {
|
||||
this.refreshLines();
|
||||
} else if (event.channel == "info") {
|
||||
if ((event.payload?.new ?? event.payload?.old)?.key == "plan") {
|
||||
this.refreshPlan();
|
||||
}
|
||||
} else if (event.channel == "project") {
|
||||
this.getProject(this.$route.params.project);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
methods: {
|
||||
...mapActions(["getProject"])
|
||||
...mapActions(["getProject", "refreshLines", "refreshSequences", "refreshEvents", "refreshLabels", "refreshPlan"])
|
||||
},
|
||||
|
||||
async mounted () {
|
||||
await this.getProject(this.$route.params.project);
|
||||
if (this.projectFound) {
|
||||
this.refreshLines();
|
||||
this.refreshSequences();
|
||||
this.refreshEvents();
|
||||
this.refreshLabels();
|
||||
this.refreshPlan();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -83,12 +83,22 @@ export default {
|
||||
},
|
||||
|
||||
computed: {
|
||||
...mapGetters(['loading'])
|
||||
...mapGetters(['loading', 'serverEvent'])
|
||||
},
|
||||
|
||||
watch: {
|
||||
async serverEvent (event) {
|
||||
if (event.channel == "project" && event.payload?.schema == "public") {
|
||||
if (event.payload?.operation == "DELETE" || event.payload?.operation == "INSERT") {
|
||||
await this.load();
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
methods: {
|
||||
async list () {
|
||||
this.items = await this.api(["/project/"]) || [];
|
||||
this.items = await this.api(["/project"]) || [];
|
||||
},
|
||||
|
||||
async summary (item) {
|
||||
|
||||
@@ -50,16 +50,6 @@
|
||||
<v-col col="12" sm="6">
|
||||
<p>QC checks done on {{updatedOn}}.</p>
|
||||
</v-col>
|
||||
<v-col class="text-right">
|
||||
<div v-if="isDirty">
|
||||
<v-btn @click="saveLabels" small color="primary" class="mx-2">
|
||||
Save <v-icon right>mdi-content-save</v-icon>
|
||||
</v-btn>
|
||||
<v-btn @click="getQCData" small color="warning" outlined class="mx-2">
|
||||
Cancel <v-icon right>mdi-restore-alert</v-icon>
|
||||
</v-btn>
|
||||
</div>
|
||||
</v-col>
|
||||
</v-row>
|
||||
|
||||
<v-treeview
|
||||
@@ -98,39 +88,11 @@
|
||||
{{label}}
|
||||
</v-chip>
|
||||
|
||||
<template v-if="!item.labels || !item.labels.includes('QCAccepted')">
|
||||
<v-hover v-slot:default="{hover}" v-if="writeaccess">
|
||||
<span v-if="item.children && item.children.length">
|
||||
<v-btn
|
||||
:class="{'text--disabled': !hover}"
|
||||
icon
|
||||
small
|
||||
color="primary"
|
||||
title="Accept all"
|
||||
@click.stop="accept(item)">
|
||||
<v-icon small :color="accepted(item) ? 'green' : ''">mdi-check-all</v-icon>
|
||||
</v-btn>
|
||||
<v-btn
|
||||
:class="{'text--disabled': !hover}"
|
||||
icon
|
||||
small
|
||||
color="primary"
|
||||
title="Restore all"
|
||||
@click.stop="unaccept(item)">
|
||||
<v-icon small>mdi-restore</v-icon>
|
||||
</v-btn>
|
||||
</span>
|
||||
<v-btn v-else
|
||||
:class="{'text--disabled': !hover}"
|
||||
icon
|
||||
small
|
||||
color="primary"
|
||||
title="Accept this value"
|
||||
@click="accept(item)">
|
||||
<v-icon small :color="(item.children && item.children.length == 0)? 'green':''">mdi-check</v-icon>
|
||||
</v-btn>
|
||||
</v-hover>
|
||||
</template>
|
||||
<dougal-qc-acceptance v-if="writeaccess"
|
||||
:item="item"
|
||||
@accept="accept"
|
||||
@unaccept="unaccept"
|
||||
></dougal-qc-acceptance>
|
||||
|
||||
</div>
|
||||
<div :title="item.remarks" @dblclick.stop.prevent="toggleChildren(item)" v-else-if="item._kind=='sequence'">
|
||||
@@ -142,8 +104,21 @@
|
||||
v-text="itemCount(item)"
|
||||
>
|
||||
</v-chip>
|
||||
|
||||
<dougal-qc-acceptance v-if="writeaccess"
|
||||
:item="item"
|
||||
@accept="accept"
|
||||
@unaccept="unaccept"
|
||||
></dougal-qc-acceptance>
|
||||
|
||||
</div>
|
||||
<div class="text--secondary" v-else>
|
||||
<dougal-qc-acceptance v-if="writeaccess"
|
||||
:item="item"
|
||||
@accept="accept"
|
||||
@unaccept="unaccept"
|
||||
></dougal-qc-acceptance>
|
||||
|
||||
{{item._text}}
|
||||
</div>
|
||||
</template>
|
||||
@@ -164,10 +139,15 @@
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex';
|
||||
import { withParentProps } from '@/lib/utils';
|
||||
import DougalQcAcceptance from '@/components/qc-acceptance';
|
||||
|
||||
export default {
|
||||
name: "QC",
|
||||
|
||||
components: {
|
||||
DougalQcAcceptance
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
updatedOn: null,
|
||||
@@ -179,8 +159,7 @@ export default {
|
||||
selectedSequences: null,
|
||||
multiple: false,
|
||||
autoexpand: false,
|
||||
itemIndex: 0,
|
||||
isDirty: false
|
||||
itemIndex: 0
|
||||
}
|
||||
},
|
||||
|
||||
@@ -283,44 +262,26 @@ export default {
|
||||
return sum;
|
||||
},
|
||||
|
||||
accepted (item) {
|
||||
if (item._children) {
|
||||
return item._children.every(child => this.accepted(child));
|
||||
}
|
||||
async accept (items) {
|
||||
const url = `/project/${this.$route.params.project}/qc/results/accept`;
|
||||
await this.api([url, {
|
||||
method: "POST",
|
||||
body: items.map(i => i.id)
|
||||
}]);
|
||||
|
||||
if (item.labels) {
|
||||
return item.labels.includes("QCAccepted");
|
||||
}
|
||||
return false;
|
||||
// The open/closed state of the tree branches should stay the same, unless
|
||||
// the tree structure itself has changed in the meantime.
|
||||
await this.getQCData();
|
||||
},
|
||||
|
||||
accept (item) {
|
||||
if (item._children) {
|
||||
for (const child of item._children) {
|
||||
this.accept(child);
|
||||
}
|
||||
return;
|
||||
}
|
||||
async unaccept (items) {
|
||||
const url = `/project/${this.$route.params.project}/qc/results/unaccept`;
|
||||
await this.api([url, {
|
||||
method: "POST",
|
||||
body: items.map(i => i.id)
|
||||
}]);
|
||||
|
||||
if (!item.labels) {
|
||||
this.$set(item, "labels", []);
|
||||
}
|
||||
item.labels.includes("QCAccepted") || item.labels.push("QCAccepted");
|
||||
this.isDirty = true;
|
||||
},
|
||||
|
||||
unaccept (item) {
|
||||
if (item._children) {
|
||||
for (const child of item._children) {
|
||||
this.unaccept(child);
|
||||
}
|
||||
return;
|
||||
}
|
||||
const i = item.labels.indexOf("QCAccepted");
|
||||
if (i != -1) {
|
||||
item.labels.splice(i, 1);
|
||||
this.isDirty = true;
|
||||
}
|
||||
await this.getQCData();
|
||||
},
|
||||
|
||||
async getQCLabels () {
|
||||
@@ -375,16 +336,6 @@ export default {
|
||||
await Promise.all(promises);
|
||||
},
|
||||
|
||||
async saveLabels () {
|
||||
const url = `/project/${this.$route.params.project}/meta`;
|
||||
|
||||
const res = await this.api([url, {
|
||||
method: "PUT",
|
||||
body: this.resultObjects.filter(r => typeof r.value !== "undefined")
|
||||
}]);
|
||||
this.isDirty = false;
|
||||
},
|
||||
|
||||
filterByText(item, queryText) {
|
||||
if (!queryText || !item) return item;
|
||||
|
||||
@@ -482,10 +433,7 @@ export default {
|
||||
async getLabelDefinitions () {
|
||||
const url = `/project/${this.$route.params.project}/label`;
|
||||
|
||||
const labelSet = {};
|
||||
const labels = await this.api([url]) || [];
|
||||
labels.forEach( l => labelSet[l.name] = l.data );
|
||||
this.labels = labelSet;
|
||||
this.labels = await this.api([url]) || {};
|
||||
},
|
||||
|
||||
async getQCData () {
|
||||
@@ -495,6 +443,7 @@ export default {
|
||||
const res = await this.api([url]);
|
||||
|
||||
if (res) {
|
||||
this.itemIndex = 0;
|
||||
this.items = res.map(i => this.transform(i)) || [];
|
||||
this.updatedOn = res.updatedOn;
|
||||
await this.getQCLabels();
|
||||
@@ -503,7 +452,6 @@ export default {
|
||||
this.updatedOn = null;
|
||||
}
|
||||
|
||||
this.isDirty = false;
|
||||
},
|
||||
|
||||
...mapActions(["api"])
|
||||
|
||||
@@ -148,15 +148,16 @@
|
||||
:headers="headers"
|
||||
:items="items"
|
||||
:items-per-page.sync="itemsPerPage"
|
||||
:server-items-length="sequenceCount"
|
||||
item-key="sequence"
|
||||
:server-items-length="num_rows"
|
||||
:search="filter"
|
||||
:custom-filter="customFilter"
|
||||
:loading="loading"
|
||||
:fixed-header="true"
|
||||
:footer-props='{itemsPerPageOptions: [ 10, 25, 50, 100, 500, -1 ]}'
|
||||
show-expand
|
||||
:item-class="(item) => activeItem == item ? 'blue accent-1 elevation-3' : ''"
|
||||
:search="filter"
|
||||
x-custom-filter="customFilter"
|
||||
:loading="sequencesLoading"
|
||||
:options.sync="options"
|
||||
fixed-header
|
||||
:footer-props='{itemsPerPageOptions: [ 10, 25, 50, 100, 500, -1 ], showFirstLastPage: true}'
|
||||
show-expand
|
||||
@click:row="setActiveItem"
|
||||
@contextmenu:row="contextMenu"
|
||||
>
|
||||
@@ -176,7 +177,7 @@
|
||||
icon
|
||||
small
|
||||
title="Cancel edit"
|
||||
:disabled="loading"
|
||||
:disabled="sequencesLoading"
|
||||
@click="edit.value = item.remarks; edit = null"
|
||||
>
|
||||
<v-icon small>mdi-close</v-icon>
|
||||
@@ -185,7 +186,7 @@
|
||||
icon
|
||||
small
|
||||
title="Save edits"
|
||||
:disabled="loading"
|
||||
:disabled="sequencesLoading"
|
||||
@click="edit = null"
|
||||
>
|
||||
<v-icon small>mdi-content-save-edit-outline</v-icon>
|
||||
@@ -196,7 +197,7 @@
|
||||
icon
|
||||
small
|
||||
title="Edit"
|
||||
:disabled="loading"
|
||||
:disabled="sequencesLoading"
|
||||
@click="editItem(item, 'remarks')"
|
||||
>
|
||||
<v-icon small>mdi-square-edit-outline</v-icon>
|
||||
@@ -210,7 +211,7 @@
|
||||
class="markdown"
|
||||
autofocus
|
||||
placeholder="Enter your text here"
|
||||
:disabled="loading"
|
||||
:disabled="sequencesLoading"
|
||||
v-model="edit.value"
|
||||
>
|
||||
</v-textarea>
|
||||
@@ -228,7 +229,7 @@
|
||||
icon
|
||||
small
|
||||
title="Cancel edit"
|
||||
:disabled="loading"
|
||||
:disabled="sequencesLoading"
|
||||
@click="edit.value = item.remarks_final; edit = null"
|
||||
>
|
||||
<v-icon small>mdi-close</v-icon>
|
||||
@@ -237,7 +238,7 @@
|
||||
icon
|
||||
small
|
||||
title="Save edits"
|
||||
:disabled="loading"
|
||||
:disabled="sequencesLoading"
|
||||
@click="edit = null"
|
||||
>
|
||||
<v-icon small>mdi-content-save-edit-outline</v-icon>
|
||||
@@ -248,7 +249,7 @@
|
||||
icon
|
||||
small
|
||||
title="Edit"
|
||||
:disabled="loading"
|
||||
:disabled="sequencesLoading"
|
||||
@click="editItem(item, 'remarks_final')"
|
||||
>
|
||||
<v-icon small>mdi-square-edit-outline</v-icon>
|
||||
@@ -262,7 +263,7 @@
|
||||
class="markdown"
|
||||
autofocus
|
||||
placeholder="Enter your text here"
|
||||
:disabled="loading"
|
||||
:disabled="sequencesLoading"
|
||||
v-model="edit.value"
|
||||
>
|
||||
</v-textarea>
|
||||
@@ -292,9 +293,13 @@
|
||||
<v-list-item v-for="(path, index) in item.raw_files"
|
||||
key="index"
|
||||
link
|
||||
title="View the shot log"
|
||||
title="Download file"
|
||||
:href="`/api/files${path}`"
|
||||
>
|
||||
{{ basename(path) }}
|
||||
<v-list-item-action>
|
||||
<v-icon right small>mdi-cloud-download</v-icon>
|
||||
</v-list-item-action>
|
||||
</v-list-item>
|
||||
</v-list-group>
|
||||
<v-list-group value="true" v-if="item.final_files">
|
||||
@@ -308,10 +313,13 @@
|
||||
</template>
|
||||
<v-list-item v-for="(path, index) in item.final_files"
|
||||
key="index"
|
||||
link
|
||||
title="View the shot log"
|
||||
title="Download file"
|
||||
:href="`/api/files${path}`"
|
||||
>
|
||||
{{ basename(path) }}
|
||||
<v-list-item-action>
|
||||
<v-icon right small>mdi-cloud-download</v-icon>
|
||||
</v-list-item-action>
|
||||
</v-list-item>
|
||||
</v-list-group>
|
||||
</v-list>
|
||||
@@ -329,6 +337,18 @@
|
||||
title="View the event log for this sequence">{{value}}</a>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.line="{value}">
|
||||
<b>{{value}}</b>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.fsp_final="{value}">
|
||||
<b v-if="value">{{value}}</b>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.lsp_final="{value}">
|
||||
<b v-if="value">{{value}}</b>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.status="{value, item}">
|
||||
<span :class="{'success--text': value=='final', 'warning--text': value=='raw', 'error--text': value=='ntbp'}">
|
||||
{{ value == "final" ? "Processed" : value == "raw" ? item.raw_files ? "Acquired" : "In acquisition" : value == "ntbp" ? "NTBP" : `Unknown (${status})` }}
|
||||
@@ -368,7 +388,7 @@
|
||||
</template>
|
||||
|
||||
<template v-slot:item.duration_final="{item: {duration_final: value}}">
|
||||
{{
|
||||
<b>{{
|
||||
value
|
||||
?
|
||||
"" +
|
||||
@@ -379,7 +399,7 @@
|
||||
":" + String(value.minutes || 0).padStart(2, "0") +
|
||||
":" + String(value.seconds || 0).padStart(2, "0")
|
||||
: "N/A"
|
||||
}}
|
||||
}}</b>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.ts0="{value}">
|
||||
@@ -395,15 +415,15 @@
|
||||
</template>
|
||||
|
||||
<template v-slot:item.ts0_final="{value}">
|
||||
<span v-if="value">
|
||||
<b v-if="value">
|
||||
{{ value.replace(/(.{10})T(.{8}).{4}Z$/, "$1 $2") }}
|
||||
</span>
|
||||
</b>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.ts1_final="{value}">
|
||||
<span v-if="value">
|
||||
<b v-if="value">
|
||||
{{ value.replace(/(.{10})T(.{8}).{4}Z$/, "$1 $2") }}
|
||||
</span>
|
||||
</b>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.missing_shots="{value}">
|
||||
@@ -547,7 +567,7 @@ export default {
|
||||
items: [],
|
||||
filter: "",
|
||||
options: {},
|
||||
num_rows: null,
|
||||
sequenceCount: null,
|
||||
activeItem: null,
|
||||
edit: null, // {sequence, key, value}
|
||||
queuedReload: false,
|
||||
@@ -574,17 +594,22 @@ export default {
|
||||
return this.queuedItems.find(i => i.payload.sequence == this.contextMenuItem.sequence);
|
||||
},
|
||||
|
||||
...mapGetters(['user', 'writeaccess', 'loading', 'serverEvent'])
|
||||
...mapGetters(['user', 'writeaccess', 'sequencesLoading', 'sequences'])
|
||||
},
|
||||
|
||||
watch: {
|
||||
|
||||
options: {
|
||||
handler () {
|
||||
this.getSequences();
|
||||
this.fetchSequences();
|
||||
},
|
||||
deep: true
|
||||
},
|
||||
|
||||
async sequences () {
|
||||
await this.fetchSequences();
|
||||
},
|
||||
|
||||
async edit (newVal, oldVal) {
|
||||
if (newVal === null && oldVal !== null) {
|
||||
const item = this.items.find(i => i.sequence == oldVal.sequence);
|
||||
@@ -598,39 +623,9 @@ export default {
|
||||
}
|
||||
},
|
||||
|
||||
async serverEvent (event) {
|
||||
const subscriptions = ["raw_lines", "final_lines", "final_shots"];
|
||||
if (subscriptions.includes(event.channel) && event.payload.pid == this.$route.params.project) {
|
||||
if (!this.loading && !this.queuedReload) {
|
||||
// Do not force a non-cached response if refreshing as a result
|
||||
// of an event notification. We will assume that the server has
|
||||
// already had time to update the cache by the time our request
|
||||
// gets back to it.
|
||||
this.getSequences();
|
||||
} else {
|
||||
this.queuedReload = true;
|
||||
}
|
||||
} else if (event.channel == "queue_items") {
|
||||
const project =
|
||||
event.payload?.project ??
|
||||
event.payload?.new?.payload?.project ??
|
||||
event.payload?.old?.payload?.project;
|
||||
|
||||
if (project == this.$route.params.project) {
|
||||
this.getQueuedItems();
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
queuedReload (newVal, oldVal) {
|
||||
if (newVal && !oldVal && !this.loading) {
|
||||
this.getSequences();
|
||||
}
|
||||
},
|
||||
|
||||
loading (newVal, oldVal) {
|
||||
if (!newVal && oldVal && this.queuedReload) {
|
||||
this.getSequences();
|
||||
filter (newVal, oldVal) {
|
||||
if (newVal?.toLowerCase() != oldVal?.toLowerCase()) {
|
||||
this.fetchSequences();
|
||||
}
|
||||
},
|
||||
|
||||
@@ -799,19 +794,14 @@ export default {
|
||||
this.num_rows = projectInfo.sequences;
|
||||
},
|
||||
|
||||
async getSequences () {
|
||||
|
||||
const query = new URLSearchParams(this.options);
|
||||
query.set("filter", this.filter);
|
||||
query.set("files", true);
|
||||
if (this.options.itemsPerPage < 0) {
|
||||
query.delete("itemsPerPage");
|
||||
}
|
||||
const url = `/project/${this.$route.params.project}/sequence?${query.toString()}`;
|
||||
|
||||
this.queuedReload = false;
|
||||
this.items = await this.api([url]) || [];
|
||||
|
||||
async fetchSequences (opts = {}) {
|
||||
const options = {
|
||||
text: this.filter,
|
||||
...this.options
|
||||
};
|
||||
const res = await this.getSequences([this.$route.params.project, options]);
|
||||
this.items = res.sequences;
|
||||
this.sequenceCount = res.count;
|
||||
},
|
||||
|
||||
async getQueuedItems () {
|
||||
@@ -859,11 +849,11 @@ export default {
|
||||
return false;
|
||||
},
|
||||
|
||||
...mapActions(["api", "showSnack"])
|
||||
...mapActions(["api", "showSnack", "getSequences"])
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.getSequences();
|
||||
this.fetchSequences();
|
||||
this.getNumLines();
|
||||
this.getQueuedItems();
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ module.exports = {
|
||||
"leaflet-arrowheads"
|
||||
],
|
||||
devServer: {
|
||||
host: "0.0.0.0",
|
||||
proxy: {
|
||||
"^/api(/|$)": {
|
||||
target: "http://localhost:3000",
|
||||
|
||||
@@ -1,14 +1,16 @@
|
||||
|
||||
const http = require('http');
|
||||
const express = require('express');
|
||||
express.yaml ??= require('body-parser').yaml; // NOTE: Use own customised body-parser
|
||||
const cookieParser = require('cookie-parser')
|
||||
|
||||
const maybeSendAlert = require("../lib/alerts");
|
||||
const mw = require('./middleware');
|
||||
|
||||
const { ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
|
||||
const verbose = process.env.NODE_ENV != 'test';
|
||||
const app = express();
|
||||
app.locals.version = "0.3.0"; // API version
|
||||
app.locals.version = "0.4.0"; // API version
|
||||
|
||||
app.map = function(a, route){
|
||||
route = route || '';
|
||||
@@ -22,7 +24,7 @@ app.map = function(a, route){
|
||||
} // else drop through
|
||||
// get: function(){ ... }
|
||||
case 'function':
|
||||
if (verbose) console.log('%s %s', key, route);
|
||||
if (verbose) INFO('%s %s', key, route);
|
||||
app[key](route, a[key]);
|
||||
break;
|
||||
}
|
||||
@@ -30,6 +32,7 @@ app.map = function(a, route){
|
||||
};
|
||||
|
||||
app.use(express.json({type: "application/json", strict: false, limit: '10mb'}));
|
||||
app.use(express.yaml({type: "application/yaml", limit: '10mb'}));
|
||||
app.use(express.urlencoded({ type: "application/x-www-form-urlencoded", extended: true }));
|
||||
app.use(express.text({type: "text/*", limit: '10mb'}));
|
||||
app.use((req, res, next) => {
|
||||
@@ -80,16 +83,25 @@ app.map({
|
||||
|
||||
app.use(mw.auth.authentify);
|
||||
|
||||
// Don't process the request if the data hasn't changed
|
||||
app.use(mw.etag.ifNoneMatch);
|
||||
|
||||
// We must be authenticated before we can access these
|
||||
app.map({
|
||||
'/project': {
|
||||
get: [ mw.project.list ], // Get list of projects
|
||||
get: [ mw.project.get ], // Get list of projects
|
||||
post: [ mw.auth.access.admin, mw.project.post ], // Create a new project
|
||||
},
|
||||
'/project/:project': {
|
||||
get: [ mw.project.get ], // Get project data
|
||||
get: [ mw.project.summary.get ], // Get project data
|
||||
delete: [ mw.auth.access.admin, mw.project.delete ], // Delete a project (only if empty)
|
||||
},
|
||||
'/project/:project/summary': {
|
||||
get: [ mw.project.get ],
|
||||
get: [ mw.project.summary.get ],
|
||||
},
|
||||
'/project/:project/configuration': {
|
||||
get: [ mw.project.configuration.get ], // Get project configuration
|
||||
patch: [ mw.auth.access.admin, mw.project.configuration.patch ], // Modify project configuration
|
||||
},
|
||||
|
||||
/*
|
||||
@@ -97,19 +109,25 @@ app.map({
|
||||
*/
|
||||
|
||||
'/project/:project/gis': {
|
||||
get: [ mw.gis.project.bbox ]
|
||||
get: [ mw.etag.noSave, mw.gis.project.bbox ]
|
||||
},
|
||||
'/project/:project/gis/preplot': {
|
||||
get: [ mw.gis.project.preplot ]
|
||||
get: [ mw.etag.noSave, mw.gis.project.preplot ]
|
||||
},
|
||||
'/project/:project/gis/preplot/:featuretype(line|point)': {
|
||||
get: [ mw.gis.project.preplot ]
|
||||
get: [ mw.etag.noSave, mw.gis.project.preplot ]
|
||||
},
|
||||
'/project/:project/gis/raw/:featuretype(line|point)': {
|
||||
get: [ mw.gis.project.raw ]
|
||||
get: [ mw.etag.noSave, mw.gis.project.raw ]
|
||||
},
|
||||
'/project/:project/gis/final/:featuretype(line|point)': {
|
||||
get: [ mw.gis.project.final ]
|
||||
get: [ mw.etag.noSave, mw.gis.project.final ]
|
||||
},
|
||||
'/project/:project/gis/layer': {
|
||||
get: [ mw.etag.noSave, mw.gis.project.layer.get ]
|
||||
},
|
||||
'/project/:project/gis/layer/:name': {
|
||||
get: [ mw.etag.noSave, mw.gis.project.layer.get ]
|
||||
},
|
||||
|
||||
/*
|
||||
@@ -163,6 +181,9 @@ app.map({
|
||||
post: [ mw.auth.access.write, mw.event.post ],
|
||||
put: [ mw.auth.access.write, mw.event.put ],
|
||||
delete: [ mw.auth.access.write, mw.event.delete ],
|
||||
'changes/:since': {
|
||||
get: [ mw.event.changes ]
|
||||
},
|
||||
// TODO Rename -/:sequence → sequence/:sequence
|
||||
'-/:sequence/': { // NOTE: We need to avoid conflict with the next endpoint ☹
|
||||
get: [ mw.event.sequence.get ],
|
||||
@@ -182,17 +203,25 @@ app.map({
|
||||
'/project/:project/qc': {
|
||||
'/results': {
|
||||
// Get all QC results for :project
|
||||
get: [ mw.qc.results.get ],
|
||||
get: [ mw.etag.noSave, mw.qc.results.get ],
|
||||
|
||||
// Delete all QC results for :project
|
||||
delete: [ mw.auth.access.write, mw.qc.results.delete ],
|
||||
delete: [ mw.etag.noSave, mw.auth.access.write, mw.qc.results.delete ],
|
||||
|
||||
'/accept': {
|
||||
post: [ mw.etag.noSave, mw.auth.access.write, mw.qc.results.accept ]
|
||||
},
|
||||
|
||||
'/unaccept': {
|
||||
post: [ mw.etag.noSave, mw.auth.access.write, mw.qc.results.unaccept ]
|
||||
},
|
||||
|
||||
'/sequence/:sequence': {
|
||||
// Get QC results for :project, :sequence
|
||||
get: [ mw.qc.results.get ],
|
||||
get: [ mw.etag.noSave, mw.qc.results.get ],
|
||||
|
||||
// Delete QC results for :project, :sequence
|
||||
delete: [ mw.auth.access.write, mw.qc.results.delete ]
|
||||
delete: [ mw.etag.noSave, mw.auth.access.write, mw.qc.results.delete ]
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -235,10 +264,16 @@ app.map({
|
||||
// // post: [ mw.permissions.post ],
|
||||
// // delete: [ mw.permissions.delete ]
|
||||
// },
|
||||
'/project/:project/files/:path(*)': {
|
||||
get: [ mw.auth.access.write, mw.files.get ]
|
||||
},
|
||||
'/files/?:path(*)': {
|
||||
get: [ mw.auth.access.write, mw.etag.noSave, mw.files.get ]
|
||||
},
|
||||
'/navdata/': {
|
||||
get: [ mw.navdata.get ],
|
||||
get: [ mw.etag.noSave, mw.navdata.get ],
|
||||
'gis/:featuretype(line|point)': {
|
||||
get: [ mw.gis.navdata.get ]
|
||||
get: [ mw.etag.noSave, mw.gis.navdata.get ]
|
||||
}
|
||||
},
|
||||
'/info/': {
|
||||
@@ -251,12 +286,12 @@ app.map({
|
||||
},
|
||||
'/queue/outgoing/': {
|
||||
'asaqc': {
|
||||
get: [ mw.queue.asaqc.get ],
|
||||
get: [ mw.etag.noSave, mw.queue.asaqc.get ],
|
||||
post: [ mw.auth.access.write, mw.queue.asaqc.post ],
|
||||
'/project/:project': {
|
||||
get: [ mw.queue.asaqc.get ],
|
||||
get: [ mw.etag.noSave, mw.queue.asaqc.get ],
|
||||
'/sequence/:sequence': {
|
||||
get: [ mw.queue.asaqc.get ],
|
||||
get: [ mw.etag.noSave, mw.queue.asaqc.get ],
|
||||
}
|
||||
},
|
||||
'/:id': {
|
||||
@@ -280,6 +315,10 @@ app.map({
|
||||
//
|
||||
});
|
||||
|
||||
app.use(mw.etag.save);
|
||||
// Invalidate cache on database events
|
||||
mw.etag.watch(app);
|
||||
|
||||
// Generic error handler. Stops stack dumps
|
||||
// being sent to clients.
|
||||
app.use(function (err, req, res, next) {
|
||||
@@ -289,10 +328,12 @@ app.use(function (err, req, res, next) {
|
||||
const alert = {title, message, description, error: err};
|
||||
|
||||
console.log("Error:", err);
|
||||
ERROR("%O", err)
|
||||
|
||||
res.set("Content-Type", "application/json");
|
||||
if (err instanceof Error && err.name != "UnauthorizedError") {
|
||||
console.error(err.stack);
|
||||
// console.error(err.stack);
|
||||
ERROR(err.stack);
|
||||
res.set("Content-Type", "text/plain");
|
||||
res.status(500).send('General internal error');
|
||||
maybeSendAlert(alert);
|
||||
@@ -309,17 +350,16 @@ app.use(function (err, req, res, next) {
|
||||
|
||||
app.disable('x-powered-by');
|
||||
app.enable('trust proxy');
|
||||
console.log('trust proxy is ' + (app.get('trust proxy')? 'on' : 'off'));
|
||||
|
||||
const addr = "127.0.0.1";
|
||||
INFO('trust proxy is ' + (app.get('trust proxy')? 'on' : 'off'));
|
||||
|
||||
if (!module.parent) {
|
||||
var port = process.env.HTTP_PORT || 3000;
|
||||
var server = http.createServer(app).listen(port, addr);
|
||||
const port = process.env.HTTP_PORT || 3000;
|
||||
const host = process.env.HTTP_HOST || "127.0.0.1";
|
||||
var server = http.createServer(app).listen(port, host);
|
||||
|
||||
console.log('API started on port ' + port);
|
||||
INFO('API started on port ' + port);
|
||||
} else {
|
||||
app.start = function (port = 3000, path) {
|
||||
app.start = function (port = 3000, host = "127.0.0.1", path) {
|
||||
|
||||
var root = app;
|
||||
if (path) {
|
||||
@@ -328,9 +368,10 @@ if (!module.parent) {
|
||||
root.use(path, app);
|
||||
}
|
||||
|
||||
const server = http.createServer(root).listen(port, addr);
|
||||
const server = http.createServer(root).listen(port, host);
|
||||
if (server) {
|
||||
console.log(`API started on port ${port}, prefix: ${path || "/"}`);
|
||||
INFO(`API started on port ${port}, prefix: ${path || "/"}`);
|
||||
}
|
||||
return server;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
const expressJWT = require('express-jwt');
|
||||
const {expressjwt: expressJWT} = require('express-jwt');
|
||||
|
||||
const cfg = require("../../../lib/config").jwt;
|
||||
|
||||
@@ -15,11 +15,12 @@ const options = {
|
||||
secret: cfg.secret,
|
||||
credentialsRequired: false,
|
||||
algorithms: ['HS256'],
|
||||
requestProperty: "user",
|
||||
getToken
|
||||
};
|
||||
|
||||
const allow = {
|
||||
path: [/\/login$/, /\/logout$/],
|
||||
path: [/\/login$/, /\/logout$/, /\/$/, /\/version$/],
|
||||
useOriginalUrl: false
|
||||
};
|
||||
|
||||
|
||||
lib/www/server/api/middleware/etag/cache.js (new file, 47 lines)
@@ -0,0 +1,47 @@
|
||||
|
||||
const cache = {};
|
||||
|
||||
function getCache (r) {
|
||||
if (!r?.app?.locals) {
|
||||
return cache;
|
||||
}
|
||||
|
||||
if (!r.app.locals.etags) {
|
||||
r.app.locals.etags = {};
|
||||
}
|
||||
|
||||
return r.app.locals.etags;
|
||||
}
|
||||
|
||||
function isCached (req) {
|
||||
const cache = getCache(req);
|
||||
|
||||
if (req.url in cache) {
|
||||
const cached = cache[req.url];
|
||||
const etag = req.get("If-None-Match");
|
||||
if (etag && etag == cached.etag) {
|
||||
return cached;
|
||||
}
|
||||
}
|
||||
return; // undefined
|
||||
}
|
||||
|
||||
function saveResponse (res) {
|
||||
if (res?.headersSent) {
|
||||
const etag = res.get("ETag");
|
||||
if (etag && res.locals.saveEtag !== false) {
|
||||
const cache = getCache(res);
|
||||
const req = res.req;
|
||||
console.log(`Saving ETag: ${req.method} ${req.url} → ${etag}`);
|
||||
const headers = structuredClone(res.getHeaders());
|
||||
delete headers["set-cookie"];
|
||||
cache[req.url] = {etag, headers};
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
getCache,
|
||||
isCached,
|
||||
saveResponse
|
||||
};
|
||||
lib/www/server/api/middleware/etag/if-none-match.js (new file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
const { isCached } = require('./cache');
|
||||
|
||||
function isIdempotentMethod (method) {
|
||||
const nonIdempotentMethods = [
|
||||
"POST", "PUT", "PATH", "DELETE"
|
||||
];
|
||||
|
||||
return !nonIdempotentMethods.includes(method.toUpperCase());
|
||||
};
|
||||
|
||||
function setHeaders(res, headers) {
|
||||
for (let [key, value] of Object.entries(headers)) {
|
||||
res.set(key, value);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
|
||||
function ifNoneMatch (req, res, next) {
|
||||
|
||||
const cached = isCached(req);
|
||||
if (cached) {
|
||||
console.log("ETag match", req.url);
|
||||
setHeaders(res, cached.headers);
|
||||
if (req.method == "GET" || req.method == "HEAD") {
|
||||
res.status(304).send();
|
||||
// No next()
|
||||
} else if (!isIdempotentMethod(req.method)) {
|
||||
res.status(412).send();
|
||||
}
|
||||
} else {
|
||||
// Either we didn't have this URL in the cache, or there was
|
||||
// no If-None-Match header, or it didn't match the cached ETag.
|
||||
// We let the request proceed normally.
|
||||
next();
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ifNoneMatch;
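A quick way to exercise this middleware — a sketch only, assuming the development server address from server.js, a hypothetical project id, and with authentication omitted — is to replay a request with the ETag returned by a first response:

const base = "http://localhost:3000";
const first = await fetch(`${base}/project/demo/event`); // plain 200, with an ETag header
const etag = first.headers.get("ETag");
const second = await fetch(`${base}/project/demo/event`, {
headers: { "If-None-Match": etag } // ifNoneMatch answers 304 from the cache, skipping the handlers
});
console.log(first.status, second.status); // e.g. 200 304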
|
||||
lib/www/server/api/middleware/etag/index.js (new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
|
||||
module.exports = {
|
||||
ifNoneMatch: require('./if-none-match'),
|
||||
save: require('./save'),
|
||||
noSave: require('./no-save'),
|
||||
watch: require('./watch')
|
||||
};
|
||||
lib/www/server/api/middleware/etag/no-save.js (new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
|
||||
function noSave (req, res, next) {
|
||||
res.locals.saveEtag = false;
|
||||
next();
|
||||
}
|
||||
|
||||
module.exports = noSave;
|
||||
lib/www/server/api/middleware/etag/save.js (new file, 8 lines)
@@ -0,0 +1,8 @@
|
||||
const { saveResponse } = require('./cache');
|
||||
|
||||
function save (req, res, next) {
|
||||
saveResponse(res);
|
||||
next();
|
||||
}
|
||||
|
||||
module.exports = save;
|
||||
lib/www/server/api/middleware/etag/watch.js (new file, 138 lines)
@@ -0,0 +1,138 @@
|
||||
const { getCache } = require('./cache');
|
||||
const { listen } = require('../../../lib/db/notify');
|
||||
const channels = require('../../../lib/db/channels');
|
||||
|
||||
const rels = [
|
||||
{
|
||||
channels: [ "realtime" ],
|
||||
urls: [ ]
|
||||
},
|
||||
{
|
||||
channels: [ "event" ],
|
||||
urls: [ /^\/project\/([^\/]+)\/event/ ],
|
||||
matches: [ "project" ]
|
||||
},
|
||||
{
|
||||
channels: [ "project" ],
|
||||
urls: [ /^\/project\/([^\/]+)\// ],
|
||||
matches: [ "project" ]
|
||||
},
|
||||
{
|
||||
channels: [ "preplot_lines", "preplot_points" ],
|
||||
urls: [ /^\/project\/([^\/]+)\/line[\/?]?/ ],
|
||||
matches: [ "project" ]
|
||||
},
|
||||
{
|
||||
channels: [ "planned_lines" ],
|
||||
urls: [ /^\/project\/([^\/]+)\/plan[\/?]?/ ],
|
||||
matches: [ "project" ]
|
||||
},
|
||||
{
|
||||
channels: [ "raw_lines", "raw_shots" ],
|
||||
urls: [ /^\/project\/([^\/]+)\/sequence[\/?]?/ ],
|
||||
matches: [ "project" ]
|
||||
},
|
||||
{
|
||||
channels: [ "final_lines", "final_shots" ],
|
||||
urls: [ /^\/project\/([^\/]+)\/sequence[\/?]?/ ],
|
||||
matches: [ "project" ]
|
||||
},
|
||||
{
|
||||
channels: [ "info" ],
|
||||
urls: [ ],
|
||||
matches: [ ],
|
||||
callback (url, data) {
|
||||
if (data.payload?.table == "info") {
|
||||
const pid = data.payload?.pid;
|
||||
const key = (data.payload?.new ?? data.payload?.old)?.key;
|
||||
|
||||
const rx = /^\/project\/([^\/]+)\/info\/([^\/?]+)[\/?]?/;
|
||||
const match = url.match(rx);
|
||||
if (match) {
|
||||
if (match[1] == pid) {
|
||||
if (match[2] == data.payload?.old?.key || match[2] == data.payload?.new?.key) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (key == "plan") {
|
||||
const rx = /^\/project\/([^\/]+)\/plan[\/?]?/;
|
||||
const match = url.match(rx);
|
||||
if (match) {
|
||||
return match[1] == pid;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
},
|
||||
{
|
||||
channels: [ "queue_items" ],
|
||||
urls: [ ]
|
||||
},
|
||||
]
|
||||
|
||||
function invalidateCache (data, cache) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const channel = data.channel;
|
||||
const project = data.payload.pid;
|
||||
const operation = data.payload.operation;
|
||||
const table = data.payload.table;
|
||||
const fields = { channel, project, operation, table };
|
||||
|
||||
for (let rel of rels) {
|
||||
if (rel.channels.includes(channel)) {
|
||||
for (let url of rel.urls) {
|
||||
for (let [key, data] of Object.entries(cache)) {
|
||||
const match = key.match(url)?.slice(1);
|
||||
if (match) {
|
||||
if (rel.matches) {
|
||||
if (rel.matches.every( (field, idx) => match[idx] == fields[field] )) {
|
||||
console.log("DELETE ENTRY (MATCHES)", key);
|
||||
delete cache[key];
|
||||
}
|
||||
} else {
|
||||
// Delete unconditionally
|
||||
console.log("DELETE ENTRY (UNCONDITIONAL)", key);
|
||||
delete cache[key];
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
if (rel.callback) {
|
||||
for (let key of Object.keys(cache)) {
|
||||
if (rel.callback(key, data)) {
|
||||
console.log("DELETE ENTRY (CALLBACK)", key);
|
||||
delete cache[key];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
resolve();
|
||||
});
|
||||
}
|
||||
|
||||
async function watch (app) {
|
||||
if (!app.locals?.etags) {
|
||||
app.locals.etags = {};
|
||||
}
|
||||
|
||||
function etagWatch (data) {
|
||||
invalidateCache(data, app.locals.etags);
|
||||
}
|
||||
try {
|
||||
const client = await listen(channels, etagWatch);
|
||||
console.log("ETag watch installed", client);
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
console.log("ETag watch not installed");
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = watch;
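To illustrate the channel-to-URL mapping above — a sketch with a made-up notification payload, run inside this module since invalidateCache is not exported — a planned_lines event clears any cached plan URLs for the matching project:

const { getCache } = require('./cache');
const cache = getCache(); // module-level fallback cache when no app is attached
cache["/project/demo/plan?page=1"] = { etag: '"abc"', headers: {} };
invalidateCache({
channel: "planned_lines",
payload: { pid: "demo", operation: "UPDATE", table: "planned_lines" }
}, cache);
// the key matches /^\/project\/([^\/]+)\/plan[\/?]?/ with match[1] == pid,
// so the entry is deleted and the next request is served fresh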
|
||||
lib/www/server/api/middleware/event/changes.js (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
|
||||
const { event } = require('../../../lib/db');
|
||||
|
||||
const json = async function (req, res, next) {
|
||||
try {
|
||||
const response = await event.changes(req.params.project, req.params.since, req.query);
|
||||
res.status(200).send(response);
|
||||
next();
|
||||
} catch (err) {
|
||||
next(err);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = json;
|
||||
@@ -6,5 +6,6 @@ module.exports = {
|
||||
post: require('./post'),
|
||||
put: require('./put'),
|
||||
patch: require('./patch'),
|
||||
delete: require('./delete')
|
||||
delete: require('./delete'),
|
||||
changes: require('./changes')
|
||||
}
|
||||
|
||||
lib/www/server/api/middleware/event/sequence/get/csv.js (new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
const { stringify } = require('csv');
|
||||
const { transform, prepare } = require('../../../../../lib/sse');
|
||||
|
||||
const json = async function (req, res, next) {
|
||||
try {
|
||||
const query = req.query;
|
||||
query.sequence = req.params.sequence;
|
||||
const {events, sequences} = await prepare(req.params.project, query);
|
||||
if ("download" in query || "d" in query) {
|
||||
const extension = "csv";
|
||||
// Get the sequence number(s) (more than one sequence can be selected)
|
||||
const seqNums = query.sequence.split(";");
|
||||
// If we've only been asked for a single sequence, get its line name
|
||||
const lineName = (sequences.find(i => i.sequence == seqNums[0]) || {})?.meta?.lineName;
|
||||
const filename = (seqNums.length == 1 && lineName)
|
||||
? `${lineName}-NavLog.${extension}`
|
||||
: `${req.params.project}-${query.sequence}.${extension}`;
|
||||
res.set("Content-Disposition", `attachment; filename="${filename}"`);
|
||||
}
|
||||
|
||||
const columns = {
|
||||
id: "id",
|
||||
unix_epoch: (row) => Math.floor(row.tstamp/1000),
|
||||
timestamp: (row) => (new Date(row.tstamp)).toISOString(),
|
||||
sequence: "sequence",
|
||||
point: "point",
|
||||
text: "remarks",
|
||||
labels: (row) => row.labels.join(";"),
|
||||
latitude: (row) => {
|
||||
if (row.meta.geometry?.type == "Point" && row.meta.geometry?.coordinates) {
|
||||
return row.meta.geometry.coordinates[1];
|
||||
}
|
||||
},
|
||||
longitude: (row) => {
|
||||
if (row.meta.geometry?.type == "Point" && row.meta.geometry?.coordinates) {
|
||||
return row.meta.geometry.coordinates[0];
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let fields = [ "timestamp", "sequence", "point", "text", "labels", "latitude", "longitude", "id" ];
|
||||
|
||||
if (req.query.fields) {
|
||||
fields = req.query.fields.split(/[,;:.\s+*|]+/);
|
||||
}
|
||||
|
||||
let delimiter = req.query.delimiter || ",";
|
||||
|
||||
const stringifier = stringify({delimiter});
|
||||
stringifier.on('error', (err) => {
|
||||
console.error(err.message);
|
||||
});
|
||||
|
||||
stringifier.on('readable', () => {
let row; // declare locally rather than leaking an implicit global
while ((row = stringifier.read()) !== null) {
res.write(row);
}
});
|
||||
|
||||
res.status(200);
|
||||
|
||||
if (!req.query.header || req.query.header.toLowerCase() == "true" || req.query.header == "1") {
|
||||
// Send header
|
||||
stringifier.write(fields);
|
||||
}
|
||||
|
||||
events.forEach( event => {
|
||||
stringifier.write(fields.map( field => {
|
||||
if (typeof columns[field] === "function") {
|
||||
return columns[field](event);
|
||||
} else {
|
||||
return event[columns[field]];
|
||||
}
|
||||
}));
|
||||
});
|
||||
|
||||
stringifier.end();
|
||||
res.end();
|
||||
next();
|
||||
} catch (err) {
|
||||
next(err);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = json;
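The query string drives the output — a sketch of a client call with hypothetical project and sequence values, following the event route shape from the route map above; the Accept header selects this handler via the format dispatcher that follows:

const res = await fetch(
"/api/project/demo/event/-/1042/?download&fields=timestamp,point,text&delimiter=;",
{ headers: { Accept: "text/csv" } }
);
console.log(res.headers.get("Content-Disposition")); // attachment; filename="…"
console.log(await res.text()); // a header row, then one CSV row per event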
|
||||
@@ -2,6 +2,7 @@ const json = require('./json');
|
||||
const geojson = require('./geojson');
|
||||
const seis = require('./seis');
|
||||
const html = require('./html');
|
||||
const csv = require('./csv');
|
||||
const pdf = require('./pdf');
|
||||
|
||||
module.exports = async function (req, res, next) {
|
||||
@@ -11,6 +12,7 @@ module.exports = async function (req, res, next) {
|
||||
"application/geo+json": geojson,
|
||||
"application/vnd.seis+json": seis,
|
||||
"text/html": html,
|
||||
"text/csv": csv,
|
||||
"application/pdf": pdf
|
||||
};
|
||||
|
||||
|
||||
lib/www/server/api/middleware/files/delete.js (new file, 0 lines)
lib/www/server/api/middleware/files/get.js (new file, 29 lines)
@@ -0,0 +1,29 @@
|
||||
const files = require('../../../lib/files');
|
||||
|
||||
module.exports = async function (req, res, next) {
|
||||
|
||||
try {
|
||||
const entity = await files.get(req.params.path, req.params.project, req.query);
|
||||
if (entity) {
|
||||
if (entity.download) {
|
||||
res.download(...entity.download, (err) => next(err));
|
||||
} else {
|
||||
// Directory listing
|
||||
res.status(203).json(entity);
|
||||
next();
|
||||
}
|
||||
} else {
|
||||
throw {
|
||||
status: 404,
|
||||
code: "ENOENT"
|
||||
};
|
||||
}
|
||||
} catch (err) {
|
||||
if (err.code == 'ENOENT') {
|
||||
res.status(404).json({message: err.code});
|
||||
} else {
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
};
|
||||
lib/www/server/api/middleware/files/index.js (new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
|
||||
module.exports = {
|
||||
get: require('./get'),
|
||||
post: require('./post'),
|
||||
put: require('./put'),
|
||||
delete: require('./delete')
|
||||
}
|
||||
lib/www/server/api/middleware/files/post.js (new file, 0 lines)
lib/www/server/api/middleware/files/put.js (new file, 0 lines)
Some files were not shown because too many files have changed in this diff.