Mirror of https://gitlab.com/wgp/dougal/software.git (synced 2025-12-06 12:57:08 +00:00)

Compare commits: `188-adapt-…` … `265-add-sh…` (360 commits)
Commits in the comparison (SHA1 only; the author and date columns were empty in the mirror):

```text
cf887b7852 c201229891 7ac997cd7d 08e6c4a2de 2c21f8f7ef a76aefe418 8d825fc53b b039a5f1fd 5c1218e95e 1bb5e2a41d
1576b121e6 a06cdde449 121131e910 9136e9655d c646944886 0e664fc095 1498891004 89cb237f8d 3386c57670 7285de5ec4
a95059f5e5 1ac81c34ce 22387ba215 b77d41e952 aeecb7db7d ac9a683135 17a58f1396 b2a97a1987 f684e3e8d6 219425245f
31419e860e 65481d3086 d64a1fcee7 2365789d48 4c2a2617a1 5021888d03 bf633f7fdf 847f49ad7c 171feb9dd2 503a0de12f
cf89a43f64 680e376ed1 a26974670a 16a6cb59dc 829e206831 83244fcd1a d9a6c77d0c b5aafe42ad 025f3f774d f26e746c2b
39eaf17121 1bb06938b1 851369a0b4 5065d62443 2d1e1e9532 051049581a da5ae18b0b ac9353c101 c4c5c44bf1 d3659ebf02
6b5070e634 09ff96ceee f231acf109 e576e1662c 6a21ddd1cd c1e35b2459 eee2a96029 6f5e5a4d20 9e73cb7e00 d7ab4eec7c
cdd96a4bc7 39a21766b6 0e33c18b5c 7f411ac7dd ed1da11c9d 66ec28dd83 b928d96774 73335f9c1e 7b6b81dbc5 2e11c574c2
d07565807c 6eccbf215a 8abc05f04e 8f587467f9 3d7a91c7ff 3fd408074c f71cbd8f51 915df8ac16 d5ecb08a2d 9388cd4861
180590b411 4ec37539bf 8755fe01b6 0bfe54e0c2 29bc689b84 65682febc7 d408665d62 64fceb0a01 ab58e578c9 0e58b8fa5b
99ac082f00 4d3fddc051 42456439a9 ee0c0e7308 998c272bf8 daddd1f0e8 17f20535cb 0829ea3ea1 2069d9c3d7 8a2d526c50
8ad96d6f73 947faf8c05 a948556455 835384b730 c5b93794f4 056cd32f0e 49bb413110 ceccc42050 aa3379e1c6 4063af0e25
d53e6060a4 85d8fc8cc0 0fe40b1839 21de4b757f 96cdbb2cff d531643b58 a1779ef488 5239dece1e a7d7837816 ebcfc7df47
dc4b9002fe 33618b6b82 597d407acc 6162a5bdee 696bbf7a17 821fcf0922 b1712d838f 895b865505 5a2af5c49e 24658f4017
6707cda75e 1302a31b3d 871a1e8f3a 04e1144bab 6312d94f3e ed91026319 441a4e296d c33c3f61df 2cc293b724 ee129b2faa
98d9b3b093 57b9b420f8 9e73f2603a 707889be42 f9a70e0145 b71489cee1 0a9bde5f10 36d5862375 398c702004 b2d1798338
4f165b0c83 2c86944a51 5fc51de7d8 158e0fb788 941d15c1bc cd00f8b995 44515f8e78 54fbc76da5 c1b5196134 fb3d3be546
8e11e242ed 8a815ce3ef 91076a50ad e624dcdde0 a25676122c e4dfbe2c9a 78fb34d049 38c4125f4f 04d6cbafe3 e6319172d8
5230ff63e3 2b364bbff7 c4b330b2bb 308eda6342 e8b1cb27f1 ed14fd0ced fb10e56487 56ed0cbc79 227e588782 53f2108e37
ccf4bbf547 c99a625b60 25ab623328 455888bdac b650ece0ee 2cb96c0252 70cf59bb4c ec03627119 675c19f060 6721b1b96b
b4f23822c4 3dd1aaeddb 1e593e6d75 ddbcb90c1f 229fdf20ef 72e67d0e5d b26fefbc37 04e0482f60 62f90846a8 1f9c0e56fe
fe9d3563a0 38a07dffc6 1a6500308f 6033b45ed3 33edef6647 8f8e8b7492 ab5e3198aa 60ed850d2d 63b9cc5b16 f2edd2bec5
44ad59130f ecbb1e04ee 7cb2c3ef49 ff4f6bfd78 fbe0cb5efa aa7cbed611 89061f6411 838883d8a3 cd196f1acd a2b894fceb
c3b3a4c70f 8118641231 6d8a199a3c 5a44e20a5b 374739c133 992205da4a f5e08c68af 105fee0623 aff974c03f bada6dc2e2
d5aac5e84d 3577a2ba4a 04df9f41cc fdb5e0cbab 4b832babfd cc3a9b4e5c da5a708760 9834e85eb9 e19601218a 15c56d3f64
632dd1ee75 aeff5a491d 9179c9332d bb5de9a00e d6b985fcd2 3ed8339aa3 1b925502bc 7cea79a9be 69f565f357 23de4d00d7
1992efe914 c7f3f565cd 1da02738b0 732d8e9be6 a2bd614b17 003c833293 a4c458dc16 f7b6ca3f79 a7cce69c81 2b20a5d69f
4fc5d1deda df13343063 a5603cf243 b6d4236325 7e8f00d9f2 721cfb36d1 222c951e49 45d2e56ed1 c5b6c87278 fd37e8b8d6
ce0310d0b0 546bc45861 602f2c0a34 37de5ab223 d69c6c4150 d80f44547b 6c8515a879 bb9340a0af 672c14fb67 f4ee798bf0
c8ef089b28 1f6d560d7e f37e07796c 349c052db0 1c291db6c6 f46fd4b6bc 10883eb1a6 af6e419aab 6516896bae c495dce27d
40d96230d2 d607b4618a fd41d2a6fa 39690c991b 09ead4878f 588d210f24 28be86e7ff 1eac97cbd0 e3a3bdb153 0e534b583c
51480e52ef 187807cfb1 d386b97e42 da578d2e50 7cf89d48dd c0ec8298fa 68322ef562 888228c9a2 74d6f0b9a0 cf475ce2df
26033b2a37 fafd4928d9 ec38fdb290 086172c5e7 3db453a271 a5db9c984b ead938b40f 634a7be3f1 913606e7f1 49b7747ded
1fd265cc74 13389706a9 818cd8b070 a3d3c7aea7 a592ab5f6c 9b571ce34d aa2b158088 0d1f2b207c 38e4e705a4 82d7036860
0727e7db69 2484b1c473 cd2e7bbd0f 21d5383882 2ec484da41 648ce9970f fd278a5ee6 4f5cce33fc 53bb75a2c1 45595bd64f
af4d141c6a bef2be10d2 803a08a736 186615d988 666f91de18 c8ce786e39 73cb26551b d90acb1aeb 17bb88faf4 a52c7e91f5
```
.gitignore (vendored), 2 lines changed:

```diff
@@ -11,3 +11,5 @@ lib/www/client/dist/
+etc/surveys/*.yaml
+!etc/surveys/_*.yaml
 etc/ssl/*
 etc/config.yaml
 var/*
```
bin/check_mounts_present.py, new executable file, 27 lines:

```python
#!/usr/bin/python3

"""
Check if any of the directories provided in the imports.mounts configuration
section are empty.

Returns 0 if all arguments are non-empty, 1 otherwise. It stops at the first
empty directory.
"""

import os
import configuration

cfg = configuration.read()

if cfg and "imports" in cfg and "mounts" in cfg["imports"]:

    mounts = cfg["imports"]["mounts"]
    for item in mounts:
        with os.scandir(item) as contents:
            if not any(contents):
                exit(1)

else:
    print("No mounts in configuration")

exit(0)
```
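The emptiness test works because `os.scandir` returns a lazy iterator of directory entries, so `any(contents)` is true exactly when at least one entry exists, without listing the whole directory. A self-contained check (the temporary directory is illustrative only):

```python
import os
import tempfile

with tempfile.TemporaryDirectory() as d:
    with os.scandir(d) as it:
        print(any(it))                 # False: a fresh directory is empty
    open(os.path.join(d, "marker"), "w").close()
    with os.scandir(d) as it:
        print(any(it))                 # True: one entry is enough
```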
The next hunks (file header not captured) modify the configuration module shared by the `bin/` scripts:

```diff
@@ -1,4 +1,5 @@
 import os
+import pathlib
 from glob import glob
 from yaml import full_load as _load
 
@@ -11,6 +12,18 @@ surveys should be under $HOME/etc/surveys/*.yaml. In both cases,
 $HOME is the home directory of the user running this script.
 """
 
+def is_relative_to(it, other):
+    """
+    is_relative_to() is not present before Python 3.9, so we
+    need this kludge to get Dougal to run on OpenSUSE 15.4
+    """
+
+    if "is_relative_to" in dir(it):
+        return it.is_relative_to(other)
+
+    return str(it.absolute()).startswith(str(other.absolute()))
+
+
-prefix = os.environ.get("DOUGAL_ROOT", os.environ.get("HOME", ".")+"/software")
+DOUGAL_ROOT = os.environ.get("DOUGAL_ROOT", os.environ.get("HOME", ".")+"/software")
@@ -54,6 +67,10 @@ def files (globspec = None, include_archived = False):
     quickly and temporarily “disabling” a survey configuration by renaming
     the relevant file.
     """
+
+    print("This method is obsolete")
+    return
+
     tuples = []
 
     if globspec is None:
@@ -87,3 +104,73 @@ def rxflags (flagstr):
     for flag in flagstr:
         flags |= cases.get(flag, 0)
     return flags
+
+def translate_path (file):
+    """
+    Translate a path from a Dougal import directory to an actual
+    physical path on disk.
+
+    Any user files accessible by Dougal must be under a path prefixed
+    by `(config.yaml).imports.paths`. The value of `imports.paths` may
+    be either a string, in which case this represents the prefix under
+    which all Dougal data resides, or a dictionary where the keys are
+    logical paths and their values the corresponding physical path.
+    """
+    cfg = read()
+    root = pathlib.Path(DOUGAL_ROOT)
+    filepath = pathlib.Path(file).resolve()
+    import_paths = cfg["imports"]["paths"]
+
+    if filepath.is_absolute():
+        if type(import_paths) == str:
+            # Substitute the root for the real physical path
+            # NOTE: `root` deals with import_paths not being absolute
+            prefix = root.joinpath(pathlib.Path(import_paths)).resolve()
+            return str(pathlib.Path(prefix).joinpath(*filepath.parts[2:]))
+        else:
+            # Look for a match on the second path element
+            if filepath.parts[1] in import_paths:
+                # NOTE: `root` deals with import_paths[…] not being absolute
+                prefix = root.joinpath(import_paths[filepath.parts[1]])
+                return str(pathlib.Path(prefix).joinpath(*filepath.parts[2:]))
+            else:
+                # This path is invalid
+                raise TypeError("invalid path or file: {0!r}".format(filepath))
+    else:
+        # A relative filepath is always resolved relative to the logical root
+        root = pathlib.Path("/")
+        return translate_path(root.joinpath(filepath))
+
+def untranslate_path (file):
+    """
+    Attempt to convert a physical path into a logical one.
+    See `translate_path()` above for details.
+    """
+    cfg = read()
+    dougal_root = pathlib.Path(DOUGAL_ROOT)
+    filepath = pathlib.Path(file).resolve()
+    import_paths = cfg["imports"]["paths"]
+    physical_root = pathlib.Path("/")
+
+    if filepath.is_absolute():
+        if type(import_paths) == str:
+            physical_prefix = pathlib.Path(import_paths)
+            if is_relative_to(filepath, physical_prefix):
+                return str(physical_root.joinpath(filepath.relative_to(physical_prefix)))
+            else:
+                raise TypeError("invalid path or file: {0!r}".format(filepath))
+        else:
+            for key, value in import_paths.items():
+                value = dougal_root.joinpath(value)
+                physical_prefix = pathlib.Path(value)
+                if is_relative_to(filepath, physical_prefix):
+                    logical_prefix = physical_root.joinpath(pathlib.Path(key)).resolve()
+                    return str(logical_prefix.joinpath(filepath.relative_to(physical_prefix)))
+
+            # If we got here with no matches, this is not a valid
+            # Dougal data path
+            raise TypeError("invalid path or file: {0!r}".format(filepath))
+    else:
+        # A relative filepath is always resolved relative to DOUGAL_ROOT
+        return untranslate_path(dougal_root.joinpath(filepath))
```
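A minimal sketch of the logical-to-physical mapping that `translate_path()` performs; the `DOUGAL_ROOT` value and the layer names in the dictionary below are hypothetical:

```python
import pathlib

DOUGAL_ROOT = "/home/dougal/software"   # hypothetical; normally from $DOUGAL_ROOT

def sketch_translate(logical, import_paths):
    root = pathlib.PurePosixPath(DOUGAL_ROOT)
    p = pathlib.PurePosixPath(logical)
    if isinstance(import_paths, str):
        # String form: one physical prefix for the whole logical tree.
        prefix = root / import_paths            # absolute values replace root
        return str(prefix.joinpath(*p.parts[2:]))
    # Dictionary form: the first logical path element picks the prefix.
    prefix = root / import_paths[p.parts[1]]    # relative values live under DOUGAL_ROOT
    return str(prefix.joinpath(*p.parts[2:]))

print(sketch_translate("/projects/2024/seq001.p111",
                       {"projects": "/srv/mnt/Data", "exports": "var/exports"}))
# → /srv/mnt/Data/2024/seq001.p111
```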
One hunk (file header not captured) corrects the example EPSG code in a comment:

```diff
@@ -10,7 +10,7 @@
 # be known to the database.
 # * PROJECT_NAME is a more descriptive name for human consumption.
 # * EPSG_CODE is the EPSG code identifying the CRS for the grid data in the
-#   navigation files, e.g., 32031.
+#   navigation files, e.g., 23031.
 #
 # In addition to this, certain other parameters may be controlled via
 # environment variables:
```
bin/daily_tasks.py, new executable file, 26 lines:

```python
#!/usr/bin/python3

"""
Do daily housekeeping on the database.

This is meant to run shortly after midnight every day.
"""

import configuration
from datastore import Datastore

if __name__ == '__main__':

    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()

    print("Reading surveys")
    for survey in surveys:
        print(f'Survey: {survey["id"]} ({survey["schema"]})')
        db.set_survey(survey["schema"])

        print("Daily tasks")
        db.run_daily_tasks()

    print("Done")
```
bin/datastore.py, 127 lines changed:

```diff
@@ -52,7 +52,7 @@ class Datastore:
 
         self.conn = psycopg2.connect(configuration.read()["db"]["connection_string"], **opts)
 
-    def set_autocommit(value = True):
+    def set_autocommit(self, value = True):
        """
        Enable or disable autocommit.
 
@@ -95,7 +95,7 @@ class Datastore:
             cursor.execute(qry, (filepath,))
             results = cursor.fetchall()
             if len(results):
-                return (filepath, file_hash(filepath)) in results
+                return (filepath, file_hash(configuration.translate_path(filepath))) in results
 
 
     def add_file(self, path, cursor = None):
@@ -107,7 +107,8 @@ class Datastore:
         else:
             cur = cursor
 
-        hash = file_hash(path)
+        realpath = configuration.translate_path(path)
+        hash = file_hash(realpath)
         qry = "CALL add_file(%s, %s);"
         cur.execute(qry, (path, hash))
         if cursor is None:
@@ -176,7 +177,7 @@ class Datastore:
         else:
             cur = cursor
 
-        hash = file_hash(path)
+        hash = file_hash(configuration.translate_path(path))
         qry = """
             UPDATE raw_lines rl
             SET ntbp = %s
@@ -412,7 +413,11 @@ class Datastore:
         qry = """
             INSERT INTO raw_lines (sequence, line, remarks, ntbp, incr, meta)
             VALUES (%s, %s, '', %s, %s, %s)
-            ON CONFLICT DO NOTHING;
+            ON CONFLICT (sequence) DO UPDATE SET
+                line = EXCLUDED.line,
+                ntbp = EXCLUDED.ntbp,
+                incr = EXCLUDED.incr,
+                meta = EXCLUDED.meta;
         """
 
         cursor.execute(qry, (fileinfo["sequence"], fileinfo["line"], ntbp, incr, json.dumps(fileinfo["meta"])))
@@ -462,7 +467,9 @@ class Datastore:
         qry = """
             INSERT INTO final_lines (sequence, line, remarks, meta)
             VALUES (%s, %s, '', %s)
-            ON CONFLICT DO NOTHING;
+            ON CONFLICT (sequence) DO UPDATE SET
+                line = EXCLUDED.line,
+                meta = EXCLUDED.meta;
         """
 
         cursor.execute(qry, (fileinfo["sequence"], fileinfo["line"], json.dumps(fileinfo["meta"])))
```
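The switch from `ON CONFLICT DO NOTHING` to `ON CONFLICT (sequence) DO UPDATE` is what lets a re-imported sequence refresh its existing row instead of being silently ignored. A hedged illustration with psycopg2 (the connection string and values are assumptions; the query text is taken from the hunk above):

```python
import json
import psycopg2

conn = psycopg2.connect("dbname=dougal")  # assumed connection string

qry = """
    INSERT INTO raw_lines (sequence, line, remarks, ntbp, incr, meta)
    VALUES (%s, %s, '', %s, %s, %s)
    ON CONFLICT (sequence) DO UPDATE SET
        line = EXCLUDED.line,
        ntbp = EXCLUDED.ntbp,
        incr = EXCLUDED.incr,
        meta = EXCLUDED.meta;
"""

with conn, conn.cursor() as cur:
    cur.execute(qry, (101, "L0101", False, 1, json.dumps({"lineName": "L0101"})))
    # Re-importing the same sequence now updates the row in place;
    # under DO NOTHING this second execute would have been a no-op.
    cur.execute(qry, (101, "L0101A", True, 2, json.dumps({"lineName": "L0101A"})))
```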
```diff
@@ -582,7 +589,63 @@ class Datastore:
             # We do not commit if we've been passed a cursor, instead
             # we assume that we are in the middle of a transaction
 
+    def get_file_data(self, path, cursor = None):
+        """
+        Retrieve arbitrary data associated with a file.
+        """
+
+        if cursor is None:
+            cur = self.conn.cursor()
+        else:
+            cur = cursor
+
+        realpath = configuration.translate_path(path)
+        hash = file_hash(realpath)
+
+        qry = """
+            SELECT data
+            FROM file_data
+            WHERE hash = %s;
+        """
+
+        cur.execute(qry, (hash,))
+        res = cur.fetchone()
+
+        if cursor is None:
+            self.maybe_commit()
+            # We do not commit if we've been passed a cursor, instead
+            # we assume that we are in the middle of a transaction
+        return res[0]
+
+    def surveys (self, include_archived = False):
+        """
+        Return list of survey definitions.
+        """
+
+        if self.conn is None:
+            self.connect()
+
+        if include_archived:
+            qry = """
+                SELECT meta
+                FROM public.projects;
+            """
+        else:
+            qry = """
+                SELECT meta
+                FROM public.projects
+                WHERE NOT (meta->'archived')::boolean IS true
+            """
+
+        with self.conn:
+            with self.conn.cursor() as cursor:
+
+                cursor.execute(qry)
+                results = cursor.fetchall()
+                return [r[0] for r in results if r[0]]
+
+
+    # TODO Does this need tweaking on account of #246?
     def apply_survey_configuration(self, cursor = None):
         if cursor is None:
             cur = self.conn.cursor()
@@ -679,3 +742,55 @@ class Datastore:
             # We do not commit if we've been passed a cursor, instead
             # we assume that we are in the middle of a transaction
 
+    def adjust_planner(self, cursor = None):
+        """
+        Adjust estimated times on the planner
+        """
+        if cursor is None:
+            cur = self.conn.cursor()
+        else:
+            cur = cursor
+
+        qry = "CALL adjust_planner();"
+        cur.execute(qry)
+        if cursor is None:
+            self.maybe_commit()
+            # We do not commit if we've been passed a cursor, instead
+            # we assume that we are in the middle of a transaction
+
+    def housekeep_event_log(self, cursor = None):
+        """
+        Call housekeeping actions on the event log
+        """
+        if cursor is None:
+            cur = self.conn.cursor()
+        else:
+            cur = cursor
+
+        qry = "CALL augment_event_data();"
+        cur.execute(qry)
+
+        qry = "CALL scan_placeholders();"
+        cur.execute(qry)
+
+        if cursor is None:
+            self.maybe_commit()
+            # We do not commit if we've been passed a cursor, instead
+            # we assume that we are in the middle of a transaction
+
+    def run_daily_tasks(self, cursor = None):
+        """
+        Run once-a-day tasks
+        """
+        if cursor is None:
+            cur = self.conn.cursor()
+        else:
+            cur = cursor
+
+        qry = "CALL log_midnight_shots();"
+        cur.execute(qry)
+
+        if cursor is None:
+            self.maybe_commit()
+            # We do not commit if we've been passed a cursor, instead
+            # we assume that we are in the middle of a transaction
```
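All three new helpers repeat the same idiom as the earlier methods: open a cursor only when the caller did not supply one, and commit only in that case, so the calls compose inside a larger transaction. A hedged sketch of that idiom factored into a decorator (not part of this codebase):

```python
import functools

def with_cursor(method):
    """Run `method` with a caller-supplied cursor, or open one and commit."""
    @functools.wraps(method)
    def wrapper(self, *args, cursor=None, **kwargs):
        own = cursor is None
        cur = self.conn.cursor() if own else cursor
        try:
            return method(self, *args, cursor=cur, **kwargs)
        finally:
            if own:
                # Commit only when we own the cursor; a caller-supplied
                # cursor means we are in the middle of their transaction.
                self.maybe_commit()
    return wrapper
```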
bin/housekeep_database.py, new executable file, 26 lines:

```python
#!/usr/bin/python3

"""
Do housekeeping actions on the database.
"""

import configuration
from datastore import Datastore

if __name__ == '__main__':

    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()

    print("Reading surveys")
    for survey in surveys:
        print(f'Survey: {survey["id"]} ({survey["schema"]})')
        db.set_survey(survey["schema"])

        print("Planner adjustment")
        db.adjust_planner()
        print("Event log housekeeping")
        db.housekeep_event_log()

    print("Done")
```
A hunk in the QC export code (file header not captured; the changed characters were not distinguishable in the mirror, so lines are shown as context):

```diff
@@ -59,7 +59,7 @@ def qc_data (cursor, prefix):
     else:
         print("No QC data found");
         return
 
     #print("QC", qc)
     index = 0
     for item in qc["results"]:
```
Hunks in the sequence export code (file header not captured; shown as context for the same reason):

```diff
@@ -39,7 +39,7 @@ def seis_data (survey):
     if not pathlib.Path(pathPrefix).exists():
         print(pathPrefix)
         raise ValueError("Export path does not exist")
 
     print(f"Requesting sequences for {survey['id']}")
     url = f"http://localhost:3000/api/project/{survey['id']}/sequence"
     r = requests.get(url)
@@ -47,12 +47,12 @@ def seis_data (survey):
     for sequence in r.json():
         if sequence['status'] not in ["final", "ntbp"]:
             continue
 
         filename = pathlib.Path(pathPrefix, "sequence{:0>3d}.json".format(sequence['sequence']))
         if filename.exists():
             print(f"Skipping export for sequence {sequence['sequence']} – file already exists")
             continue
 
         print(f"Processing sequence {sequence['sequence']}")
         url = f"http://localhost:3000/api/project/{survey['id']}/event?sequence={sequence['sequence']}&missing=t"
         headers = { "Accept": "application/vnd.seis+json" }
```
Changes to the final P1/11 import script (file header not captured):

```diff
@@ -19,7 +19,7 @@ from datastore import Datastore
 
 def add_pending_remark(db, sequence):
     text = '<!-- @@DGL:PENDING@@ --><h4 style="color:red;cursor:help;" title="Edit the sequence file or directory name to import final data">Marked as <code>PENDING</code>.</h4><!-- @@/DGL:PENDING@@ -->\n'
 
     with db.conn.cursor() as cursor:
         qry = "SELECT remarks FROM raw_lines WHERE sequence = %s;"
         cursor.execute(qry, (sequence,))
@@ -33,28 +33,29 @@ def add_pending_remark(db, sequence):
     db.maybe_commit()
 
 def del_pending_remark(db, sequence):
 
     with db.conn.cursor() as cursor:
         qry = "SELECT remarks FROM raw_lines WHERE sequence = %s;"
         cursor.execute(qry, (sequence,))
-        remarks = cursor.fetchone()[0]
-        rx = re.compile("^(<!-- @@DGL:PENDING@@ -->.*<!-- @@/DGL:PENDING@@ -->\n)")
-        m = rx.match(remarks)
-        if m is not None:
-            remarks = rx.sub("",remarks)
-            qry = "UPDATE raw_lines SET remarks = %s WHERE sequence = %s;"
-            cursor.execute(qry, (remarks, sequence))
-            db.maybe_commit()
+        row = cursor.fetchone()
+        if row is not None:
+            remarks = row[0]
+            rx = re.compile("^(<!-- @@DGL:PENDING@@ -->.*<!-- @@/DGL:PENDING@@ -->\n)")
+            m = rx.match(remarks)
+            if m is not None:
+                remarks = rx.sub("",remarks)
+                qry = "UPDATE raw_lines SET remarks = %s WHERE sequence = %s;"
+                cursor.execute(qry, (remarks, sequence))
+                db.maybe_commit()
 
 if __name__ == '__main__':
 
     print("Reading configuration")
-    surveys = configuration.surveys()
     file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
 
     print("Connecting to database")
     db = Datastore()
+    db.connect()
+    surveys = db.surveys()
 
     print("Reading surveys")
     for survey in surveys:
@@ -75,52 +76,54 @@ if __name__ == '__main__':
         pendingRx = re.compile(survey["final"]["pending"]["pattern"]["regex"])
 
         for fileprefix in final_p111["paths"]:
-            print(f"Path prefix: {fileprefix}")
+            realprefix = configuration.translate_path(fileprefix)
+            print(f"Path prefix: {fileprefix} → {realprefix}")
 
             for globspec in final_p111["globs"]:
-                for filepath in pathlib.Path(fileprefix).glob(globspec):
-                    filepath = str(filepath)
-                    print(f"Found {filepath}")
+                for physical_filepath in pathlib.Path(realprefix).glob(globspec):
+                    physical_filepath = str(physical_filepath)
+                    logical_filepath = configuration.untranslate_path(physical_filepath)
+                    print(f"Found {logical_filepath}")
 
                     pending = False
                     if pendingRx:
-                        pending = pendingRx.search(filepath) is not None
+                        pending = pendingRx.search(physical_filepath) is not None
 
-                    if not db.file_in_db(filepath):
-
-                        age = time.time() - os.path.getmtime(filepath)
+                    if not db.file_in_db(logical_filepath):
+
+                        age = time.time() - os.path.getmtime(physical_filepath)
                         if age < file_min_age:
-                            print("Skipping file because too new", filepath)
+                            print("Skipping file because too new", logical_filepath)
                             continue
 
                         print("Importing")
 
-                        match = rx.match(os.path.basename(filepath))
+                        match = rx.match(os.path.basename(logical_filepath))
                         if not match:
-                            error_message = f"File path not match the expected format! ({filepath} ~ {pattern['regex']})"
+                            error_message = f"File path not match the expected format! ({logical_filepath} ~ {pattern['regex']})"
                             print(error_message, file=sys.stderr)
                             print("This file will be ignored!")
                             continue
 
                         file_info = dict(zip(pattern["captures"], match.groups()))
                         file_info["meta"] = {}
 
                         if pending:
-                            print("Skipping / removing final file because marked as PENDING", filepath)
+                            print("Skipping / removing final file because marked as PENDING", logical_filepath)
                             db.del_sequence_final(file_info["sequence"])
                             add_pending_remark(db, file_info["sequence"])
                             continue
                         else:
                             del_pending_remark(db, file_info["sequence"])
 
-                        p111_data = p111.from_file(filepath)
+                        p111_data = p111.from_file(physical_filepath)
 
                         print("Saving")
 
                         p111_records = p111.p111_type("S", p111_data)
                         file_info["meta"]["lineName"] = p111.line_name(p111_data)
 
-                        db.save_final_p111(p111_records, file_info, filepath, survey["epsg"])
+                        db.save_final_p111(p111_records, file_info, logical_filepath, survey["epsg"])
                     else:
                         print("Already in DB")
                         if pending:
```
A matching hunk, apparently in the final P1/90 counterpart (file header not captured; shown as context):

```diff
@@ -51,12 +51,12 @@ if __name__ == '__main__':
             print(f"Found {filepath}")
 
             if not db.file_in_db(filepath):
 
                 age = time.time() - os.path.getmtime(filepath)
                 if age < file_min_age:
                     print("Skipping file because too new", filepath)
                     continue
 
                 print("Importing")
 
                 match = rx.match(os.path.basename(filepath))
```
bin/import_map_layers.py, new executable file, 127 lines:

```python
#!/usr/bin/python3

"""
Import SmartSource data.

For each survey in configuration.surveys(), check for new
or modified final gun header files and (re-)import them into the
database.
"""

import os
import sys
import pathlib
import re
import time
import json
import configuration
from datastore import Datastore

if __name__ == '__main__':
    """
    Imports map layers from the directories defined in the configuration object
    `import.map.layers`. The content of that key is an object with the following
    structure:

    {
        layer1Name: [
            format: "geojson",
            path: "…",          // Logical path to a directory
            globs: [
                "**/*.geojson", // List of globs matching map data files
                …
            ]
        ],

        layer2Name: …
        …
    }
    """

    def process (layer_name, layer, physical_filepath):
        physical_filepath = str(physical_filepath)
        logical_filepath = configuration.untranslate_path(physical_filepath)
        print(f"Found {logical_filepath}")

        if not db.file_in_db(logical_filepath):

            age = time.time() - os.path.getmtime(physical_filepath)
            if age < file_min_age:
                print("Skipping file because too new", logical_filepath)
                return

            print("Importing")

            file_info = {
                "type": "map_layer",
                "format": layer["format"],
                "name": layer_name,
                "tooltip": layer.get("tooltip"),
                "popup": layer.get("popup")
            }

            db.save_file_data(logical_filepath, json.dumps(file_info))

        else:
            file_info = db.get_file_data(logical_filepath)
            dirty = False
            if file_info:
                if file_info["name"] != layer_name:
                    print("Renaming to", layer_name)
                    file_info["name"] = layer_name
                    dirty = True
                if file_info.get("tooltip") != layer.get("tooltip"):
                    print("Changing tooltip to", layer.get("tooltip") or "null")
                    file_info["tooltip"] = layer.get("tooltip")
                    dirty = True
                if file_info.get("popup") != layer.get("popup"):
                    print("Changing popup to", layer.get("popup") or "null")
                    file_info["popup"] = layer.get("popup")
                    dirty = True

                if dirty:
                    db.save_file_data(logical_filepath, json.dumps(file_info))
                else:
                    print("Already in DB")


    print("Reading configuration")
    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)

    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()

    print("Reading surveys")
    for survey in surveys:
        print(f'Survey: {survey["id"]} ({survey["schema"]})')

        db.set_survey(survey["schema"])

        try:
            map_layers = survey["imports"]["map"]["layers"]
        except KeyError:
            print("No map layers defined")
            continue

        for layer_name, layer_items in map_layers.items():

            for layer in layer_items:
                fileprefix = layer["path"]
                realprefix = configuration.translate_path(fileprefix)

                if os.path.isfile(realprefix):

                    process(layer_name, layer, realprefix)

                elif os.path.isdir(realprefix):

                    if not "globs" in layer:
                        layer["globs"] = [ "**/*.geojson" ]

                    for globspec in layer["globs"]:
                        for physical_filepath in pathlib.Path(realprefix).glob(globspec):
                            process(layer_name, layer, physical_filepath)

    print("Done")
```
Changes to the preplot import script (file header not captured):

```diff
@@ -17,29 +17,31 @@ from datastore import Datastore
 
 if __name__ == '__main__':
 
-    print("Reading configuration")
-    surveys = configuration.surveys()
-    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
-
     print("Connecting to database")
     db = Datastore()
+    surveys = db.surveys()
+
+    print("Reading configuration")
+    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
 
     print("Reading surveys")
     for survey in surveys:
         print(f'Survey: {survey["id"]} ({survey["schema"]})')
         db.set_survey(survey["schema"])
         for file in survey["preplots"]:
+            realpath = configuration.translate_path(file["path"])
+
             print(f"Preplot: {file['path']}")
             if not db.file_in_db(file["path"]):
 
-                age = time.time() - os.path.getmtime(file["path"])
+                age = time.time() - os.path.getmtime(realpath)
                 if age < file_min_age:
                     print("Skipping file because too new", file["path"])
                     continue
 
                 print("Importing")
                 try:
-                    preplot = preplots.from_file(file)
+                    preplot = preplots.from_file(file, realpath)
                 except FileNotFoundError:
                     print(f"File does not exist: {file['path']}", file=sys.stderr)
                     continue
```
Changes to the raw P1/11 import script (file header not captured):

```diff
@@ -20,12 +20,11 @@ from datastore import Datastore
 if __name__ == '__main__':
 
     print("Reading configuration")
-    surveys = configuration.surveys()
     file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
 
     print("Connecting to database")
     db = Datastore()
-    db.connect()
+    surveys = db.surveys()
 
     print("Reading surveys")
     for survey in surveys:
@@ -46,30 +45,32 @@ if __name__ == '__main__':
         ntbpRx = re.compile(survey["raw"]["ntbp"]["pattern"]["regex"])
 
         for fileprefix in raw_p111["paths"]:
-            print(f"Path prefix: {fileprefix}")
+            realprefix = configuration.translate_path(fileprefix)
+            print(f"Path prefix: {fileprefix} → {realprefix}")
 
             for globspec in raw_p111["globs"]:
-                for filepath in pathlib.Path(fileprefix).glob(globspec):
-                    filepath = str(filepath)
-                    print(f"Found {filepath}")
+                for physical_filepath in pathlib.Path(realprefix).glob(globspec):
+                    physical_filepath = str(physical_filepath)
+                    logical_filepath = configuration.untranslate_path(physical_filepath)
+                    print(f"Found {logical_filepath}")
 
                     if ntbpRx:
-                        ntbp = ntbpRx.search(filepath) is not None
+                        ntbp = ntbpRx.search(physical_filepath) is not None
                     else:
                         ntbp = False
 
-                    if not db.file_in_db(filepath):
-
-                        age = time.time() - os.path.getmtime(filepath)
+                    if not db.file_in_db(logical_filepath):
+
+                        age = time.time() - os.path.getmtime(physical_filepath)
                         if age < file_min_age:
-                            print("Skipping file because too new", filepath)
+                            print("Skipping file because too new", logical_filepath)
                             continue
 
                         print("Importing")
 
-                        match = rx.match(os.path.basename(filepath))
+                        match = rx.match(os.path.basename(logical_filepath))
                         if not match:
-                            error_message = f"File path not match the expected format! ({filepath} ~ {pattern['regex']})"
+                            error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})"
                             print(error_message, file=sys.stderr)
                             print("This file will be ignored!")
                             continue
@@ -77,20 +78,23 @@ if __name__ == '__main__':
                         file_info = dict(zip(pattern["captures"], match.groups()))
                         file_info["meta"] = {}
 
-                        p111_data = p111.from_file(filepath)
+                        p111_data = p111.from_file(physical_filepath)
 
                         print("Saving")
 
                         p111_records = p111.p111_type("S", p111_data)
-                        file_info["meta"]["lineName"] = p111.line_name(p111_data)
+                        if len(p111_records):
+                            file_info["meta"]["lineName"] = p111.line_name(p111_data)
 
-                        db.save_raw_p111(p111_records, file_info, filepath, survey["epsg"], ntbp=ntbp)
+                            db.save_raw_p111(p111_records, file_info, logical_filepath, survey["epsg"], ntbp=ntbp)
+                        else:
+                            print("No source records found in file")
                     else:
                         print("Already in DB")
 
                         # Update the NTBP status to whatever the latest is,
                         # as it might have changed.
-                        db.set_ntbp(filepath, ntbp)
+                        db.set_ntbp(logical_filepath, ntbp)
                         if ntbp:
                             print("Sequence is NTBP")
```
A matching hunk, apparently in the raw P1/90 counterpart (file header not captured; shown as context):

```diff
@@ -54,12 +54,12 @@ if __name__ == '__main__':
             print(f"Found {filepath}")
 
             if not db.file_in_db(filepath):
 
                 age = time.time() - os.path.getmtime(filepath)
                 if age < file_min_age:
                     print("Skipping file because too new", filepath)
                     continue
 
                 print("Importing")
 
                 match = rx.match(os.path.basename(filepath))
```
Changes to the SmartSource import script (file header not captured):

```diff
@@ -20,12 +20,11 @@ from datastore import Datastore
 if __name__ == '__main__':
 
     print("Reading configuration")
-    surveys = configuration.surveys()
     file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
 
     print("Connecting to database")
     db = Datastore()
-    db.connect()
+    surveys = db.surveys()
 
     print("Reading surveys")
     for survey in surveys:
@@ -47,36 +46,38 @@ if __name__ == '__main__':
         rx = re.compile(pattern["regex"], flags)
 
         for fileprefix in raw_smsrc["paths"]:
-            print(f"Path prefix: {fileprefix}")
+            realprefix = configuration.translate_path(fileprefix)
+            print(f"Path prefix: {fileprefix} → {realprefix}")
 
             for globspec in raw_smsrc["globs"]:
-                for filepath in pathlib.Path(fileprefix).glob(globspec):
-                    filepath = str(filepath)
-                    print(f"Found {filepath}")
+                for physical_filepath in pathlib.Path(realprefix).glob(globspec):
+                    physical_filepath = str(physical_filepath)
+                    logical_filepath = configuration.untranslate_path(physical_filepath)
+                    print(f"Found {logical_filepath}")
 
-                    if not db.file_in_db(filepath):
-
-                        age = time.time() - os.path.getmtime(filepath)
+                    if not db.file_in_db(logical_filepath):
+
+                        age = time.time() - os.path.getmtime(physical_filepath)
                         if age < file_min_age:
-                            print("Skipping file because too new", filepath)
+                            print("Skipping file because too new", logical_filepath)
                             continue
 
                         print("Importing")
 
-                        match = rx.match(os.path.basename(filepath))
+                        match = rx.match(os.path.basename(logical_filepath))
                         if not match:
-                            error_message = f"File path not matching the expected format! ({filepath} ~ {pattern['regex']})"
+                            error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})"
                             print(error_message, file=sys.stderr)
                             print("This file will be ignored!")
                             continue
 
                         file_info = dict(zip(pattern["captures"], match.groups()))
 
-                        smsrc_records = smsrc.from_file(filepath)
+                        smsrc_records = smsrc.from_file(physical_filepath)
 
                         print("Saving")
 
-                        db.save_raw_smsrc(smsrc_records, file_info, filepath)
+                        db.save_raw_smsrc(smsrc_records, file_info, logical_filepath)
                     else:
                         print("Already in DB")
```
The survey-configuration import script is gutted and reduced to a stub (file header not captured):

```diff
@@ -15,25 +15,4 @@ from datastore import Datastore
 
 if __name__ == '__main__':
 
-    print("Reading configuration")
-    configs = configuration.files(include_archived = True)
-
-    print("Connecting to database")
-    db = Datastore()
-    #db.connect()
-
-    print("Reading surveys")
-    for config in configs:
-        filepath = config[0]
-        survey = config[1]
-        print(f'Survey: {survey["id"]} ({filepath})')
-        db.set_survey(survey["schema"])
-        if not db.file_in_db(filepath):
-            print("Saving to DB")
-            db.save_file_data(filepath, json.dumps(survey))
-            print("Applying survey configuration")
-            db.apply_survey_configuration()
-        else:
-            print("Already in DB")
-
-    print("Done")
+    print("This function is obsolete. Returning with no action")
```
Changes to the manual event-insertion script (file header not captured); events now go to `event_log` with placeholder expansion and labels:

```diff
@@ -14,7 +14,7 @@ def detect_schema (conn):
 
 if __name__ == '__main__':
     import argparse
 
     ap = argparse.ArgumentParser()
     ap.add_argument("-s", "--schema", required=False, default=None, help="survey where to insert the event")
     ap.add_argument("-t", "--tstamp", required=False, default=None, help="event timestamp")
@@ -30,19 +30,19 @@ if __name__ == '__main__':
         schema = args["schema"]
     else:
         schema = detect_schema(db.conn)
 
     if args["tstamp"]:
         tstamp = args["tstamp"]
     else:
         tstamp = datetime.utcnow().isoformat()
 
     message = " ".join(args["remarks"])
 
-    print("new event:", schema, tstamp, message)
+    print("new event:", schema, tstamp, message, args["label"])
 
     if schema and tstamp and message:
         db.set_survey(schema)
         with db.conn.cursor() as cursor:
-            qry = "INSERT INTO events_timed (tstamp, remarks) VALUES (%s, %s);"
-            cursor.execute(qry, (tstamp, message))
+            qry = "INSERT INTO event_log (tstamp, remarks, labels) VALUES (%s, replace_placeholders(%s, %s, NULL, NULL), %s);"
+            cursor.execute(qry, (tstamp, message, tstamp, args["label"]))
             db.maybe_commit()
```
bin/p190.py, 36 lines changed. Every hunk pairs visually identical rows, which is how whitespace-only edits render in this mirror; the affected regions are shown as context:

```diff
@@ -12,7 +12,7 @@ from parse_fwr import parse_fwr
 def parse_p190_header (string):
     """Parse a generic P1/90 header record.
 
     Returns a dictionary of fields.
     """
     names = [ "record_type", "header_type", "header_type_modifier", "description", "data" ]
@@ -27,7 +27,7 @@ def parse_p190_type1 (string):
               "doy", "time", "spare2" ]
     record = parse_fwr(string, [1, 12, 3, 1, 1, 1, 6, 10, 11, 9, 9, 6, 3, 6, 1])
     return dict(zip(names, record))
 
 def parse_p190_rcv_group (string):
     """Parse a P1/90 Type 1 receiver group record."""
     names = [ "record_type",
@@ -37,7 +37,7 @@ def parse_p190_rcv_group (string):
               "streamer_id" ]
     record = parse_fwr(string, [1, 4, 9, 9, 4, 4, 9, 9, 4, 4, 9, 9, 4, 1])
     return dict(zip(names, record))
 
 def parse_line (string):
     type = string[0]
     if string[:3] == "EOF":
@@ -52,7 +52,7 @@ def parse_line (string):
 
 def p190_type(type, records):
     return [ r for r in records if r["record_type"] == type ]
 
 def p190_header(code, records):
     return [ h for h in p190_type("H", records) if h["header_type"]+h["header_type_modifier"] == code ]
 
@@ -86,15 +86,15 @@ def normalise_record(record):
     # These are probably strings
     elif "strip" in dir(record[key]):
         record[key] = record[key].strip()
 
     return record
 
 def normalise(records):
     for record in records:
         normalise_record(record)
 
     return records
 
 def from_file(path, only_records=None, shot_range=None, with_objrefs=False):
     records = []
     with open(path) as fd:
@@ -102,10 +102,10 @@ def from_file(path, only_records=None, shot_range=None, with_objrefs=False):
         line = fd.readline()
         while line:
             cnt = cnt + 1
 
             if line == "EOF":
                 break
 
             record = parse_line(line)
             if record is not None:
                 if only_records:
@@ -121,9 +121,9 @@ def from_file(path, only_records=None, shot_range=None, with_objrefs=False):
 
             records.append(record)
             line = fd.readline()
 
     return records
 
 def apply_tstamps(recordset, tstamp=None, fix_bad_seconds=False):
     #print("tstamp", tstamp, type(tstamp))
     if type(tstamp) is int:
@@ -161,16 +161,16 @@ def apply_tstamps(recordset, tstamp=None, fix_bad_seconds=False):
                 record["tstamp"] = ts
                 prev[object_id(record)] = doy
                 break
 
     return recordset
 
 def dms(value):
     # 591544.61N
     hemisphere = 1 if value[-1] in "NnEe" else -1
     seconds = float(value[-6:-1])
     minutes = int(value[-8:-6])
     degrees = int(value[:-8])
 
     return (degrees + minutes/60 + seconds/3600) * hemisphere
 
 def tod(record):
@@ -183,7 +183,7 @@ def tod(record):
     m = int(time[2:4])
     s = float(time[4:])
     return d*86400 + h*3600 + m*60 + s
 
 def duration(record0, record1):
     ts0 = tod(record0)
     ts1 = tod(record1)
@@ -198,10 +198,10 @@ def azimuth(record0, record1):
     x0, y0 = float(record0["easting"]), float(record0["northing"])
     x1, y1 = float(record1["easting"]), float(record1["northing"])
     return math.degrees(math.atan2(x1-x0, y1-y0)) % 360
 
 def speed(record0, record1, knots=False):
     scale = 3600/1852 if knots else 1
     t0 = tod(record0)
     t1 = tod(record1)
     return (distance(record0, record1) / math.fabs(t1-t0)) * scale
```
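For reference, `dms()` unpacks P1/90 packed coordinates such as `591544.61N` (59° 15′ 44.61″ N) from the right: one hemisphere letter, five characters of seconds, two of minutes, and the remainder degrees. A quick check of the arithmetic:

```python
def dms(value):
    # 591544.61N → 59° 15′ 44.61″ N
    hemisphere = 1 if value[-1] in "NnEe" else -1
    seconds = float(value[-6:-1])
    minutes = int(value[-8:-6])
    degrees = int(value[:-8])
    return (degrees + minutes/60 + seconds/3600) * hemisphere

assert abs(dms("591544.61N") - 59.262392) < 1e-6   # 59 + 15/60 + 44.61/3600
assert dms("0045030.00W") < 0                      # west longitudes come out negative
```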
Changes to the preplot parsing module (file header not captured):

```diff
@@ -4,9 +4,10 @@ import sps
 Preplot importing functions.
 """
 
-def from_file (file):
+def from_file (file, realpath = None):
+    filepath = realpath or file["path"]
     if not "type" in file or file["type"] == "sps":
-        records = sps.from_file(file["path"], file["format"] if "format" in file else None )
+        records = sps.from_file(filepath, file["format"] if "format" in file else None )
     else:
         return "Not an SPS file"
```
Changes to the deleted-files purge script (file header not captured):

```diff
@@ -13,21 +13,27 @@ from datastore import Datastore
 
 if __name__ == '__main__':
 
-    print("Reading configuration")
-    surveys = configuration.surveys()
-
     print("Connecting to database")
     db = Datastore()
 
+    print("Reading configuration")
+    surveys = db.surveys()
+
     print("Reading surveys")
     for survey in surveys:
         print(f'Survey: {survey["id"]} ({survey["schema"]})')
         db.set_survey(survey["schema"])
 
         for file in db.list_files():
-            path = file[0]
-            if not os.path.exists(path):
-                print(path, "NOT FOUND")
-                db.del_file(path)
+            try:
+                path = configuration.translate_path(file[0])
+                if not os.path.exists(path):
+                    print(path, "NOT FOUND")
+                    db.del_file(file[0])
+            except TypeError:
+                # In case the logical path no longer matches
+                # the Dougal configuration.
+                print(file[0], "COULD NOT BE TRANSLATED TO A PHYSICAL PATH. DELETING")
+                db.del_file(file[0])
 
     print("Done")
```
Changes to the task runner shell script (file header not captured):

```diff
@@ -1,5 +1,6 @@
 #!/bin/bash
+
 
 DOUGAL_ROOT=${DOUGAL_ROOT:-$(dirname "$0")/..}
 
 BINDIR="$DOUGAL_ROOT/bin"
@@ -8,6 +9,20 @@ LOCKFILE=${LOCKFILE:-$VARDIR/runner.lock}
 
 [ -f ~/.profile ] && . ~/.profile
 
+DOUGAL_LOG_TAG="dougal.runner[$$]"
+
+# Only send output to the logger if we have the appropriate
+# configuration set.
+if [[ -n "$DOUGAL_LOG_TAG" && -n "$DOUGAL_LOG_FACILITY" ]]; then
+  function _logger () {
+    logger $*
+  }
+else
+  function _logger () {
+    : # This is the Bash null command
+  }
+fi
+
 function tstamp () {
   date -u +%Y-%m-%dT%H:%M:%SZ
 }
@@ -18,26 +33,44 @@ function prefix () {
 
 function print_log () {
   printf "$(prefix)\033[36m%s\033[0m\n" "$*"
+  _logger -t "$DOUGAL_LOG_TAG" -p "$DOUGAL_LOG_FACILITY.info" "$*"
 }
 
 function print_info () {
   printf "$(prefix)\033[0m%s\n" "$*"
+  _logger -t "$DOUGAL_LOG_TAG" -p "$DOUGAL_LOG_FACILITY.debug" "$*"
 }
 
 function print_warning () {
   printf "$(prefix)\033[33;1m%s\033[0m\n" "$*"
+  _logger -t "$DOUGAL_LOG_TAG" -p "$DOUGAL_LOG_FACILITY.warning" "$*"
 }
 
 function print_error () {
   printf "$(prefix)\033[31m%s\033[0m\n" "$*"
+  _logger -t "$DOUGAL_LOG_TAG" -p "$DOUGAL_LOG_FACILITY.error" "$*"
 }
 
 function run () {
-  PROGNAME=$(basename "$1")
+  PROGNAME=${PROGNAME:-$(basename "$1")}
 
   STDOUTLOG="$VARDIR/$PROGNAME.out"
   STDERRLOG="$VARDIR/$PROGNAME.err"
 
-  "$1" >"$STDOUTLOG" 2>"$STDERRLOG" || {
+  # What follows runs the command that we have been given (with any arguments passed)
+  # and logs:
+  # * stdout to $STDOUTLOG (a temporary file) and possibly to syslog, if enabled.
+  # * stderr to $STDERRLOG (a temporary file) and possibly to syslog, if enabled.
+  #
+  # When logging to syslog, stdout goes as debug level and stderr as warning (not error)
+  #
+  # The temporary file is used in case the command fails, at which point we try to log
+  # a warning in GitLab's alerts facility.
+
+  $* \
+    > >(tee $STDOUTLOG |_logger -t "dougal.runner.$PROGNAME[$$]" -p "$DOUGAL_LOG_FACILITY.debug") \
+    2> >(tee $STDERRLOG |_logger -t "dougal.runner.$PROGNAME[$$]" -p "$DOUGAL_LOG_FACILITY.warning") || {
 
     print_error "Failed: $PROGNAME"
     cat $STDOUTLOG
     cat $STDERRLOG
@@ -52,11 +85,17 @@ function run () {
 
     exit 2
   }
+  # cat $STDOUTLOG
 
+  unset PROGNAME
   rm $STDOUTLOG $STDERRLOG
 }
 
+function cleanup () {
+  if [[ -f $LOCKFILE ]]; then
+    rm "$LOCKFILE"
+  fi
+}
+
 if [[ -f $LOCKFILE ]]; then
   PID=$(cat "$LOCKFILE")
   if pgrep -F "$LOCKFILE"; then
@@ -74,6 +113,13 @@ echo "$$" > "$LOCKFILE" || {
 }
 print_info "Start run"
 
+print_log "Check if data is accessible"
+$BINDIR/check_mounts_present.py || {
+  print_warning "Import mounts not accessible. Inhibiting all tasks!"
+  cleanup
+  exit 253
+}
+
 print_log "Purge deleted files"
 run $BINDIR/purge_deleted_files.py
 
@@ -86,36 +132,47 @@ run $BINDIR/import_preplots.py
 print_log "Import raw P1/11"
 run $BINDIR/import_raw_p111.py
 
-print_log "Import raw P1/90"
-run $BINDIR/import_raw_p190.py
+#print_log "Import raw P1/90"
+#run $BINDIR/import_raw_p190.py
 
 print_log "Import final P1/11"
 run $BINDIR/import_final_p111.py
 
-print_log "Import final P1/90"
-run $BINDIR/import_final_p190.py
+#print_log "Import final P1/90"
+#run $BINDIR/import_final_p190.py
 
 print_log "Import SmartSource data"
 run $BINDIR/import_smsrc.py
 
-if [[ -z "$RUNNER_NOEXPORT" ]]; then
-  print_log "Export system data"
-  run $BINDIR/system_exports.py
-fi
+print_log "Import map user layers"
+run $BINDIR/import_map_layers.py
 
-if [[ -n "$RUNNER_IMPORT" ]]; then
-  print_log "Import system data"
-  run $BINDIR/system_imports.py
-fi
+# if [[ -z "$RUNNER_NOEXPORT" ]]; then
+#   print_log "Export system data"
+#   run $BINDIR/system_exports.py
+# fi
 
-print_log "Export QC data"
-run $BINDIR/human_exports_qc.py
+# if [[ -n "$RUNNER_IMPORT" ]]; then
+#   print_log "Import system data"
+#   run $BINDIR/system_imports.py
+# fi
 
-print_log "Export sequence data"
-run $BINDIR/human_exports_seis.py
+# print_log "Export QC data"
+# run $BINDIR/human_exports_qc.py
 
+# print_log "Export sequence data"
+# run $BINDIR/human_exports_seis.py
 
 print_log "Process ASAQC queue"
-run $DOUGAL_ROOT/lib/www/server/queues/asaqc/index.js
+# Run insecure in test mode:
+# export NODE_TLS_REJECT_UNAUTHORIZED=0
+PROGNAME=asaqc_queue run $DOUGAL_ROOT/lib/www/server/queues/asaqc/index.js
+
+print_log "Run database housekeeping actions"
+run $BINDIR/housekeep_database.py
+
+print_log "Run QCs"
+PROGNAME=run_qc run $DOUGAL_ROOT/lib/www/server/lib/qc/index.js
 
 rm "$LOCKFILE"
```
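The lock handling relies on the PID stored in the lock file: `pgrep -F` only reports success if that PID is still alive, so a stale file left behind by a crashed run does not block the next one. A hedged Python rendition of the same check (the path is an assumption):

```python
import os

LOCKFILE = "var/runner.lock"   # hypothetical path, mirroring $VARDIR/runner.lock

def locked():
    try:
        pid = int(open(LOCKFILE).read().strip())
    except (FileNotFoundError, ValueError):
        return False
    try:
        os.kill(pid, 0)        # signal 0: existence check only, nothing delivered
        return True            # process is alive, so the lock is held
    except ProcessLookupError:
        return False           # stale lock file; safe to take over

if not locked():
    with open(LOCKFILE, "w") as fd:
        fd.write(str(os.getpid()))
```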
Hunks in the system export script (file header not captured; paired identical rows again suggest whitespace-only changes, shown as context):

```diff
@@ -39,7 +39,7 @@ exportables = {
 }
 
 def primary_key (table, cursor):
 
     # https://wiki.postgresql.org/wiki/Retrieve_primary_key_columns
     qry = """
         SELECT a.attname, format_type(a.atttypid, a.atttypmod) AS data_type
@@ -50,7 +50,7 @@ def primary_key (table, cursor):
         WHERE i.indrelid = %s::regclass
             AND i.indisprimary;
     """
 
     cursor.execute(qry, (table,))
     return cursor.fetchall()
```
Hunks in the system import script (file header not captured; shown as context):

```diff
@@ -34,7 +34,7 @@ exportables = {
 }
 
 def primary_key (table, cursor):
 
     # https://wiki.postgresql.org/wiki/Retrieve_primary_key_columns
     qry = """
         SELECT a.attname, format_type(a.atttypid, a.atttypmod) AS data_type
@@ -45,13 +45,13 @@ def primary_key (table, cursor):
         WHERE i.indrelid = %s::regclass
             AND i.indisprimary;
     """
 
     cursor.execute(qry, (table,))
     return cursor.fetchall()
 
 def import_table(fd, table, columns, cursor):
     pk = [ r[0] for r in primary_key(table, cursor) ]
 
     # Create temporary table to import into
     temptable = "import_"+table
     print("Creating temporary table", temptable)
@@ -61,29 +61,29 @@ def import_table(fd, table, columns, cursor):
         AS SELECT {', '.join(pk + columns)} FROM {table}
         WITH NO DATA;
     """
 
     #print(qry)
     cursor.execute(qry)
 
     # Import into the temp table
     print("Import data into temporary table")
     cursor.copy_from(fd, temptable)
 
     # Update the destination table
     print("Updating destination table")
     setcols = ", ".join([ f"{c} = t.{c}" for c in columns ])
     wherecols = " AND ".join([ f"{table}.{c} = t.{c}" for c in pk ])
 
     qry = f"""
         UPDATE {table}
         SET {setcols}
         FROM {temptable} t
         WHERE {wherecols};
     """
 
     #print(qry)
     cursor.execute(qry)
 
 
 if __name__ == '__main__':
@@ -111,7 +111,7 @@ if __name__ == '__main__':
                 print(f"It looks like table {table} may have already been imported. Skipping it.")
             except FileNotFoundError:
                 print(f"File not found. Skipping {path}")
 
     db.conn.commit()
 
     print("Reading surveys")
@@ -130,7 +130,7 @@ if __name__ == '__main__':
         columns = exportables["survey"][table]
         path = os.path.join(pathPrefix, "-"+table)
         print(" ←← ", path, " →→ ", table, columns)
 
         try:
             with open(path, "rb") as fd:
                 if columns is not None:
@@ -143,7 +143,7 @@ if __name__ == '__main__':
             print(f"It looks like table {table} may have already been imported. Skipping it.")
         except FileNotFoundError:
             print(f"File not found. Skipping {path}")
 
     # If we don't commit the data does not actually get copied
     db.conn.commit()
```
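`import_table()` is an instance of the usual bulk-update idiom: COPY into a scratch table, then fold it into the destination with a single `UPDATE … FROM` join. A compact hedged rendition (table, key, and connection details are assumptions, not this project's schema):

```python
import io
import psycopg2

conn = psycopg2.connect("dbname=dougal")  # assumed connection string

def bulk_update(table, pk, columns, rows):
    """COPY rows into a temp table, then UPDATE the destination via a join."""
    with conn, conn.cursor() as cur:
        cols = pk + columns
        cur.execute(
            f"CREATE TEMP TABLE staging (LIKE {table} INCLUDING DEFAULTS) ON COMMIT DROP;"
        )
        buf = io.StringIO("".join("\t".join(map(str, r)) + "\n" for r in rows))
        cur.copy_from(buf, "staging", columns=cols)
        setcols = ", ".join(f"{c} = t.{c}" for c in columns)
        wherecols = " AND ".join(f"{table}.{c} = t.{c}" for c in pk)
        # One set-based UPDATE instead of a round-trip per row.
        cur.execute(f"UPDATE {table} SET {setcols} FROM staging t WHERE {wherecols};")
```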
Additions to the example configuration file (file header not captured):

```diff
@@ -32,6 +32,25 @@ imports:
   # least this many seconds ago.
   file_min_age: 60
 
+  # These paths refer to remote mounts which must be present in order
+  # for imports to work. If any of these paths are empty, import actions
+  # (including data deletion) will be inhibited. This is to cope with
+  # things like transient network failures.
+  mounts:
+    - /srv/mnt/Data
+
+  # These paths can be exposed to end users via the API. They should
+  # contain the locations where project data, or any other user data
+  # that needs to be accessible by Dougal, is located.
+  #
+  # This key can be either a string or an object:
+  # - If a string, it points to the root path for Dougal-accessible data.
+  # - If an object, there is an implicit root and the first-level
+  #   paths are denoted by the keys, with the values being their
+  #   respective physical paths.
+  # Non-absolute paths are relative to $DOUGAL_ROOT.
+  paths: /srv/mnt/Data
+
 queues:
   asaqc:
     request:
```
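Both accepted shapes of `imports.paths` can be exercised directly with the same loader the codebase imports; the layer names below are hypothetical:

```python
from yaml import full_load

string_form = full_load("""
imports:
  paths: /srv/mnt/Data
""")

object_form = full_load("""
imports:
  paths:
    projects: /srv/mnt/Data/projects    # logical /projects
    exports: var/exports                # relative, so under $DOUGAL_ROOT
""")

print(string_form["imports"]["paths"])             # '/srv/mnt/Data'
print(object_form["imports"]["paths"]["exports"])  # 'var/exports'
```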
A documentation hunk adds the database-version bootstrap step:

````diff
@@ -30,6 +30,7 @@ Ensure that the following packages are installed:
 
 ```bash
 psql -U postgres <./database-template.sql
+psql -U postgres <./database-version.sql
 ```
 
 ---
````
@@ -2,8 +2,8 @@
|
||||
-- PostgreSQL database dump
|
||||
--
|
||||
|
||||
-- Dumped from database version 12.4
|
||||
-- Dumped by pg_dump version 12.4
|
||||
-- Dumped from database version 14.2
|
||||
-- Dumped by pg_dump version 14.2
|
||||
|
||||
SET statement_timeout = 0;
|
||||
SET lock_timeout = 0;
|
||||
@@ -102,20 +102,6 @@ CREATE EXTENSION IF NOT EXISTS postgis WITH SCHEMA public;
|
||||
COMMENT ON EXTENSION postgis IS 'PostGIS geometry, geography, and raster spatial types and functions';
|
||||
|
||||
|
||||
--
|
||||
-- Name: postgis_raster; Type: EXTENSION; Schema: -; Owner: -
|
||||
--
|
||||
|
||||
CREATE EXTENSION IF NOT EXISTS postgis_raster WITH SCHEMA public;
|
||||
|
||||
|
||||
--
|
||||
-- Name: EXTENSION postgis_raster; Type: COMMENT; Schema: -; Owner:
|
||||
--
|
||||
|
||||
COMMENT ON EXTENSION postgis_raster IS 'PostGIS raster types and functions';
|
||||
|
||||
|
||||
--
|
||||
-- Name: postgis_sfcgal; Type: EXTENSION; Schema: -; Owner: -
|
||||
--
|
||||
@@ -144,6 +130,221 @@ CREATE EXTENSION IF NOT EXISTS postgis_topology WITH SCHEMA topology;
COMMENT ON EXTENSION postgis_topology IS 'PostGIS topology spatial types and functions';


--
-- Name: queue_item_status; Type: TYPE; Schema: public; Owner: postgres
--

CREATE TYPE public.queue_item_status AS ENUM (
    'queued',
    'cancelled',
    'failed',
    'sent'
);


ALTER TYPE public.queue_item_status OWNER TO postgres;

--
-- Name: event_meta(timestamp with time zone); Type: FUNCTION; Schema: public; Owner: postgres
--

CREATE FUNCTION public.event_meta(tstamp timestamp with time zone) RETURNS jsonb
    LANGUAGE plpgsql
    AS $$
BEGIN
  RETURN event_meta(tstamp, NULL, NULL);
END;
$$;


ALTER FUNCTION public.event_meta(tstamp timestamp with time zone) OWNER TO postgres;

--
-- Name: FUNCTION event_meta(tstamp timestamp with time zone); Type: COMMENT; Schema: public; Owner: postgres
--

COMMENT ON FUNCTION public.event_meta(tstamp timestamp with time zone) IS 'Overload of event_meta (timestamptz, integer, integer) for use when searching by timestamp.';


--
-- Name: event_meta(integer, integer); Type: FUNCTION; Schema: public; Owner: postgres
--

CREATE FUNCTION public.event_meta(sequence integer, point integer) RETURNS jsonb
    LANGUAGE plpgsql
    AS $$
BEGIN
  RETURN event_meta(NULL, sequence, point);
END;
$$;


ALTER FUNCTION public.event_meta(sequence integer, point integer) OWNER TO postgres;

--
-- Name: FUNCTION event_meta(sequence integer, point integer); Type: COMMENT; Schema: public; Owner: postgres
--

COMMENT ON FUNCTION public.event_meta(sequence integer, point integer) IS 'Overload of event_meta (timestamptz, integer, integer) for use when searching by sequence / point.';


--
-- Name: event_meta(timestamp with time zone, integer, integer); Type: FUNCTION; Schema: public; Owner: postgres
--

CREATE FUNCTION public.event_meta(tstamp timestamp with time zone, sequence integer, point integer) RETURNS jsonb
    LANGUAGE plpgsql
    AS $$
DECLARE
  result jsonb;
  -- Tolerance is hard-coded, at least until a need to expose it arises.
  tolerance numeric;
BEGIN
  tolerance := 3; -- seconds

  -- We search by timestamp if we can, as that's a lot quicker
  IF tstamp IS NOT NULL THEN

    SELECT meta
    INTO result
    FROM real_time_inputs rti
    WHERE
      rti.tstamp BETWEEN (event_meta.tstamp - tolerance * interval '1 second') AND (event_meta.tstamp + tolerance * interval '1 second')
    ORDER BY abs(extract('epoch' FROM rti.tstamp - event_meta.tstamp ))
    LIMIT 1;

  ELSE

    SELECT meta
    INTO result
    FROM real_time_inputs rti
    WHERE
      (meta->>'_sequence')::integer = event_meta.sequence AND
      (meta->>'_point')::integer = event_meta.point
    ORDER BY rti.tstamp DESC
    LIMIT 1;

  END IF;

  RETURN result;

END;
$$;


ALTER FUNCTION public.event_meta(tstamp timestamp with time zone, sequence integer, point integer) OWNER TO postgres;

--
-- Name: FUNCTION event_meta(tstamp timestamp with time zone, sequence integer, point integer); Type: COMMENT; Schema: public; Owner: postgres
--

COMMENT ON FUNCTION public.event_meta(tstamp timestamp with time zone, sequence integer, point integer) IS 'Return the real-time event metadata associated with a sequence / point in the current project or
with a given timestamp. The timestamp is first searched for in the shot tables
of the current prospect or, if not found, in the real-time data.

Returns a JSONB object.';


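-- Usage sketch (timestamp and sequence / point values invented):
--
--   SELECT public.event_meta('2022-03-01 12:00:00+00');  -- search by timestamp
--   SELECT public.event_meta(1042, 2001);                -- search by sequence / point
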
--
-- Name: geometry_from_tstamp(timestamp with time zone, numeric); Type: FUNCTION; Schema: public; Owner: postgres
--

CREATE FUNCTION public.geometry_from_tstamp(ts timestamp with time zone, tolerance numeric, OUT geometry public.geometry, OUT delta numeric) RETURNS record
    LANGUAGE sql
    AS $$
  SELECT
    geometry,
    extract('epoch' FROM tstamp - ts ) AS delta
  FROM real_time_inputs
  WHERE
    geometry IS NOT NULL AND
    tstamp BETWEEN (ts - tolerance * interval '1 second') AND (ts + tolerance * interval '1 second')
  ORDER BY abs(extract('epoch' FROM tstamp - ts ))
  LIMIT 1;
$$;


ALTER FUNCTION public.geometry_from_tstamp(ts timestamp with time zone, tolerance numeric, OUT geometry public.geometry, OUT delta numeric) OWNER TO postgres;

--
-- Name: FUNCTION geometry_from_tstamp(ts timestamp with time zone, tolerance numeric, OUT geometry public.geometry, OUT delta numeric); Type: COMMENT; Schema: public; Owner: postgres
--

COMMENT ON FUNCTION public.geometry_from_tstamp(ts timestamp with time zone, tolerance numeric, OUT geometry public.geometry, OUT delta numeric) IS 'Get geometry from timestamp';


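-- Usage sketch (timestamp invented): fetch the closest geometry within
-- 3 seconds of a given instant, along with the time delta in seconds.
--
--   SELECT geometry, delta
--   FROM public.geometry_from_tstamp('2022-03-01 12:00:00+00', 3);
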
--
-- Name: interpolate_geometry_from_tstamp(timestamp with time zone, numeric); Type: FUNCTION; Schema: public; Owner: postgres
--

CREATE FUNCTION public.interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric) RETURNS public.geometry
    LANGUAGE plpgsql
    AS $$
DECLARE
  ts0 timestamptz;
  ts1 timestamptz;
  geom0 geometry;
  geom1 geometry;
  span numeric;
  fraction numeric;
BEGIN

  SELECT tstamp, geometry
  INTO ts0, geom0
  FROM real_time_inputs
  WHERE tstamp <= ts
  ORDER BY tstamp DESC
  LIMIT 1;

  SELECT tstamp, geometry
  INTO ts1, geom1
  FROM real_time_inputs
  WHERE tstamp >= ts
  ORDER BY tstamp ASC
  LIMIT 1;

  IF geom0 IS NULL OR geom1 IS NULL THEN
    RAISE NOTICE 'Interpolation failed (no straddling data)';
    RETURN NULL;
  END IF;

  -- See if we got an exact match
  IF ts0 = ts THEN
    RETURN geom0;
  ELSIF ts1 = ts THEN
    RETURN geom1;
  END IF;

  span := extract('epoch' FROM ts1 - ts0);

  IF span > maxspan THEN
    RAISE NOTICE 'Interpolation timespan % outside maximum requested (%)', span, maxspan;
    RETURN NULL;
  END IF;

  fraction := extract('epoch' FROM ts - ts0) / span;

  IF fraction < 0 OR fraction > 1 THEN
    RAISE NOTICE 'Requested timestamp % outside of interpolation span (fraction: %)', ts, fraction;
    RETURN NULL;
  END IF;

  RETURN ST_LineInterpolatePoint(ST_MakeLine(geom0, geom1), fraction);

END;
$$;


ALTER FUNCTION public.interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric) OWNER TO postgres;

--
-- Name: FUNCTION interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric); Type: COMMENT; Schema: public; Owner: postgres
--

COMMENT ON FUNCTION public.interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric) IS 'Interpolate a position over a given maximum timespan (in seconds)
based on real-time inputs. Returns a POINT geometry.';


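-- Usage sketch (values invented): interpolate a position at a timestamp,
-- refusing to interpolate across gaps longer than 60 seconds.
--
--   SELECT public.interpolate_geometry_from_tstamp('2022-03-01 12:00:00+00', 60);
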
--
-- Name: notify(); Type: FUNCTION; Schema: public; Owner: postgres
--
@@ -182,23 +383,110 @@ $$;

ALTER FUNCTION public.notify() OWNER TO postgres;

--
-- Name: sequence_shot_from_tstamp(timestamp with time zone); Type: FUNCTION; Schema: public; Owner: postgres
--

CREATE FUNCTION public.sequence_shot_from_tstamp(ts timestamp with time zone, OUT sequence numeric, OUT point numeric, OUT delta numeric) RETURNS record
    LANGUAGE sql
    AS $$
  SELECT * FROM public.sequence_shot_from_tstamp(ts, 3);
$$;


ALTER FUNCTION public.sequence_shot_from_tstamp(ts timestamp with time zone, OUT sequence numeric, OUT point numeric, OUT delta numeric) OWNER TO postgres;

--
-- Name: FUNCTION sequence_shot_from_tstamp(ts timestamp with time zone, OUT sequence numeric, OUT point numeric, OUT delta numeric); Type: COMMENT; Schema: public; Owner: postgres
--

COMMENT ON FUNCTION public.sequence_shot_from_tstamp(ts timestamp with time zone, OUT sequence numeric, OUT point numeric, OUT delta numeric) IS 'Get sequence and shotpoint from timestamp.

Overloaded form in which the tolerance value is implied and defaults to three seconds.';


--
-- Name: sequence_shot_from_tstamp(timestamp with time zone, numeric); Type: FUNCTION; Schema: public; Owner: postgres
--

CREATE FUNCTION public.sequence_shot_from_tstamp(ts timestamp with time zone, tolerance numeric, OUT sequence numeric, OUT point numeric, OUT delta numeric) RETURNS record
    LANGUAGE sql
    AS $$
  SELECT
    (meta->>'_sequence')::numeric AS sequence,
    (meta->>'_point')::numeric AS point,
    extract('epoch' FROM (meta->>'tstamp')::timestamptz - ts ) AS delta
  FROM real_time_inputs
  WHERE
    meta ? '_sequence' AND
    abs(extract('epoch' FROM (meta->>'tstamp')::timestamptz - ts )) < tolerance
  ORDER BY abs(extract('epoch' FROM (meta->>'tstamp')::timestamptz - ts ))
  LIMIT 1;
$$;


ALTER FUNCTION public.sequence_shot_from_tstamp(ts timestamp with time zone, tolerance numeric, OUT sequence numeric, OUT point numeric, OUT delta numeric) OWNER TO postgres;

--
-- Name: FUNCTION sequence_shot_from_tstamp(ts timestamp with time zone, tolerance numeric, OUT sequence numeric, OUT point numeric, OUT delta numeric); Type: COMMENT; Schema: public; Owner: postgres
--

COMMENT ON FUNCTION public.sequence_shot_from_tstamp(ts timestamp with time zone, tolerance numeric, OUT sequence numeric, OUT point numeric, OUT delta numeric) IS 'Get sequence and shotpoint from timestamp.

Given a timestamp this function returns the closest shot to it within the given tolerance value.

This uses the `real_time_inputs` table and it does not give an indication of which project the shotpoint belongs to. It is assumed that a single project is being acquired at a given time.';


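-- Usage sketch (timestamp invented): both overloads return NULL columns
-- when no shot falls within the tolerance.
--
--   SELECT * FROM public.sequence_shot_from_tstamp('2022-03-01 12:00:00+00');     -- 3 s default
--   SELECT * FROM public.sequence_shot_from_tstamp('2022-03-01 12:00:00+00', 10); -- explicit tolerance
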
--
-- Name: set_survey(text); Type: PROCEDURE; Schema: public; Owner: postgres
--

CREATE PROCEDURE public.set_survey(project_id text)
CREATE PROCEDURE public.set_survey(IN project_id text)
    LANGUAGE sql
    AS $$
  SELECT set_config('search_path', (SELECT schema||',public' FROM public.projects WHERE pid = lower(project_id)), false);
$$;


ALTER PROCEDURE public.set_survey(project_id text) OWNER TO postgres;
ALTER PROCEDURE public.set_survey(IN project_id text) OWNER TO postgres;

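-- Usage sketch (project id invented): point the session search_path at a
-- project schema so that unqualified table names resolve to that survey.
--
--   CALL public.set_survey('myproject');
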
--
-- Name: update_timestamp(); Type: FUNCTION; Schema: public; Owner: postgres
--

CREATE FUNCTION public.update_timestamp() RETURNS trigger
    LANGUAGE plpgsql
    AS $$
BEGIN
  IF NEW.updated_on IS NOT NULL THEN
    NEW.updated_on := current_timestamp;
  END IF;
  RETURN NEW;
EXCEPTION
  WHEN undefined_column THEN RETURN NEW;
END;
$$;


ALTER FUNCTION public.update_timestamp() OWNER TO postgres;

SET default_tablespace = '';

SET default_table_access_method = heap;

--
-- Name: info; Type: TABLE; Schema: public; Owner: postgres
--

CREATE TABLE public.info (
    key text NOT NULL,
    value jsonb
);


ALTER TABLE public.info OWNER TO postgres;

--
-- Name: projects; Type: TABLE; Schema: public; Owner: postgres
--
@@ -213,6 +501,46 @@ CREATE TABLE public.projects (

ALTER TABLE public.projects OWNER TO postgres;

--
-- Name: queue_items; Type: TABLE; Schema: public; Owner: postgres
--

CREATE TABLE public.queue_items (
    item_id integer NOT NULL,
    status public.queue_item_status DEFAULT 'queued'::public.queue_item_status NOT NULL,
    payload jsonb NOT NULL,
    results jsonb DEFAULT '{}'::jsonb NOT NULL,
    created_on timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    updated_on timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    not_before timestamp with time zone DEFAULT '1970-01-01 00:00:00+00'::timestamp with time zone NOT NULL,
    parent_id integer
);


ALTER TABLE public.queue_items OWNER TO postgres;

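-- Illustrative insert (payload invented): item_id, status, timestamps and
-- results all have defaults, so a minimal enqueue only needs a payload.
--
--   INSERT INTO public.queue_items (payload)
--   VALUES ('{"action": "asaqc", "sequence": 1042}'::jsonb);
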
--
-- Name: queue_items_item_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
--

CREATE SEQUENCE public.queue_items_item_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;


ALTER TABLE public.queue_items_item_id_seq OWNER TO postgres;

--
-- Name: queue_items_item_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
--

ALTER SEQUENCE public.queue_items_item_id_seq OWNED BY public.queue_items.item_id;


--
-- Name: real_time_inputs; Type: TABLE; Schema: public; Owner: postgres
--
@@ -227,16 +555,19 @@ CREATE TABLE public.real_time_inputs (
ALTER TABLE public.real_time_inputs OWNER TO postgres;

--
-- Name: info; Type: TABLE; Schema: public; Owner: postgres
-- Name: queue_items item_id; Type: DEFAULT; Schema: public; Owner: postgres
--

CREATE TABLE public.info (
    key text NOT NULL,
    value jsonb
);
ALTER TABLE ONLY public.queue_items ALTER COLUMN item_id SET DEFAULT nextval('public.queue_items_item_id_seq'::regclass);


ALTER TABLE public.info OWNER TO postgres;
--
-- Name: info info_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
--

ALTER TABLE ONLY public.info
    ADD CONSTRAINT info_pkey PRIMARY KEY (key);


--
-- Name: projects projects_name_key; Type: CONSTRAINT; Schema: public; Owner: postgres
@@ -262,14 +593,12 @@ ALTER TABLE ONLY public.projects
    ADD CONSTRAINT projects_schema_key UNIQUE (schema);



--
-- Name: info info_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
-- Name: queue_items queue_items_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
--

ALTER TABLE ONLY public.info
    ADD CONSTRAINT info_pkey PRIMARY KEY (key);

ALTER TABLE ONLY public.queue_items
    ADD CONSTRAINT queue_items_pkey PRIMARY KEY (item_id);


--
@@ -279,6 +608,13 @@ ALTER TABLE ONLY public.info
CREATE INDEX tstamp_idx ON public.real_time_inputs USING btree (tstamp DESC);


--
-- Name: info info_tg; Type: TRIGGER; Schema: public; Owner: postgres
--

CREATE TRIGGER info_tg AFTER INSERT OR DELETE OR UPDATE ON public.info FOR EACH ROW EXECUTE FUNCTION public.notify('info');


--
-- Name: projects projects_tg; Type: TRIGGER; Schema: public; Owner: postgres
--
@@ -286,6 +622,20 @@ CREATE INDEX tstamp_idx ON public.real_time_inputs USING btree (tstamp DESC);
CREATE TRIGGER projects_tg AFTER INSERT OR DELETE OR UPDATE ON public.projects FOR EACH ROW EXECUTE FUNCTION public.notify('project');


--
-- Name: queue_items queue_items_tg0; Type: TRIGGER; Schema: public; Owner: postgres
--

CREATE TRIGGER queue_items_tg0 BEFORE INSERT OR UPDATE ON public.queue_items FOR EACH ROW EXECUTE FUNCTION public.update_timestamp();


--
-- Name: queue_items queue_items_tg1; Type: TRIGGER; Schema: public; Owner: postgres
--

CREATE TRIGGER queue_items_tg1 AFTER INSERT OR DELETE OR UPDATE ON public.queue_items FOR EACH ROW EXECUTE FUNCTION public.notify('queue_items');


--
-- Name: real_time_inputs real_time_inputs_tg; Type: TRIGGER; Schema: public; Owner: postgres
--
@@ -294,10 +644,11 @@ CREATE TRIGGER real_time_inputs_tg AFTER INSERT ON public.real_time_inputs FOR E


--
-- Name: info info_tg; Type: TRIGGER; Schema: public; Owner: postgres
-- Name: queue_items queue_items_parent_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--

CREATE TRIGGER info_tg AFTER INSERT OR DELETE OR UPDATE ON public.info FOR EACH ROW EXECUTE FUNCTION public.notify('info');
ALTER TABLE ONLY public.queue_items
    ADD CONSTRAINT queue_items_parent_id_fkey FOREIGN KEY (parent_id) REFERENCES public.queue_items(item_id);


--

5
etc/db/database-version.sql
Normal file
@@ -0,0 +1,5 @@
\connect dougal

INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.2"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.2"}' WHERE public.info.key = 'version';
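-- A quick way to verify the result (schema version as per this file):
--
--   SELECT value->>'db_schema' FROM public.info WHERE key = 'version';  -- 0.4.2
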
File diff suppressed because it is too large
189
etc/db/upgrades/upgrade11-v0.2.1-tstamp-functions.sql
Normal file
@@ -0,0 +1,189 @@
-- Add function to retrieve sequence/shotpoint from timestamps and vice-versa
--
-- New schema version: 0.2.1
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- Two new functions are defined:
--
-- sequence_shot_from_tstamp(tstamp, [tolerance]) → sequence, point, delta
--
-- Returns a sequence + shotpoint if one falls within `tolerance` seconds
-- of `tstamp`. The tolerance may be omitted in which case it defaults to
-- three seconds. If multiple values match, it returns the closest in time.
--
-- tstamp_from_sequence_shot(sequence, point) → tstamp
--
-- Returns a timestamp given a sequence and point number.
--
-- NOTE: This last function must be called from a search path including a
-- project schema, as it accesses the raw_shots table.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can take a while if run on a large database.
-- NOTE: It can be applied multiple times without ill effect.
-- NOTE: This will lock the database while the transaction is active.
--
-- WARNING: Applying this upgrade drops the old tables. Ensure that you
-- have migrated the data first.
--
-- NOTE: This is a patch version change so it does not require a
-- backend restart.

BEGIN;

CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION tstamp_from_sequence_shot(
    IN s numeric,
    IN p numeric,
    OUT "ts" timestamptz)
  AS $inner$
    SELECT tstamp FROM raw_shots WHERE sequence = s AND point = p LIMIT 1;
  $inner$ LANGUAGE SQL;


  COMMENT ON FUNCTION tstamp_from_sequence_shot(numeric, numeric)
  IS 'Get the timestamp of an existing shotpoint.';


  CREATE OR REPLACE FUNCTION tstamp_interpolate(s numeric, p numeric) RETURNS timestamptz
  AS $inner$
  DECLARE
    ts0 timestamptz;
    ts1 timestamptz;
    pt0 numeric;
    pt1 numeric;
  BEGIN

    SELECT tstamp, point
    INTO ts0, pt0
    FROM raw_shots
    WHERE sequence = s AND point < p
    ORDER BY point DESC LIMIT 1;


    SELECT tstamp, point
    INTO ts1, pt1
    FROM raw_shots
    WHERE sequence = s AND point > p
    ORDER BY point ASC LIMIT 1;

    RETURN (ts1-ts0)/abs(pt1-pt0)*abs(p-pt0)+ts0;

  END;
  $inner$ LANGUAGE PLPGSQL;

  COMMENT ON FUNCTION tstamp_interpolate(numeric, numeric)
  IS 'Interpolate a timestamp given sequence and point values.

It will try to find the points immediately before and after in the sequence and interpolate into the gap, which may consist of multiple missed shots.

If called on an existing shotpoint it will return an interpolated timestamp as if the shotpoint did not exist, as opposed to returning its actual timestamp.

Returns NULL if it is not possible to interpolate.';

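  -- Worked example (values invented): with shots in sequence 12 at
  -- point 100 (12:00:00Z) and point 104 (12:00:08Z), a call such as
  --   SELECT tstamp_interpolate(12, 102);
  -- returns 12:00:04Z, i.e. ts0 + (ts1 - ts0) / abs(pt1 - pt0) * abs(p - pt0).
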
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $$
DECLARE
  row RECORD;
BEGIN


  CREATE OR REPLACE FUNCTION public.sequence_shot_from_tstamp(
    IN ts timestamptz,
    IN tolerance numeric,
    OUT "sequence" numeric,
    OUT "point" numeric,
    OUT "delta" numeric)
  AS $inner$
    SELECT
      (meta->>'_sequence')::numeric AS sequence,
      (meta->>'_point')::numeric AS point,
      extract('epoch' FROM (meta->>'tstamp')::timestamptz - ts ) AS delta
    FROM real_time_inputs
    WHERE
      meta ? '_sequence' AND
      abs(extract('epoch' FROM (meta->>'tstamp')::timestamptz - ts )) < tolerance
    ORDER BY abs(extract('epoch' FROM (meta->>'tstamp')::timestamptz - ts ))
    LIMIT 1;
  $inner$ LANGUAGE SQL;


  COMMENT ON FUNCTION public.sequence_shot_from_tstamp(timestamptz, numeric)
  IS 'Get sequence and shotpoint from timestamp.

Given a timestamp this function returns the closest shot to it within the given tolerance value.

This uses the `real_time_inputs` table and it does not give an indication of which project the shotpoint belongs to. It is assumed that a single project is being acquired at a given time.';

  CREATE OR REPLACE FUNCTION public.sequence_shot_from_tstamp(
    IN ts timestamptz,
    OUT "sequence" numeric,
    OUT "point" numeric,
    OUT "delta" numeric)
  AS $inner$
    SELECT * FROM public.sequence_shot_from_tstamp(ts, 3);
  $inner$ LANGUAGE SQL;

  COMMENT ON FUNCTION public.sequence_shot_from_tstamp(timestamptz)
  IS 'Get sequence and shotpoint from timestamp.

Overloaded form in which the tolerance value is implied and defaults to three seconds.';

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;

END;
$$ LANGUAGE plpgsql;

CALL pg_temp.upgrade_database();

CALL show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade_database ();

CALL show_notice('Updating db_schema version');


INSERT INTO public.info VALUES ('version', '{"db_schema": "0.2.1"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.2.1"}' WHERE public.info.key = 'version';


--
--NOTE Run `COMMIT;` now if all went well
--
@@ -1,6 +1,6 @@
-- Add new event log schema.
--
-- New schema version: 0.2.1
-- New schema version: 0.2.2
--
-- ATTENTION:
--
@@ -156,6 +156,15 @@ BEGIN
BEGIN
  IF (TG_OP = 'INSERT') THEN

    -- Complete the tstamp if possible
    IF NEW.sequence IS NOT NULL AND NEW.point IS NOT NULL AND NEW.tstamp IS NULL THEN
      SELECT COALESCE(
        tstamp_from_sequence_shot(NEW.sequence, NEW.point),
        tstamp_interpolate(NEW.sequence, NEW.point)
      )
      INTO NEW.tstamp;
    END IF;

    -- Any id that is provided will be ignored. The generated
    -- id will match uid.
    INSERT INTO event_log_full
@@ -178,6 +187,17 @@ BEGIN
      RETURN NULL;
    END IF;

    -- If the sequence / point has changed, and no new tstamp is provided, get one
    IF NEW.sequence <> OLD.sequence OR NEW.point <> OLD.point
       AND NEW.sequence IS NOT NULL AND NEW.point IS NOT NULL
       AND NEW.tstamp IS NULL OR NEW.tstamp = OLD.tstamp THEN
      SELECT COALESCE(
        tstamp_from_sequence_shot(NEW.sequence, NEW.point),
        tstamp_interpolate(NEW.sequence, NEW.point)
      )
      INTO NEW.tstamp;
    END IF;

    UPDATE event_log_full
    SET validity = tstzrange(lower(validity), current_timestamp)
    WHERE validity @> current_timestamp AND id = OLD.id;
@@ -231,6 +251,75 @@ BEGIN
  UPDATE event_log_full SET meta = meta - 'geometry' WHERE meta->>'geometry' IS NULL;
  UPDATE event_log_full SET meta = meta - 'readonly' WHERE (meta->'readonly')::boolean IS false;


  -- This function used the superseded `events` view.
  -- We need to drop it because we're changing the return type.
  DROP FUNCTION IF EXISTS label_in_sequence (_sequence integer, _label text);

  CREATE OR REPLACE FUNCTION label_in_sequence (_sequence integer, _label text)
  RETURNS event_log
  LANGUAGE sql
  AS $inner$
    SELECT * FROM event_log WHERE sequence = _sequence AND _label = ANY(labels);
  $inner$;

  -- This function used the superseded `events` view (and a strange logic).
  CREATE OR REPLACE PROCEDURE handle_final_line_events (_seq integer, _label text, _column text)
  LANGUAGE plpgsql
  AS $inner$

  DECLARE
    _line final_lines_summary%ROWTYPE;
    _column_value integer;
    _tg_name text := 'final_line';
    _event event_log%ROWTYPE;
    event_id integer;
  BEGIN

    SELECT * INTO _line FROM final_lines_summary WHERE sequence = _seq;
    _event := label_in_sequence(_seq, _label);
    _column_value := row_to_json(_line)->>_column;

    --RAISE NOTICE '% is %', _label, _event;
    --RAISE NOTICE 'Line is %', _line;
    --RAISE NOTICE '% is % (%)', _column, _column_value, _label;

    IF _event IS NULL THEN
      --RAISE NOTICE 'We will populate the event log from the sequence data';

      INSERT INTO event_log (sequence, point, remarks, labels, meta)
      VALUES (
        -- The sequence
        _seq,
        -- The shotpoint
        _column_value,
        -- Remark. Something like "FSP <linename>"
        format('%s %s', _label, (SELECT meta->>'lineName' FROM final_lines WHERE sequence = _seq)),
        -- Label
        ARRAY[_label],
        -- Meta. Something like {"auto" : {"FSP" : "final_line"}}
        json_build_object('auto', json_build_object(_label, _tg_name))
      );

    ELSE
      --RAISE NOTICE 'We may populate the sequence meta from the event log';
      --RAISE NOTICE 'Unless the event log was populated by us previously';
      --RAISE NOTICE 'Populated by us previously? %', _event.meta->'auto'->>_label = _tg_name;

      IF _event.meta->'auto'->>_label IS DISTINCT FROM _tg_name THEN

        --RAISE NOTICE 'Adding % found in events log to final_line meta', _label;
        UPDATE final_lines
        SET meta = jsonb_set(meta, ARRAY[_label], to_jsonb(_event.point))
        WHERE sequence = _seq;

      END IF;

    END IF;
  END;
  $inner$;


END;
$$ LANGUAGE plpgsql;

@@ -258,9 +347,9 @@ CALL show_notice('Updating db_schema version');
-- This is technically still compatible with 0.2.0 as we are only adding
-- some more tables and views but not yet dropping the old ones, which we
-- will do separately so that these scripts do not get too big.
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.2.1"}')
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.2.2"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.2.1"}' WHERE public.info.key = 'version';
SET value = public.info.value || '{"db_schema": "0.2.2"}' WHERE public.info.key = 'version';


CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
136
etc/db/upgrades/upgrade15-v0.3.2-fix-project-summary.sql
Normal file
@@ -0,0 +1,136 @@
-- Fix project_summary view.
--
-- New schema version: 0.3.2
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This fixes a problem with the project_summary view. In its common table
-- expression, the view definition tried to search public.projects based on
-- the search path value with the following expression:
--
-- (current_setting('search_path'::text) ~~ (p.schema || '%'::text))
--
-- That is of course bound to fail as soon as the schema goes above `survey_9`
-- because `survey_10 LIKE ('survey_1' || '%')` is TRUE.
--
-- The new mechanism relies on splitting the search_path.
--
-- NOTE: The survey schema needs to be the leftmost element in search_path.
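-- Illustration of the failure mode (schema names invented):
--
--   SELECT 'survey_10,public' LIKE ('survey_1' || '%');           -- true: false positive
--   SELECT split_part('survey_10,public', ',', 1) = 'survey_1';   -- false: correct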
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE VIEW project_summary AS
    WITH fls AS (
      SELECT avg((final_lines_summary.duration / ((final_lines_summary.num_points - 1))::double precision)) AS shooting_rate,
        avg((final_lines_summary.length / date_part('epoch'::text, final_lines_summary.duration))) AS speed,
        sum(final_lines_summary.duration) AS prod_duration,
        sum(final_lines_summary.length) AS prod_distance
      FROM final_lines_summary
    ), project AS (
      SELECT p.pid,
        p.name,
        p.schema
      FROM public.projects p
      WHERE (split_part(current_setting('search_path'::text), ','::text, 1) = p.schema)
    )
    SELECT project.pid,
      project.name,
      project.schema,
      ( SELECT count(*) AS count
        FROM preplot_lines
        WHERE (preplot_lines.class = 'V'::bpchar)) AS lines,
      ps.total,
      ps.virgin,
      ps.prime,
      ps.other,
      ps.ntba,
      ps.remaining,
      ( SELECT to_json(fs.*) AS to_json
        FROM final_shots fs
        ORDER BY fs.tstamp
        LIMIT 1) AS fsp,
      ( SELECT to_json(fs.*) AS to_json
        FROM final_shots fs
        ORDER BY fs.tstamp DESC
        LIMIT 1) AS lsp,
      ( SELECT count(*) AS count
        FROM raw_lines rl) AS seq_raw,
      ( SELECT count(*) AS count
        FROM final_lines rl) AS seq_final,
      fls.prod_duration,
      fls.prod_distance,
      fls.speed AS shooting_rate
    FROM preplot_summary ps,
      fls,
      project;


  ALTER TABLE project_summary OWNER TO postgres;

END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_15 () AS $$
DECLARE
  row RECORD;
BEGIN
  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$$ LANGUAGE plpgsql;

CALL pg_temp.upgrade_15();

CALL show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade_15 ();

CALL show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.2"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.2"}' WHERE public.info.key = 'version';


CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
169
etc/db/upgrades/upgrade16-v0.3.3-fix-event-log-edit.sql
Normal file
@@ -0,0 +1,169 @@
-- Fix not being able to edit a time-based event.
--
-- New schema version: 0.3.3
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- The event_log_update() function that gets called when trying to update
-- the event_log view will not work if the caller does provide a timestamp
-- or sequence + point in the list of fields to be updated. See:
-- https://gitlab.com/wgp/dougal/software/-/issues/198
--
-- This fixes the problem by liberally using COALESCE() to merge the OLD
-- and NEW records.
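-- For example (id invented), a partial update such as:
--
--   UPDATE event_log SET remarks = 'revised' WHERE id = 42;
--
-- now keeps tstamp, sequence and point from the old row via COALESCE().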
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION event_log_update() RETURNS trigger
  LANGUAGE plpgsql
  AS $inner$
  BEGIN
    IF (TG_OP = 'INSERT') THEN

      -- Complete the tstamp if possible
      IF NEW.sequence IS NOT NULL AND NEW.point IS NOT NULL AND NEW.tstamp IS NULL THEN
        SELECT COALESCE(
          tstamp_from_sequence_shot(NEW.sequence, NEW.point),
          tstamp_interpolate(NEW.sequence, NEW.point)
        )
        INTO NEW.tstamp;
      END IF;

      -- Any id that is provided will be ignored. The generated
      -- id will match uid.
      INSERT INTO event_log_full
        (tstamp, sequence, point, remarks, labels, meta)
        VALUES (NEW.tstamp, NEW.sequence, NEW.point, NEW.remarks, NEW.labels, NEW.meta);

      RETURN NEW;

    ELSIF (TG_OP = 'UPDATE') THEN
      -- Set end of validity and create a new entry with id
      -- matching that of the old entry.

      -- NOTE: Do not allow updating an event that has meta.readonly = true
      IF EXISTS
        (SELECT *
         FROM event_log_full
         WHERE id = OLD.id AND (meta->>'readonly')::boolean IS TRUE)
      THEN
        RAISE check_violation USING MESSAGE = 'Cannot modify read-only entry';
        RETURN NULL;
      END IF;

      -- If the sequence / point has changed, and no new tstamp is provided, get one
      IF NEW.sequence <> OLD.sequence OR NEW.point <> OLD.point
         AND NEW.sequence IS NOT NULL AND NEW.point IS NOT NULL
         AND NEW.tstamp IS NULL OR NEW.tstamp = OLD.tstamp THEN
        SELECT COALESCE(
          tstamp_from_sequence_shot(NEW.sequence, NEW.point),
          tstamp_interpolate(NEW.sequence, NEW.point)
        )
        INTO NEW.tstamp;
      END IF;

      UPDATE event_log_full
      SET validity = tstzrange(lower(validity), current_timestamp)
      WHERE validity @> current_timestamp AND id = OLD.id;

      -- Any attempt to modify id will be ignored.
      INSERT INTO event_log_full
        (id, tstamp, sequence, point, remarks, labels, meta)
        VALUES (
          OLD.id,
          COALESCE(NEW.tstamp, OLD.tstamp),
          COALESCE(NEW.sequence, OLD.sequence),
          COALESCE(NEW.point, OLD.point),
          COALESCE(NEW.remarks, OLD.remarks),
          COALESCE(NEW.labels, OLD.labels),
          COALESCE(NEW.meta, OLD.meta)
        );

      RETURN NEW;

    ELSIF (TG_OP = 'DELETE') THEN
      -- Set end of validity.

      -- NOTE: We *do* allow deleting an event that has meta.readonly = true
      -- This could be of interest if for instance we wanted to keep the history
      -- of QC results for a point, provided that the QC routines write to
      -- event_log and not event_log_full
      UPDATE event_log_full
      SET validity = tstzrange(lower(validity), current_timestamp)
      WHERE validity @> current_timestamp AND id = OLD.id;

      RETURN NULL;
    END IF;
  END;
  $inner$;

  CREATE OR REPLACE TRIGGER event_log_tg INSTEAD OF INSERT OR DELETE OR UPDATE ON event_log FOR EACH ROW EXECUTE FUNCTION event_log_update();


END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_16 () AS $$
DECLARE
  row RECORD;
BEGIN
  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$$ LANGUAGE plpgsql;

CALL pg_temp.upgrade_16();

CALL show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade_16 ();

CALL show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.3"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.3"}' WHERE public.info.key = 'version';


CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
163
etc/db/upgrades/upgrade17-v0.3.4-geometry-functions.sql
Normal file
@@ -0,0 +1,163 @@
-- Add geometry helper functions and the augment_event_data() procedure.
--
-- New schema version: 0.3.4
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This creates a new procedure augment_event_data() which tries to
-- populate missing event_log data, namely timestamps and geometries.
--
-- To do this it also adds a function public.geometry_from_tstamp()
-- which, given a timestamp, tries to fetch a geometry from real_time_inputs.
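-- Typical use once applied (project id invented):
--
--   CALL public.set_survey('myproject');
--   CALL augment_event_data();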
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE PROCEDURE augment_event_data ()
  LANGUAGE sql
  AS $inner$
    -- Populate the timestamp of sequence / point events
    UPDATE event_log_full
    SET tstamp = tstamp_from_sequence_shot(sequence, point)
    WHERE
      tstamp IS NULL AND sequence IS NOT NULL AND point IS NOT NULL;

    -- Populate the geometry of sequence / point events for which
    -- there is raw_shots data.
    UPDATE event_log_full
    SET meta = meta ||
      jsonb_build_object(
        'geometry',
        (
          SELECT st_transform(geometry, 4326)::jsonb
          FROM raw_shots rs
          WHERE rs.sequence = event_log_full.sequence AND rs.point = event_log_full.point
        )
      )
    WHERE
      sequence IS NOT NULL AND point IS NOT NULL AND
      NOT meta ? 'geometry';

    -- Populate the geometry of time-based events
    UPDATE event_log_full e
    SET
      meta = meta || jsonb_build_object('geometry',
        (SELECT st_transform(g.geometry, 4326)::jsonb
         FROM geometry_from_tstamp(e.tstamp, 3) g))
    WHERE
      tstamp IS NOT NULL AND
      sequence IS NULL AND point IS NULL AND
      NOT meta ? 'geometry';

    -- Get rid of null geometries
    UPDATE event_log_full
    SET
      meta = meta - 'geometry'
    WHERE
      jsonb_typeof(meta->'geometry') = 'null';

    -- Simplify the GeoJSON when the CRS is EPSG:4326
    UPDATE event_log_full
    SET
      meta = meta #- '{geometry, crs}'
    WHERE
      meta->'geometry'->'crs'->'properties'->>'name' = 'EPSG:4326';

  $inner$;

  COMMENT ON PROCEDURE augment_event_data()
  IS 'Populate missing timestamps and geometries in event_log_full';

END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_17 () AS $$
DECLARE
  row RECORD;
BEGIN

  CALL show_notice('Adding index to real_time_inputs.meta->tstamp');
  CREATE INDEX IF NOT EXISTS meta_tstamp_idx
  ON public.real_time_inputs
  USING btree ((meta->>'tstamp') DESC);

  CALL show_notice('Creating function geometry_from_tstamp');
  CREATE OR REPLACE FUNCTION public.geometry_from_tstamp(
    IN ts timestamptz,
    IN tolerance numeric,
    OUT "geometry" geometry,
    OUT "delta" numeric)
  AS $inner$
    SELECT
      geometry,
      extract('epoch' FROM (meta->>'tstamp')::timestamptz - ts ) AS delta
    FROM real_time_inputs
    WHERE
      geometry IS NOT NULL AND
      abs(extract('epoch' FROM (meta->>'tstamp')::timestamptz - ts )) < tolerance
    ORDER BY abs(extract('epoch' FROM (meta->>'tstamp')::timestamptz - ts ))
    LIMIT 1;
  $inner$ LANGUAGE SQL;

  COMMENT ON FUNCTION public.geometry_from_tstamp(timestamptz, numeric)
  IS 'Get geometry from timestamp';

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$$ LANGUAGE plpgsql;

CALL pg_temp.upgrade_17();

CALL show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade_17 ();

CALL show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.4"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.4"}' WHERE public.info.key = 'version';


CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
158
etc/db/upgrades/upgrade18-v0.3.5-label_in_sequence-function.sql
Normal file
@@ -0,0 +1,158 @@
-- Add the missing label_in_sequence() function to production schemas.
--
-- New schema version: 0.3.5
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- The function label_in_sequence(integer, text) was missing for the
-- production schemas. This patch (re-)defines the function as well
-- as other functions that depend on it (otherwise it does not get
-- picked up).
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);


  CREATE OR REPLACE FUNCTION label_in_sequence(_sequence integer, _label text) RETURNS event_log
  LANGUAGE sql
  AS $inner$
    SELECT * FROM event_log WHERE sequence = _sequence AND _label = ANY(labels);
  $inner$;

  -- We need to redefine the functions / procedures that call label_in_sequence

  CREATE OR REPLACE PROCEDURE handle_final_line_events(IN _seq integer, IN _label text, IN _column text)
  LANGUAGE plpgsql
  AS $inner$

  DECLARE
    _line final_lines_summary%ROWTYPE;
    _column_value integer;
    _tg_name text := 'final_line';
    _event event_log%ROWTYPE;
    event_id integer;
  BEGIN

    SELECT * INTO _line FROM final_lines_summary WHERE sequence = _seq;
    _event := label_in_sequence(_seq, _label);
    _column_value := row_to_json(_line)->>_column;

    --RAISE NOTICE '% is %', _label, _event;
    --RAISE NOTICE 'Line is %', _line;
    --RAISE NOTICE '% is % (%)', _column, _column_value, _label;

    IF _event IS NULL THEN
      --RAISE NOTICE 'We will populate the event log from the sequence data';

      INSERT INTO event_log (sequence, point, remarks, labels, meta)
      VALUES (
        -- The sequence
        _seq,
        -- The shotpoint
        _column_value,
        -- Remark. Something like "FSP <linename>"
        format('%s %s', _label, (SELECT meta->>'lineName' FROM final_lines WHERE sequence = _seq)),
        -- Label
        ARRAY[_label],
        -- Meta. Something like {"auto" : {"FSP" : "final_line"}}
        json_build_object('auto', json_build_object(_label, _tg_name))
      );

    ELSE
      --RAISE NOTICE 'We may populate the sequence meta from the event log';
      --RAISE NOTICE 'Unless the event log was populated by us previously';
      --RAISE NOTICE 'Populated by us previously? %', _event.meta->'auto'->>_label = _tg_name;

      IF _event.meta->'auto'->>_label IS DISTINCT FROM _tg_name THEN

        --RAISE NOTICE 'Adding % found in events log to final_line meta', _label;
        UPDATE final_lines
        SET meta = jsonb_set(meta, ARRAY[_label], to_jsonb(_event.point))
        WHERE sequence = _seq;

      END IF;

    END IF;
  END;
  $inner$;

  CREATE OR REPLACE PROCEDURE final_line_post_import(IN _seq integer)
  LANGUAGE plpgsql
  AS $inner$
  BEGIN

    CALL handle_final_line_events(_seq, 'FSP', 'fsp');
    CALL handle_final_line_events(_seq, 'FGSP', 'fsp');
    CALL handle_final_line_events(_seq, 'LGSP', 'lsp');
    CALL handle_final_line_events(_seq, 'LSP', 'lsp');

  END;
  $inner$;


END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_18 () AS $$
DECLARE
  row RECORD;
BEGIN
  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$$ LANGUAGE plpgsql;

CALL pg_temp.upgrade_18();

CALL show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade_18 ();

CALL show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.5"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.5"}' WHERE public.info.key = 'version';


CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
162
etc/db/upgrades/upgrade19-v0.3.6-optimise-geometry-functions.sql
Normal file
@@ -0,0 +1,162 @@
-- Optimise the geometry_from_tstamp() function.
--
-- New schema version: 0.3.6
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This optimises geometry_from_tstamp() by many orders of magnitude
-- (issue #241). The redefinition of geometry_from_tstamp() necessitates
-- redefining dependent functions.
--
-- We also drop the index on real_time_inputs.meta->'tstamp' as it is no
-- longer used.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE PROCEDURE augment_event_data ()
  LANGUAGE sql
  AS $inner$
    -- Populate the timestamp of sequence / point events
    UPDATE event_log_full
    SET tstamp = tstamp_from_sequence_shot(sequence, point)
    WHERE
      tstamp IS NULL AND sequence IS NOT NULL AND point IS NOT NULL;

    -- Populate the geometry of sequence / point events for which
    -- there is raw_shots data.
    UPDATE event_log_full
    SET meta = meta ||
      jsonb_build_object(
        'geometry',
        (
          SELECT st_transform(geometry, 4326)::jsonb
          FROM raw_shots rs
          WHERE rs.sequence = event_log_full.sequence AND rs.point = event_log_full.point
        )
      )
    WHERE
      sequence IS NOT NULL AND point IS NOT NULL AND
      NOT meta ? 'geometry';

    -- Populate the geometry of time-based events
    UPDATE event_log_full e
    SET
      meta = meta || jsonb_build_object('geometry',
        (SELECT st_transform(g.geometry, 4326)::jsonb
         FROM geometry_from_tstamp(e.tstamp, 3) g))
    WHERE
      tstamp IS NOT NULL AND
      sequence IS NULL AND point IS NULL AND
      NOT meta ? 'geometry';

    -- Get rid of null geometries
    UPDATE event_log_full
    SET
      meta = meta - 'geometry'
    WHERE
      jsonb_typeof(meta->'geometry') = 'null';

    -- Simplify the GeoJSON when the CRS is EPSG:4326
    UPDATE event_log_full
    SET
      meta = meta #- '{geometry, crs}'
    WHERE
      meta->'geometry'->'crs'->'properties'->>'name' = 'EPSG:4326';

  $inner$;

  COMMENT ON PROCEDURE augment_event_data()
  IS 'Populate missing timestamps and geometries in event_log_full';

END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
BEGIN

  CALL show_notice('Dropping index from real_time_inputs.meta->tstamp');
  DROP INDEX IF EXISTS meta_tstamp_idx;

  CALL show_notice('Creating function geometry_from_tstamp');
  CREATE OR REPLACE FUNCTION public.geometry_from_tstamp(
    IN ts timestamptz,
    IN tolerance numeric,
    OUT "geometry" geometry,
    OUT "delta" numeric)
  AS $inner$
    SELECT
      geometry,
      extract('epoch' FROM tstamp - ts ) AS delta
    FROM real_time_inputs
    WHERE
      geometry IS NOT NULL AND
      tstamp BETWEEN (ts - tolerance * interval '1 second') AND (ts + tolerance * interval '1 second')
    ORDER BY abs(extract('epoch' FROM tstamp - ts ))
    LIMIT 1;
  $inner$ LANGUAGE SQL;

  COMMENT ON FUNCTION public.geometry_from_tstamp(timestamptz, numeric)
  IS 'Get geometry from timestamp';

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.6"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.6"}' WHERE public.info.key = 'version';


CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,254 @@
-- Update adjust_planner() for the new events schema.
--
-- New schema version: 0.3.7
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This updates the adjust_planner() procedure to take into account the
-- new events schema (the `event` view has been replaced by `event_log`).
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CALL pg_temp.show_notice('Replacing adjust_planner() procedure');
  CREATE OR REPLACE PROCEDURE adjust_planner()
  LANGUAGE plpgsql
  AS $$
  DECLARE
    _planner_config jsonb;
    _planned_line planned_lines%ROWTYPE;
    _lag interval;
    _last_sequence sequences_summary%ROWTYPE;
    _deltatime interval;
    _shotinterval interval;
    _tstamp timestamptz;
    _incr integer;
  BEGIN

    SET CONSTRAINTS planned_lines_pkey DEFERRED;

    SELECT data->'planner'
    INTO _planner_config
    FROM file_data
    WHERE data ? 'planner';

    SELECT *
    INTO _last_sequence
    FROM sequences_summary
    ORDER BY sequence DESC
    LIMIT 1;

    SELECT *
    INTO _planned_line
    FROM planned_lines
    WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;

    SELECT
      COALESCE(
        ((lead(ts0) OVER (ORDER BY sequence)) - ts1),
        make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer)
      )
    INTO _lag
    FROM planned_lines
    WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;

    _incr = sign(_last_sequence.lsp - _last_sequence.fsp);

    RAISE NOTICE '_planner_config: %', _planner_config;
    RAISE NOTICE '_last_sequence: %', _last_sequence;
    RAISE NOTICE '_planned_line: %', _planned_line;
    RAISE NOTICE '_incr: %', _incr;

    -- Does the latest sequence match a planned sequence?
    IF _planned_line IS NULL THEN -- No it doesn't
      RAISE NOTICE 'Latest sequence shot does not match a planned sequence';
      SELECT * INTO _planned_line FROM planned_lines ORDER BY sequence ASC LIMIT 1;
      RAISE NOTICE '_planned_line: %', _planned_line;

      IF _planned_line.sequence <= _last_sequence.sequence THEN
        RAISE NOTICE 'Renumbering the planned sequences starting from %', _planned_line.sequence + 1;
        -- Renumber the planned sequences starting from last shot sequence number + 1
        UPDATE planned_lines
        SET sequence = sequence + _last_sequence.sequence - _planned_line.sequence + 1;
      END IF;

      -- The correction to make to the first planned line's ts0 will be based on either the last
      -- sequence's EOL + default line change time or the current time, whichever is later.
      _deltatime := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1) + make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer), current_timestamp) - _planned_line.ts0;

      -- Is the first planned line's start time in the past? (±5 mins)
      IF _planned_line.ts0 < (current_timestamp - make_interval(mins => 5)) THEN
        RAISE NOTICE 'First planned line is in the past. Adjusting times by %', _deltatime;
        -- Adjust the start / end time of the planned lines by assuming that we are at
        -- `defaultLineChangeDuration` minutes away from SOL of the first planned line.
        UPDATE planned_lines
        SET
          ts0 = ts0 + _deltatime,
          ts1 = ts1 + _deltatime;
      END IF;

    ELSE -- Yes it does
      RAISE NOTICE 'Latest sequence does match a planned sequence: %, %', _planned_line.sequence, _planned_line.line;

      -- Is it online?
      IF EXISTS(SELECT 1 FROM raw_lines_files WHERE sequence = _last_sequence.sequence AND hash = '*online*') THEN
        -- Yes it is
        RAISE NOTICE 'Sequence % is online', _last_sequence.sequence;

        -- Let us get the SOL from the events log if we can
        RAISE NOTICE 'Trying to set fsp, ts0 from events log FSP, FGSP';
        WITH e AS (
          SELECT * FROM event_log
          WHERE
            sequence = _last_sequence.sequence
            AND ('FSP' = ANY(labels) OR 'FGSP' = ANY(labels))
          ORDER BY tstamp LIMIT 1
        )
        UPDATE planned_lines
        SET
          fsp = COALESCE(e.point, fsp),
          ts0 = COALESCE(e.tstamp, ts0)
        FROM e
        WHERE planned_lines.sequence = _last_sequence.sequence;

        -- Shot interval
        _shotinterval := (_last_sequence.ts1 - _last_sequence.ts0) / abs(_last_sequence.lsp - _last_sequence.fsp);

        RAISE NOTICE 'Estimating EOL from current shot interval: %', _shotinterval;

        SELECT (abs(lsp-fsp) * _shotinterval + ts0) - ts1
        INTO _deltatime
        FROM planned_lines
        WHERE sequence = _last_sequence.sequence;

        ---- Set ts1 for the current sequence
        --UPDATE planned_lines
        --SET
        --ts1 = (abs(lsp-fsp) * _shotinterval) + ts0
        --WHERE sequence = _last_sequence.sequence;

        RAISE NOTICE 'Adjustment is %', _deltatime;

        IF abs(EXTRACT(EPOCH FROM _deltatime)) < 8 THEN
          RAISE NOTICE 'Adjustment too small (< 8 s), so not applying it';
          RETURN;
        END IF;

        -- Adjust ts1 for the current sequence
        UPDATE planned_lines
        SET ts1 = ts1 + _deltatime
        WHERE sequence = _last_sequence.sequence;

        -- Now shift all sequences after
        UPDATE planned_lines
        SET ts0 = ts0 + _deltatime, ts1 = ts1 + _deltatime
        WHERE sequence > _last_sequence.sequence;

        RAISE NOTICE 'Deleting planned sequences before %', _planned_line.sequence;
        -- Remove all previous planner entries.
        DELETE
        FROM planned_lines
        WHERE sequence < _last_sequence.sequence;

      ELSE
        -- No it isn't
        RAISE NOTICE 'Sequence % is offline', _last_sequence.sequence;

        -- We were supposed to finish at _planned_line.ts1 but we finished at:
        _tstamp := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1), current_timestamp);
        -- WARNING Next line is for testing only
        --_tstamp := COALESCE(_last_sequence.ts1_final, _last_sequence.ts1);
        -- So we need to adjust timestamps by:
        _deltatime := _tstamp - _planned_line.ts1;

        RAISE NOTICE 'Planned end: %, actual end: % (%, %)', _planned_line.ts1, _tstamp, _planned_line.sequence, _last_sequence.sequence;
        RAISE NOTICE 'Shifting times by % for sequences > %', _deltatime, _planned_line.sequence;
        -- NOTE: This won't work if sequences are not, err… sequential.
        -- NOTE: This has been known to happen in 2020.
        UPDATE planned_lines
        SET
          ts0 = ts0 + _deltatime,
          ts1 = ts1 + _deltatime
        WHERE sequence > _planned_line.sequence;

        RAISE NOTICE 'Deleting planned sequences up to %', _planned_line.sequence;
        -- Remove all previous planner entries.
        DELETE
        FROM planned_lines
        WHERE sequence <= _last_sequence.sequence;

      END IF;

    END IF;
  END;
  $$;


END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
BEGIN

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;

END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.7"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.7"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
etc/db/upgrades/upgrade21-v0.3.8-add-event-data-functions.sql
@@ -0,0 +1,267 @@
-- Add event_position() and event_meta() functions.
--
-- New schema version: 0.3.8
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This adds event_position() and event_meta() functions which are used
-- to retrieve position or metadata, respectively, given either a timestamp
-- or a sequence / point pair. Intended to be used in the context of #229.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  --
  -- event_position(): Fetch event position
  --

  CREATE OR REPLACE FUNCTION event_position (
    tstamp timestamptz, sequence integer, point integer, tolerance numeric
  )
  RETURNS geometry
  AS $$
  DECLARE
    position geometry;
  BEGIN

    -- Try and get position by sequence / point first
    IF sequence IS NOT NULL AND point IS NOT NULL THEN
      -- Try and get the position from final_shots or raw_shots
      SELECT COALESCE(f.geometry, r.geometry) geometry
      INTO position
      FROM raw_shots r LEFT JOIN final_shots f USING (sequence, point)
      WHERE r.sequence = event_position.sequence AND r.point = event_position.point;

      IF position IS NOT NULL THEN
        RETURN position;
      ELSIF tstamp IS NULL THEN
        -- Get the timestamp for the sequence / point, if we can.
        -- It will be used later in the function as we fall back
        -- to timestamp based search.
        -- We also adjust the tolerance as we're now dealing with
        -- an exact timestamp.
        SELECT COALESCE(f.tstamp, r.tstamp) tstamp, 0.002 tolerance
        INTO tstamp, tolerance
        FROM raw_shots r LEFT JOIN final_shots f USING (sequence, point)
        WHERE r.sequence = event_position.sequence AND r.point = event_position.point;
      END IF;
    END IF;

    -- If we got here, we had better have a timestamp.
    -- First attempt: get a position from final_shots or raw_shots. This may
    -- be redundant if we got here from the position of having a sequence /
    -- point without a position, but never mind.
    SELECT COALESCE(f.geometry, r.geometry) geometry
    INTO position
    FROM raw_shots r LEFT JOIN final_shots f USING (sequence, point)
    WHERE r.tstamp = event_position.tstamp OR f.tstamp = event_position.tstamp
    LIMIT 1; -- Just to be sure

    IF position IS NULL THEN
      -- Ok, so everything else so far has failed, let's try and get this
      -- from real time data. We skip the search via sequence / point and
      -- go directly for timestamp.
      SELECT geometry
      INTO position
      FROM geometry_from_tstamp(tstamp, tolerance);
    END IF;

    RETURN position;

  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_position (timestamptz, integer, integer, numeric) IS
  'Return the position associated with a sequence / point in the current project or
with a given timestamp. The timestamp is first searched for in the shot tables
of the current prospect or, if not found, in the real-time data.

Returns a geometry.';

  CREATE OR REPLACE FUNCTION event_position (
    tstamp timestamptz, sequence integer, point integer
  )
  RETURNS geometry
  AS $$
  BEGIN
    RETURN event_position(tstamp, sequence, point, 3);
  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_position (timestamptz, integer, integer) IS
  'Overload of event_position with a default tolerance of three seconds.';

  CREATE OR REPLACE FUNCTION event_position (
    tstamp timestamptz
  )
  RETURNS geometry
  AS $$
  BEGIN
    RETURN event_position(tstamp, NULL, NULL);
  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_position (timestamptz) IS
  'Overload of event_position (timestamptz, integer, integer) for use when searching by timestamp.';

  CREATE OR REPLACE FUNCTION event_position (
    sequence integer, point integer
  )
  RETURNS geometry
  AS $$
  BEGIN
    RETURN event_position(NULL, sequence, point);
  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_position (integer, integer) IS
  'Overload of event_position (timestamptz, integer, integer) for use when searching by sequence / point.';
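
  -- Usage sketch (illustrative sequence / point and timestamp values):
  --
  --   SELECT event_position(11, 2600);                  -- by sequence / point
  --   SELECT event_position('2021-06-01 12:00:00+00');  -- by timestamp, ±3 s
  --   SELECT event_position('2021-06-01 12:00:00+00', NULL, NULL, 10);  -- wider tolerance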

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
BEGIN

  --
  -- event_meta(): Fetch event metadata
  --

  CREATE OR REPLACE FUNCTION event_meta (
    tstamp timestamptz, sequence integer, point integer
  )
  RETURNS jsonb
  AS $$
  DECLARE
    result jsonb;
    -- Tolerance is hard-coded, at least until a need to expose it arises.
    tolerance numeric;
  BEGIN
    tolerance := 3; -- seconds

    -- We search by timestamp if we can, as that's a lot quicker
    IF tstamp IS NOT NULL THEN

      SELECT meta
      INTO result
      FROM real_time_inputs rti
      WHERE
        rti.tstamp BETWEEN (event_meta.tstamp - tolerance * interval '1 second') AND (event_meta.tstamp + tolerance * interval '1 second')
      ORDER BY abs(extract('epoch' FROM rti.tstamp - event_meta.tstamp))
      LIMIT 1;

    ELSE

      SELECT meta
      INTO result
      FROM real_time_inputs rti
      WHERE
        (meta->>'_sequence')::integer = event_meta.sequence AND
        (meta->>'_point')::integer = event_meta.point
      ORDER BY rti.tstamp DESC
      LIMIT 1;

    END IF;

    RETURN result;

  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_meta (timestamptz, integer, integer) IS
  'Return the real-time event metadata associated with a sequence / point in the current project or
with a given timestamp. The timestamp is first searched for in the shot tables
of the current prospect or, if not found, in the real-time data.

Returns a JSONB object.';

  CREATE OR REPLACE FUNCTION event_meta (
    tstamp timestamptz
  )
  RETURNS jsonb
  AS $$
  BEGIN
    RETURN event_meta(tstamp, NULL, NULL);
  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_meta (timestamptz) IS
  'Overload of event_meta (timestamptz, integer, integer) for use when searching by timestamp.';

  CREATE OR REPLACE FUNCTION event_meta (
    sequence integer, point integer
  )
  RETURNS jsonb
  AS $$
  BEGIN
    RETURN event_meta(NULL, sequence, point);
  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_meta (integer, integer) IS
  'Overload of event_meta (timestamptz, integer, integer) for use when searching by sequence / point.';
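
  -- Usage sketch (illustrative values): event_meta() returns the raw
  -- real-time metadata record nearest the given key, e.g.
  --
  --   SELECT event_meta(11, 2600)->>'speed';        -- by sequence / point
  --   SELECT event_meta('2021-06-01 12:00:00+00');  -- by timestamp, ±3 s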

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.8"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.8"}' WHERE public.info.key = 'version';

CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,229 @@
-- Add replace_placeholders() and scan_placeholders().
--
-- New schema version: 0.3.9
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This defines a replace_placeholders() function, taking as arguments
-- a text string and either a timestamp or a sequence / point pair. It
-- uses the latter arguments to find metadata from which it can extract
-- relevant information and replace it into the text string wherever the
-- appropriate placeholders appear. For instance, given a call such as
-- replace_placeholders('The position is @POS@', NULL, 11, 2600) it will
-- replace '@POS@' with the position of point 2600 in sequence 11, if it
-- exists (or leave the placeholder untouched otherwise).
--
-- A scan_placeholders() procedure is also defined, which calls the above
-- function on the entire event log.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION replace_placeholders (
    text_in text, tstamp timestamptz, sequence integer, point integer
  )
  RETURNS text
  AS $$
  DECLARE
    position geometry;
    metadata jsonb;
    text_out text;

    json_query text;
    json_result jsonb;
    expect_recursion boolean := false;
  BEGIN

    text_out := text_in;

    -- We only get a position if we are going to need it…
    IF regexp_match(text_out, '@DMS@|@POS@|@DEG@') IS NOT NULL THEN
      position := ST_Transform(event_position(tstamp, sequence, point), 4326);
    END IF;

    -- …and likewise with the metadata.
    IF regexp_match(text_out, '@BSP@|@WD@|@CMG@|@EN@|@GRID@|@(\$\..*?)@@') IS NOT NULL THEN
      metadata := event_meta(tstamp, sequence, point);
    END IF;

    -- We shortcut the evaluation if neither of the above regexps matched
    IF position IS NULL AND metadata IS NULL THEN
      RETURN text_out;
    END IF;

    IF position('@DMS@' IN text_out) != 0 THEN
      text_out := replace(text_out, '@DMS@', ST_AsLatLonText(position));
    END IF;

    IF position('@POS@' IN text_out) != 0 THEN
      text_out := replace(text_out, '@POS@', replace(ST_AsLatLonText(position, 'D.DDDDDD'), ' ', ', '));
    END IF;

    IF position('@DEG@' IN text_out) != 0 THEN
      text_out := replace(text_out, '@DEG@', replace(ST_AsLatLonText(position, 'D.DDDDDD'), ' ', ', '));
    END IF;

    IF position('@EN@' IN text_out) != 0 THEN
      IF metadata ? 'easting' AND metadata ? 'northing' THEN
        text_out := replace(text_out, '@EN@', (metadata->>'easting') || ', ' || (metadata->>'northing'));
      END IF;
    END IF;

    IF position('@GRID@' IN text_out) != 0 THEN
      IF metadata ? 'easting' AND metadata ? 'northing' THEN
        text_out := replace(text_out, '@GRID@', (metadata->>'easting') || ', ' || (metadata->>'northing'));
      END IF;
    END IF;

    IF position('@CMG@' IN text_out) != 0 THEN
      IF metadata ? 'bearing' THEN
        text_out := replace(text_out, '@CMG@', metadata->>'bearing');
      END IF;
    END IF;

    IF position('@BSP@' IN text_out) != 0 THEN
      IF metadata ? 'speed' THEN
        text_out := replace(text_out, '@BSP@', round((metadata->>'speed')::numeric * 3600 / 1852, 1)::text);
      END IF;
    END IF;

    IF position('@WD@' IN text_out) != 0 THEN
      IF metadata ? 'waterDepth' THEN
        text_out := replace(text_out, '@WD@', metadata->>'waterDepth');
      END IF;
    END IF;

    json_query := (regexp_match(text_out, '@(\$\..*?)@@'))[1];
    IF json_query IS NOT NULL THEN
      json_result := jsonb_path_query_array(metadata, json_query::jsonpath);
      IF jsonb_array_length(json_result) = 1 THEN
        text_out := replace(text_out, '@'||json_query||'@@', json_result->>0);
      ELSE
        text_out := replace(text_out, '@'||json_query||'@@', json_result::text);
      END IF;
      -- There might be multiple JSONPath queries, so we may have to recurse
      expect_recursion := true;
    END IF;

    IF expect_recursion IS TRUE AND text_in != text_out THEN
      --RAISE NOTICE 'Recursing %', text_out;
      -- We don't know if we have found all the JSONPath expressions,
      -- so we do another pass.
      RETURN replace_placeholders(text_out, tstamp, sequence, point);
    ELSE
      RETURN text_out;
    END IF;

  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION replace_placeholders (text, timestamptz, integer, integer) IS
  'Replace certain placeholder strings in the input text with data obtained from shot or real-time data.';
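
  -- Usage sketch (illustrative values, echoing the example in the header):
  --
  --   SELECT replace_placeholders('The position is @POS@', NULL, 11, 2600);
  --   SELECT replace_placeholders('Speed @BSP@ kn in @WD@ m of water',
  --                               '2021-06-01 12:00:00+00', NULL, NULL);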

  CREATE OR REPLACE PROCEDURE scan_placeholders ()
  LANGUAGE sql
  AS $$
    -- We update non read-only events via the event_log view to leave a trace
    -- of the fact that placeholders were replaced (and when).
    -- Note that this will not replace placeholders of old edits.
    UPDATE event_log
    SET remarks = replace_placeholders(remarks, tstamp, sequence, point)
    FROM (
      SELECT id
      FROM event_log e
      WHERE
        (meta->'readonly')::boolean IS NOT TRUE AND (
          regexp_match(remarks, '@DMS@|@POS@|@DEG@') IS NOT NULL OR
          regexp_match(remarks, '@BSP@|@WD@|@CMG@|@EN@|@GRID@|@(\$\..*?)@@') IS NOT NULL
        )
    ) t
    WHERE event_log.id = t.id;

    -- And then we update read-only events directly on the event_log_full table
    -- (as of this version of the schema we're prevented from updating read-only
    -- events via event_log anyway).
    UPDATE event_log_full
    SET remarks = replace_placeholders(remarks, tstamp, sequence, point)
    FROM (
      SELECT uid
      FROM event_log_full e
      WHERE
        (meta->'readonly')::boolean IS TRUE AND (
          regexp_match(remarks, '@DMS@|@POS@|@DEG@') IS NOT NULL OR
          regexp_match(remarks, '@BSP@|@WD@|@CMG@|@EN@|@GRID@|@(\$\..*?)@@') IS NOT NULL
        )
    ) t
    WHERE event_log_full.uid = t.uid;
  $$;

  COMMENT ON PROCEDURE scan_placeholders () IS
  'Run replace_placeholders() on the entire event log.';

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
BEGIN

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.9"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.9"}' WHERE public.info.key = 'version';

CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,127 @@
-- Add interpolate_geometry_from_tstamp().
--
-- New schema version: 0.3.10
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects only the public schema.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This defines an interpolate_geometry_from_tstamp() function, taking a
-- timestamp and a maximum timespan in seconds. It will then interpolate
-- a position at the exact timestamp based on data from real_time_inputs,
-- provided that the effective interpolation timespan does not exceed the
-- maximum requested.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
BEGIN

  CALL pg_temp.show_notice('Defining interpolate_geometry_from_tstamp()');

  CREATE OR REPLACE FUNCTION public.interpolate_geometry_from_tstamp(
    IN ts timestamptz,
    IN maxspan numeric
  )
  RETURNS geometry
  AS $$
  DECLARE
    ts0 timestamptz;
    ts1 timestamptz;
    geom0 geometry;
    geom1 geometry;
    span numeric;
    fraction numeric;
  BEGIN

    SELECT tstamp, geometry
    INTO ts0, geom0
    FROM real_time_inputs
    WHERE tstamp <= ts
    ORDER BY tstamp DESC
    LIMIT 1;

    SELECT tstamp, geometry
    INTO ts1, geom1
    FROM real_time_inputs
    WHERE tstamp >= ts
    ORDER BY tstamp ASC
    LIMIT 1;

    IF geom0 IS NULL OR geom1 IS NULL THEN
      RAISE NOTICE 'Interpolation failed (no straddling data)';
      RETURN NULL;
    END IF;

    -- See if we got an exact match
    IF ts0 = ts THEN
      RETURN geom0;
    ELSIF ts1 = ts THEN
      RETURN geom1;
    END IF;

    span := extract('epoch' FROM ts1 - ts0);

    IF span > maxspan THEN
      RAISE NOTICE 'Interpolation timespan % outside maximum requested (%)', span, maxspan;
      RETURN NULL;
    END IF;

    fraction := extract('epoch' FROM ts - ts0) / span;

    IF fraction < 0 OR fraction > 1 THEN
      RAISE NOTICE 'Requested timestamp % outside of interpolation span (fraction: %)', ts, fraction;
      RETURN NULL;
    END IF;

    RETURN ST_LineInterpolatePoint(ST_MakeLine(geom0, geom1), fraction);

  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION public.interpolate_geometry_from_tstamp(timestamptz, numeric) IS
  'Interpolate a position over a given maximum timespan (in seconds)
based on real-time inputs. Returns a POINT geometry.';
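
  -- Usage sketch (illustrative timestamp): interpolate a position at the
  -- given instant, provided the straddling fixes are no more than 60 s apart.
  --
  --   SELECT public.interpolate_geometry_from_tstamp('2021-06-01 12:00:00+00', 60);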

END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.10"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.10"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,149 @@
-- Redefine augment_event_data() to use interpolation.
--
-- New schema version: 0.3.11
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This redefines augment_event_data() to use interpolation rather than
-- nearest neighbour. It now takes an argument indicating the maximum
-- allowed interpolation timespan. An overload with a default of ten
-- minutes is also provided, as an in situ replacement for the previous
-- version.
--
-- The ten-minute default is based on Triggerfish headers behaviour seen
-- on crew 248 during soft starts.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE PROCEDURE augment_event_data (maxspan numeric)
  LANGUAGE sql
  AS $$
    -- Populate the timestamp of sequence / point events
    UPDATE event_log_full
    SET tstamp = tstamp_from_sequence_shot(sequence, point)
    WHERE
      tstamp IS NULL AND sequence IS NOT NULL AND point IS NOT NULL;

    -- Populate the geometry of sequence / point events for which
    -- there is raw_shots data.
    UPDATE event_log_full
    SET meta = meta ||
      jsonb_build_object(
        'geometry',
        (
          SELECT st_transform(geometry, 4326)::jsonb
          FROM raw_shots rs
          WHERE rs.sequence = event_log_full.sequence AND rs.point = event_log_full.point
        )
      )
    WHERE
      sequence IS NOT NULL AND point IS NOT NULL AND
      NOT meta ? 'geometry';

    -- Populate the geometry of time-based events
    UPDATE event_log_full e
    SET
      meta = meta || jsonb_build_object('geometry',
        (SELECT st_transform(g.geometry, 4326)::jsonb
         FROM interpolate_geometry_from_tstamp(e.tstamp, maxspan) g))
    WHERE
      tstamp IS NOT NULL AND
      sequence IS NULL AND point IS NULL AND
      NOT meta ? 'geometry';

    -- Get rid of null geometries
    UPDATE event_log_full
    SET
      meta = meta - 'geometry'
    WHERE
      jsonb_typeof(meta->'geometry') = 'null';

    -- Simplify the GeoJSON when the CRS is EPSG:4326
    UPDATE event_log_full
    SET
      meta = meta #- '{geometry, crs}'
    WHERE
      meta->'geometry'->'crs'->'properties'->>'name' = 'EPSG:4326';
  $$;

  COMMENT ON PROCEDURE augment_event_data(numeric)
    IS 'Populate missing timestamps and geometries in event_log_full';

  CREATE OR REPLACE PROCEDURE augment_event_data ()
  LANGUAGE sql
  AS $$
    CALL augment_event_data(600);
  $$;

  COMMENT ON PROCEDURE augment_event_data()
    IS 'Overload of augment_event_data(maxspan numeric) with a maxspan value of 600 seconds.';
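
  -- Usage sketch: tighten the interpolation window to two minutes, or rely
  -- on the ten-minute default.
  --
  --   CALL augment_event_data(120);
  --   CALL augment_event_data();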

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
BEGIN

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.11"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.11"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,193 @@
-- Add a midnight_shots view and log_midnight_shots() procedures.
--
-- New schema version: 0.3.12
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This defines a midnight_shots view and a log_midnight_shots() procedure
-- (with some overloads). The view returns all points straddling midnight
-- UTC and belonging to the same sequence (so the last shot of the day and
-- the first shot of the next day).
--
-- The procedure inserts the corresponding events (optionally constrained
-- by an earliest and a latest date) in the event log, unless the events
-- already exist.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE VIEW midnight_shots AS
  WITH straddlers AS (
    -- Get sequence numbers straddling midnight UTC
    SELECT sequence
    FROM final_shots
    GROUP BY sequence
    HAVING min(date(tstamp)) != max(date(tstamp))
  ),
  ts AS (
    -- Get earliest and latest timestamps for each day
    -- for each of the above sequences.
    -- This will return the timestamps for:
    -- FSP, LDSP, FDSP, LSP.
    SELECT
      fs.sequence,
      min(fs.tstamp) AS ts0,
      max(fs.tstamp) AS ts1
    FROM final_shots fs INNER JOIN straddlers USING (sequence)
    GROUP BY fs.sequence, (date(fs.tstamp))
    ORDER BY fs.sequence, date(fs.tstamp)
  ),
  spts AS (
    -- Filter out FSP, LSP from the above.
    -- NOTE: This *should* in theory be able to cope with
    -- a sequence longer than 24 hours (so with more than
    -- one LDSP, FDSP) but that hasn't been tested.
    SELECT DISTINCT
      sequence,
      min(ts1) OVER (PARTITION BY sequence) ldsp,
      max(ts0) OVER (PARTITION BY sequence) fdsp
    FROM ts
    ORDER BY sequence
  ), evt AS (
    SELECT
      fs.tstamp,
      fs.sequence,
      point,
      'Last shotpoint of the day' remarks,
      '{LDSP}'::text[] labels
    FROM final_shots fs
    INNER JOIN spts ON fs.sequence = spts.sequence AND fs.tstamp = spts.ldsp
    UNION SELECT
      fs.tstamp,
      fs.sequence,
      point,
      'First shotpoint of the day' remarks,
      '{FDSP}'::text[] labels
    FROM final_shots fs
    INNER JOIN spts ON fs.sequence = spts.sequence AND fs.tstamp = spts.fdsp
    ORDER BY tstamp
  )
  SELECT * FROM evt;

  CREATE OR REPLACE PROCEDURE log_midnight_shots (dt0 date, dt1 date)
  LANGUAGE sql
  AS $$
    INSERT INTO event_log (sequence, point, remarks, labels, meta)
    SELECT
      sequence, point, remarks, labels,
      '{"auto": true, "insertedBy": "log_midnight_shots"}'::jsonb
    FROM midnight_shots ms
    WHERE
      (dt0 IS NULL OR ms.tstamp >= dt0) AND
      (dt1 IS NULL OR ms.tstamp <= dt1) AND
      NOT EXISTS (
        SELECT 1
        FROM event_log el
        WHERE ms.sequence = el.sequence AND ms.point = el.point AND el.labels @> ms.labels
      );

    -- Delete any midnight shots that might have been inserted in the log
    -- but are no longer relevant according to the final_shots data.
    -- We operate on event_log, so the deletion is traceable.
    DELETE
    FROM event_log
    WHERE id IN (
      SELECT id
      FROM event_log el
      LEFT JOIN midnight_shots ms USING (sequence, point)
      WHERE
        '{LDSP,FDSP}'::text[] && el.labels -- &&: Do the arrays overlap?
        AND ms.sequence IS NULL
    );
  $$;

  COMMENT ON PROCEDURE log_midnight_shots (date, date)
    IS 'Add midnight shots between two dates dt0 and dt1 to the event_log, unless the events already exist.';

  CREATE OR REPLACE PROCEDURE log_midnight_shots (dt0 date)
  LANGUAGE sql
  AS $$
    CALL log_midnight_shots(dt0, NULL);
  $$;

  COMMENT ON PROCEDURE log_midnight_shots (date)
    IS 'Overload taking only a dt0 (adds events on that date or after).';

  CREATE OR REPLACE PROCEDURE log_midnight_shots ()
  LANGUAGE sql
  AS $$
    CALL log_midnight_shots(NULL, NULL);
  $$;

  COMMENT ON PROCEDURE log_midnight_shots ()
    IS 'Overload taking no arguments (adds all missing events).';
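
  -- Usage sketch (illustrative dates):
  --
  --   CALL log_midnight_shots('2021-01-01', '2021-03-31');  -- bounded range
  --   CALL log_midnight_shots('2021-01-01');                -- from a date onwards
  --   CALL log_midnight_shots();                            -- everything missing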

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
BEGIN

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.12"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.12"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
etc/db/upgrades/upgrade26-v0.3.13-fix-missing-shots-summary.sql
@@ -0,0 +1,162 @@
-- Fix wrong number of missing shots in summary views
--
-- New schema version: 0.3.13
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- Fixes a bug in the `final_lines_summary` and `raw_lines_summary` views
-- which results in the number of missing shots being miscounted on jobs
-- using three sources.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE VIEW raw_lines_summary AS
  WITH summary AS (
    SELECT DISTINCT rs.sequence,
      first_value(rs.point) OVER w AS fsp,
      last_value(rs.point) OVER w AS lsp,
      first_value(rs.tstamp) OVER w AS ts0,
      last_value(rs.tstamp) OVER w AS ts1,
      count(rs.point) OVER w AS num_points,
      count(pp.point) OVER w AS num_preplots,
      public.st_distance(first_value(rs.geometry) OVER w, last_value(rs.geometry) OVER w) AS length,
      ((public.st_azimuth(first_value(rs.geometry) OVER w, last_value(rs.geometry) OVER w) * (180)::double precision) / pi()) AS azimuth
    FROM (raw_shots rs
      LEFT JOIN preplot_points pp USING (line, point))
    WINDOW w AS (PARTITION BY rs.sequence ORDER BY rs.tstamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
  )
  SELECT rl.sequence,
    rl.line,
    s.fsp,
    s.lsp,
    s.ts0,
    s.ts1,
    (s.ts1 - s.ts0) AS duration,
    s.num_points,
    s.num_preplots,
    (SELECT count(*) AS count
     FROM missing_sequence_raw_points
     WHERE missing_sequence_raw_points.sequence = s.sequence) AS missing_shots,
    s.length,
    s.azimuth,
    rl.remarks,
    rl.ntbp,
    rl.meta
  FROM (summary s
    JOIN raw_lines rl USING (sequence));

  CREATE OR REPLACE VIEW final_lines_summary AS
  WITH summary AS (
    SELECT DISTINCT fs.sequence,
      first_value(fs.point) OVER w AS fsp,
      last_value(fs.point) OVER w AS lsp,
      first_value(fs.tstamp) OVER w AS ts0,
      last_value(fs.tstamp) OVER w AS ts1,
      count(fs.point) OVER w AS num_points,
      public.st_distance(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) AS length,
      ((public.st_azimuth(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) * (180)::double precision) / pi()) AS azimuth
    FROM final_shots fs
    WINDOW w AS (PARTITION BY fs.sequence ORDER BY fs.tstamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
  )
  SELECT fl.sequence,
    fl.line,
    s.fsp,
    s.lsp,
    s.ts0,
    s.ts1,
    (s.ts1 - s.ts0) AS duration,
    s.num_points,
    (SELECT count(*) AS count
     FROM missing_sequence_final_points
     WHERE missing_sequence_final_points.sequence = s.sequence) AS missing_shots,
    s.length,
    s.azimuth,
    fl.remarks,
    fl.meta
  FROM (summary s
    JOIN final_lines fl USING (sequence));
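
  -- Usage sketch: the corrected missing_shots column can be checked with
  --
  --   SELECT sequence, line, num_points, missing_shots
  --   FROM raw_lines_summary
  --   ORDER BY sequence;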

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.3.13' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.3.12' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
          HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.13"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.13"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,122 @@
-- Add a project_configuration() function.
--
-- New schema version: 0.4.0
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This adapts the schema to the change in how project configurations are
-- handled (https://gitlab.com/wgp/dougal/software/-/merge_requests/29)
-- by creating a project_configuration() function which returns the
-- current project's configuration data.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION project_configuration()
  RETURNS jsonb
  LANGUAGE plpgsql
  AS $$
  DECLARE
    schema_name text;
    configuration jsonb;
  BEGIN

    -- Find the schema in which this instance of the function lives…
    SELECT nspname
    INTO schema_name
    FROM pg_namespace
    WHERE oid = (
      SELECT pronamespace
      FROM pg_proc
      WHERE oid = 'project_configuration'::regproc::oid
    );

    -- …and return the matching project's configuration.
    SELECT meta
    INTO configuration
    FROM public.projects
    WHERE schema = schema_name;

    RETURN configuration;
  END;
  $$;
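
  -- Usage sketch: from within a survey schema, fetch the whole configuration
  -- or a single branch of it, e.g.
  --
  --   SELECT project_configuration();
  --   SELECT project_configuration()->'planner'->>'defaultLineChangeDuration';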

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.0' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.3.12' AND current_db_version != '0.3.13' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
          HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.0"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.0"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,264 @@
-- Modify adjust_planner() to use project_configuration().
--
-- New schema version: 0.4.1
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This modifies adjust_planner() to use project_configuration().
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);
CREATE OR REPLACE PROCEDURE adjust_planner()
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
DECLARE
|
||||
_planner_config jsonb;
|
||||
_planned_line planned_lines%ROWTYPE;
|
||||
_lag interval;
|
||||
_last_sequence sequences_summary%ROWTYPE;
|
||||
_deltatime interval;
|
||||
_shotinterval interval;
|
||||
_tstamp timestamptz;
|
||||
_incr integer;
|
||||
BEGIN
|
||||
|
||||
SET CONSTRAINTS planned_lines_pkey DEFERRED;
|
||||
|
||||
SELECT project_configuration()->'planner'
|
||||
INTO _planner_config;
|
||||
|
||||
SELECT *
|
||||
INTO _last_sequence
|
||||
FROM sequences_summary
|
||||
ORDER BY sequence DESC
|
||||
LIMIT 1;
|
||||
|
||||
SELECT *
|
||||
INTO _planned_line
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
|
||||
|
||||
SELECT
|
||||
COALESCE(
|
||||
((lead(ts0) OVER (ORDER BY sequence)) - ts1),
|
||||
make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer)
|
||||
)
|
||||
INTO _lag
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
|
||||
|
||||
_incr = sign(_last_sequence.lsp - _last_sequence.fsp);
|
||||
|
||||
RAISE NOTICE '_planner_config: %', _planner_config;
|
||||
RAISE NOTICE '_last_sequence: %', _last_sequence;
|
||||
RAISE NOTICE '_planned_line: %', _planned_line;
|
||||
RAISE NOTICE '_incr: %', _incr;
|
||||
|
||||
-- Does the latest sequence match a planned sequence?
|
||||
IF _planned_line IS NULL THEN -- No it doesn't
|
||||
RAISE NOTICE 'Latest sequence shot does not match a planned sequence';
|
||||
SELECT * INTO _planned_line FROM planned_lines ORDER BY sequence ASC LIMIT 1;
|
||||
RAISE NOTICE '_planned_line: %', _planned_line;
|
||||
|
||||
IF _planned_line.sequence <= _last_sequence.sequence THEN
|
||||
RAISE NOTICE 'Renumbering the planned sequences starting from %', _planned_line.sequence + 1;
|
||||
-- Renumber the planned sequences starting from last shot sequence number + 1
|
||||
UPDATE planned_lines
|
||||
SET sequence = sequence + _last_sequence.sequence - _planned_line.sequence + 1;
|
||||
END IF;
|
||||
|
||||
-- The correction to make to the first planned line's ts0 will be based on either the last
|
||||
-- sequence's EOL + default line change time or the current time, whichever is later.
|
||||
_deltatime := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1) + make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer), current_timestamp) - _planned_line.ts0;
|
||||
|
||||
-- Is the first of the planned lines start time in the past? (±5 mins)
|
||||
IF _planned_line.ts0 < (current_timestamp - make_interval(mins => 5)) THEN
|
||||
RAISE NOTICE 'First planned line is in the past. Adjusting times by %', _deltatime;
|
||||
-- Adjust the start / end time of the planned lines by assuming that we are at
|
||||
-- `defaultLineChangeDuration` minutes away from SOL of the first planned line.
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
ts0 = ts0 + _deltatime,
|
||||
ts1 = ts1 + _deltatime;
|
||||
END IF;
|
||||
|
||||
ELSE -- Yes it does
|
||||
RAISE NOTICE 'Latest sequence does match a planned sequence: %, %', _planned_line.sequence, _planned_line.line;
|
||||
|
||||
-- Is it online?
|
||||
IF EXISTS(SELECT 1 FROM raw_lines_files WHERE sequence = _last_sequence.sequence AND hash = '*online*') THEN
|
||||
-- Yes it is
|
||||
RAISE NOTICE 'Sequence % is online', _last_sequence.sequence;
|
||||
|
||||
-- Let us get the SOL from the events log if we can
|
||||
RAISE NOTICE 'Trying to set fsp, ts0 from events log FSP, FGSP';
|
||||
WITH e AS (
|
||||
SELECT * FROM event_log
|
||||
WHERE
|
||||
sequence = _last_sequence.sequence
|
||||
AND ('FSP' = ANY(labels) OR 'FGSP' = ANY(labels))
|
||||
ORDER BY tstamp LIMIT 1
|
||||
)
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
fsp = COALESCE(e.point, fsp),
|
||||
ts0 = COALESCE(e.tstamp, ts0)
|
||||
FROM e
|
||||
WHERE planned_lines.sequence = _last_sequence.sequence;
|
||||
|
||||
-- Shot interval
|
||||
_shotinterval := (_last_sequence.ts1 - _last_sequence.ts0) / abs(_last_sequence.lsp - _last_sequence.fsp);
|
||||
|
||||
RAISE NOTICE 'Estimating EOL from current shot interval: %', _shotinterval;
|
||||
|
||||
SELECT (abs(lsp-fsp) * _shotinterval + ts0) - ts1
|
||||
INTO _deltatime
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence;
|
||||
|
||||
---- Set ts1 for the current sequence
|
||||
--UPDATE planned_lines
|
||||
--SET
|
||||
--ts1 = (abs(lsp-fsp) * _shotinterval) + ts0
|
||||
--WHERE sequence = _last_sequence.sequence;
|
||||
|
||||
RAISE NOTICE 'Adjustment is %', _deltatime;
|
||||
|
||||
IF abs(EXTRACT(EPOCH FROM _deltatime)) < 8 THEN
|
||||
RAISE NOTICE 'Adjustment too small (< 8 s), so not applying it';
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
-- Adjust ts1 for the current sequence
|
||||
UPDATE planned_lines
|
||||
SET ts1 = ts1 + _deltatime
|
||||
WHERE sequence = _last_sequence.sequence;
|
||||
|
||||
-- Now shift all sequences after
|
||||
UPDATE planned_lines
|
||||
SET ts0 = ts0 + _deltatime, ts1 = ts1 + _deltatime
|
||||
WHERE sequence > _last_sequence.sequence;
|
||||
|
||||
RAISE NOTICE 'Deleting planned sequences before %', _planned_line.sequence;
|
||||
-- Remove all previous planner entries.
|
||||
DELETE
|
||||
FROM planned_lines
|
||||
WHERE sequence < _last_sequence.sequence;
|
||||
|
||||
ELSE
|
||||
-- No it isn't
|
||||
RAISE NOTICE 'Sequence % is offline', _last_sequence.sequence;
|
||||
|
||||
-- We were supposed to finish at _planned_line.ts1 but we finished at:
|
||||
_tstamp := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1), current_timestamp);
|
||||
-- WARNING Next line is for testing only
|
||||
--_tstamp := COALESCE(_last_sequence.ts1_final, _last_sequence.ts1);
|
||||
-- So we need to adjust timestamps by:
|
||||
_deltatime := _tstamp - _planned_line.ts1;
|
||||
|
||||
RAISE NOTICE 'Planned end: %, actual end: % (%, %)', _planned_line.ts1, _tstamp, _planned_line.sequence, _last_sequence.sequence;
|
||||
RAISE NOTICE 'Shifting times by % for sequences > %', _deltatime, _planned_line.sequence;
|
||||
-- NOTE: This won't work if sequences are not, err… sequential.
|
||||
-- NOTE: This has been known to happen in 2020.
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
ts0 = ts0 + _deltatime,
|
||||
ts1 = ts1 + _deltatime
|
||||
WHERE sequence > _planned_line.sequence;
|
||||
|
||||
RAISE NOTICE 'Deleting planned sequences up to %', _planned_line.sequence;
|
||||
-- Remove all previous planner entries.
|
||||
DELETE
|
||||
FROM planned_lines
|
||||
WHERE sequence <= _last_sequence.sequence;
|
||||
|
||||
END IF;
|
||||
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
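  -- A sketch of how the new procedure is invoked once a survey schema is
  -- in the search path (nothing in this upgrade calls it; it mutates
  -- planned_lines, so try it inside a transaction you can roll back):
  --
  --   CALL adjust_planner();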

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.1' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.0' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
    HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.1"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.1"}' WHERE public.info.key = 'version';
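-- To confirm the bump before committing (sketch; this is the same lookup
-- the upgrade procedures use for their own version checks):
--
--   SELECT value->>'db_schema' FROM public.info WHERE key = 'version';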

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,98 @@
-- Fix wrong number of missing shots in summary views
--
-- New schema version: 0.4.2
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This modifies binning_parameters() to use project_configuration()
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION binning_parameters() RETURNS jsonb
  LANGUAGE sql STABLE LEAKPROOF PARALLEL SAFE
  AS $$
    SELECT project_configuration()->'binning' binning;
  $$;
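  -- Quick sanity check (sketch): after this upgrade, each survey schema's
  -- binning_parameters() should simply mirror the 'binning' object of the
  -- project configuration:
  --
  --   SELECT binning_parameters() = project_configuration()->'binning';  -- expect true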

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.2' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.1' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
    HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.2"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.2"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
etc/db/upgrades/upgrade30-v0.4.3-large-notification-payloads.sql (new file, 164 lines)
@@ -0,0 +1,164 @@
-- Support notification payloads larger than Postgres' NOTIFY limit.
--
-- New schema version: 0.4.3
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This creates a new table where large notification payloads are stored
-- temporarily and from which they might be recalled by the notification
-- listeners. It also creates a purge_notifications() procedure used to
-- clean up old notifications from the notifications log and finally,
-- modifies notify() to support these changes. When a large payload is
-- encountered, the payload is stored in the notify_payloads table and
-- a trimmed down version containing a payload_id is sent to listeners
-- instead. Listeners can then query notify_payloads to retrieve the full
-- payloads. It is the application layer's responsibility to delete old
-- notifications.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--
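-- Listener side (sketch, not part of this script): a client receiving a
-- notification whose JSON carries a payload_id instead of the usual
-- 'old'/'new' rows can recover the full payload with, e.g.:
--
--   SELECT payload FROM public.notify_payloads WHERE id = 42;  -- 42: the received payload_id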

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_schema () AS $outer$
BEGIN

  RAISE NOTICE 'Updating public schema';
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO public');

  CREATE TABLE IF NOT EXISTS public.notify_payloads (
    id SERIAL,
    tstamp timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP,
    payload text NOT NULL DEFAULT '',
    PRIMARY KEY (id)
  );

  CREATE INDEX IF NOT EXISTS notify_payload_tstamp ON notify_payloads (tstamp);

  CREATE OR REPLACE FUNCTION public.notify() RETURNS trigger
  LANGUAGE plpgsql
  AS $$
  DECLARE
    channel text := TG_ARGV[0];
    pid text;
    payload text;
    notification text;
    payload_id integer;
  BEGIN

    SELECT projects.pid INTO pid FROM projects WHERE schema = TG_TABLE_SCHEMA;

    payload := json_build_object(
      'tstamp', CURRENT_TIMESTAMP,
      'operation', TG_OP,
      'schema', TG_TABLE_SCHEMA,
      'table', TG_TABLE_NAME,
      'old', row_to_json(OLD),
      'new', row_to_json(NEW),
      'pid', pid
    )::text;

    IF octet_length(payload) < 1000 THEN
      PERFORM pg_notify(channel, payload);
    ELSE
      -- We need to find another solution
      -- FIXME Consider storing the payload in a temporary memory table,
      -- referenced by some form of autogenerated ID. Then send the ID
      -- as the payload and then it's up to the user to fetch the original
      -- payload if interested. This needs a mechanism to expire older payloads
      -- in the interest of conserving memory.

      INSERT INTO notify_payloads (payload) VALUES (payload) RETURNING id INTO payload_id;

      notification := json_build_object(
        'tstamp', CURRENT_TIMESTAMP,
        'operation', TG_OP,
        'schema', TG_TABLE_SCHEMA,
        'table', TG_TABLE_NAME,
        'pid', pid,
        'payload_id', payload_id
      )::text;

      PERFORM pg_notify(channel, notification);
      RAISE INFO 'Payload over limit';
    END IF;
    RETURN NULL;
  END;
  $$;
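  -- A note on the cutoff above: 1000 bytes is deliberately conservative;
  -- Postgres itself accepts NOTIFY payloads of just under 8000 bytes in a
  -- default build, so there is ample headroom for framing.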

  CREATE PROCEDURE public.purge_notifications (age_seconds numeric DEFAULT 120) AS $$
    DELETE FROM notify_payloads WHERE EXTRACT(epoch FROM CURRENT_TIMESTAMP - tstamp) > age_seconds;
  $$ LANGUAGE sql;
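  -- Nothing in this upgrade schedules the purge; per the header comment it
  -- is the application layer's job to run it periodically, e.g.
  --
  --   CALL public.purge_notifications();      -- default retention: 120 s
  --   CALL public.purge_notifications(3600);  -- or keep payloads for an hour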

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.3' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.2' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
    HINT='Ensure all previous patches have been applied.';
  END IF;

  -- This upgrade modified the `public` schema only, not individual
  -- project schemas.
  CALL pg_temp.upgrade_schema();

END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_schema ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.3"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.3"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,104 @@
-- Add event_log_changes function
--
-- New schema version: 0.4.4
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This adds a function event_log_changes which returns the subset of
-- events from event_log_full which have been modified after a
-- given timestamp.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION event_log_changes(ts0 timestamptz)
  RETURNS SETOF event_log_full
  LANGUAGE sql
  AS $$
    SELECT *
    FROM event_log_full
    WHERE lower(validity) > ts0 OR (upper(validity) IS NOT NULL AND upper(validity) > ts0)
    ORDER BY lower(validity);
  $$;
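  -- Typical use (sketch): incremental sync of a client's event cache, e.g.
  --
  --   SELECT * FROM event_log_changes(now() - interval '15 minutes');
  --
  -- Assuming validity is the row's lifetime range, the upper(validity)
  -- term also returns rows whose validity was closed after ts0 (i.e.
  -- superseded or deleted events), not just newly created ones.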

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.4' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.3' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
    HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.4"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.4"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -4,21 +4,21 @@
<head>
<title>{{DglProjectId}} Line Log Report – {%if Sequences.length > 1 %}Multiple sequences{% else %}Sequence {{Sequences[0].SequenceNumber}}{% endif %}</title>
<style>

@media print {
  body, html, table {
    font-size: 10px !important;
  }

  a {
    text-decoration: none;
  }

  tr.aside {
    font-size: 8px !important;
  }
}

html {
  font-size: 16px;
  text-rendering: optimizeLegibility;
@@ -180,7 +180,7 @@ footer {
  font-size: smaller;
  border-top: thin solid;
  min-height: 25px;

  position: fixed;
  bottom: 0;
  background: white;
@@ -246,13 +246,24 @@
{% if Begin.Reshoot %}Reshoot{% endif -%}

<div class="comments">

{% if Sequence.DglSequenceComments %}
{% if Sequence.DglSequenceComments[0] %}
<h3>Acquisition Comments</h3>
<div class="comment">
{{ Sequence.DglSequenceComments[0] | markdown }}
</div>
{% endif %}
{% if Sequence.DglSequenceComments[1] %}
<h3>Processing Comments</h3>
<div class="comment">
{{ Sequence.DglSequenceComments[1] | markdown }}
</div>
{% endif %}
{% else %}
<h3>Sequence comments</h3>

{% for Comment in Sequence.DglSequenceComments %}
<div class="comment">{{ Comment | markdown }}</div>
{% endfor %}

{% if not Sequence.DglSequenceComments %}<div class="nocomment">(Nil)</div>{% endif %}
<div class="nocomment">(Nil)</div>
{% endif %}
</div>

<div class="events">
@@ -7,14 +7,20 @@
    id: missing_shots
    check: |
      const sequence = currentItem;
      const sp0 = Math.min(sequence.fsp, sequence.lsp);
      const sp1 = Math.max(sequence.fsp, sequence.lsp);
      const missing = preplots.filter(r => r.line == sequence.line &&
        r.point >= sp0 && r.point <= sp1 &&
        !sequence.shots.find(s => s.point == r.point)
      );
      let results;
      if (sequence.missing_shots) {
        results = {
          shots: {}
        }
        const missing_shots = missingShotpoints.filter(i => !i.ntba);
        for (const shot of missing_shots) {
          results.shots[shot.point] = { remarks: "Missed shot", labels: [ "QC", "QCAcq" ] };
        }
      } else {
        results = true;
      }

      missing.length == 0 || missing.map(r => `Missing shot: ${r.point}`).join("\n")
      results;
  -
    name: "Gun QC"
    disabled: false
@@ -25,15 +31,15 @@
    iterate: "sequences"
    id: seq_no_gun_data
    check: |
      const sequence = currentItem;
      currentItem.has_smsrc_data || "Sequence has no gun data"
      shotpoints.some(i => i.meta?.raw?.smsrc) || "Sequence has no gun data"
  -
    name: "Missing gun data"
    id: missing_gun_data
    ignoreAllFailed: true
    check: |
      sequences.some(s => s.sequence == currentItem.sequence && s.has_smsrc_data)
        ? (!!currentItem._("raw_meta.smsrc.guns") || "Missing gun data")
        : true
      !!currentItem._("raw_meta.smsrc.guns")
        ? true
        : "Missing gun data"

  -
    name: "No fire"
@@ -41,8 +47,8 @@
    check: |
      const currentShot = currentItem;
      const gunData = currentItem._("raw_meta.smsrc");
      (gunData && gunData.num_nofire != 0)
        ? `Source ${gunData.src_number}: No fire (${gunData.num_nofire} guns)`
      (gunData && gunData.guns && gunData.guns.length != gunData.num_active)
        ? `Source ${gunData.src_number}: No fire (${gunData.guns.length - gunData.num_active} guns)`
        : true;

  -
@@ -56,8 +62,8 @@
        .guns
        .filter(gun => ((gun[2] == gunData.src_number) && (gun[pressure]/parameters.gunPressureNominal - 1) > parameters.gunPressureToleranceRatio))
        .map(gun =>
          `source ${gun[2]}, string ${gun[0]}, gun ${gun[1]}, pressure: ${gun[pressure]} / ${parameters.gunPressureNominal} = ${(Math.abs(gunData.manifold/parameters.gunPressureNominal - 1)*100).toFixed(1)}% > ${(parameters.gunPressureToleranceRatio*100).toFixed(1)}%`
        );
          `source ${gun[2]}, string ${gun[0]}, gun ${gun[1]}, pressure: ${gun[pressure]} / ${parameters.gunPressureNominal} = ${(Math.abs(gun[pressure]/parameters.gunPressureNominal - 1)*100).toFixed(2)}% > ${(parameters.gunPressureToleranceRatio*100).toFixed(2)}%`
        ).join(" \n");
      results && results.length
        ? results
        : true
@@ -159,7 +165,7 @@
        .filter(gun => Math.abs(gun[firetime]-gun[aimpoint]) >= parameters.gunTimingWarning && Math.abs(gun[firetime]-gun[aimpoint]) <= parameters.gunTiming)
        .forEach(gun => {
          const value = Math.abs(gun[firetime]-gun[aimpoint]);
          result.push(`Delta error: source ${gun[2]}, string ${gun[0]}, gun ${gun[1]}: ${parameters.gunTimingWarning} ≤ ${value.toFixed(2)} ≤ ${parameters.gunTiming}`);
          result.push(`Delta warning: source ${gun[2]}, string ${gun[0]}, gun ${gun[1]}: ${parameters.gunTimingWarning} ≤ ${value.toFixed(2)} ≤ ${parameters.gunTiming}`);
        });
      }
      if (result.length) {
@@ -201,7 +207,7 @@
    check: |
      const currentShot = currentItem;
      Math.abs(currentShot.error_i) <= parameters.crosslineError
        || `Crossline error (${currentShot.type}): ${currentShot.error_i.toFixed(1)} > ${parameters.crosslineError}`
        || `Crossline error (${currentShot.type}): ${currentShot.error_i.toFixed(2)} > ${parameters.crosslineError}`

  -
    name: "Inline"
@@ -209,7 +215,7 @@
    check: |
      const currentShot = currentItem;
      Math.abs(currentShot.error_j) <= parameters.inlineError
        || `Inline error (${currentShot.type}): ${currentShot.error_j.toFixed(1)} > ${parameters.inlineError}`
        || `Inline error (${currentShot.type}): ${currentShot.error_j.toFixed(2)} > ${parameters.inlineError}`

  -
    name: "Centre of source preplot deviation (moving average)"
@@ -222,11 +228,16 @@
    id: crossline_average
    check: |
      const currentSequence = currentItem;
      const i_err = currentSequence.shots.filter(s => s.error_i != null).map(a => a.error_i);
      //const i_err = shotpoints.filter(s => s.error_i != null).map(a => a.error_i);
      const i_err = shotpoints.map(i =>
        (i.errorfinal?.coordinates ?? i.errorraw?.coordinates)[0]
      )
      .filter(i => !isNaN(i));

      if (i_err.length) {
        const avg = i_err.reduce( (a, b) => a+b)/i_err.length;
        avg <= parameters.crosslineErrorAverage ||
          `Average crossline error: ${avg.toFixed(1)} > ${parameters.crosslineErrorAverage}`
          `Average crossline error: ${avg.toFixed(2)} > ${parameters.crosslineErrorAverage}`
      } else {
        `Sequence ${currentSequence.sequence} has no shots within preplot`
      }
@@ -239,16 +250,27 @@
    check: |
      const currentSequence = currentItem;
      const n = parameters.inlineErrorRunningAverageShots; // For brevity
      const results = currentSequence.shots.slice(n/2, -n/2).map( (shot, index) => {
        const shots = currentSequence.shots.slice(index, index+n).map(i => i.error_j).filter(i => i !== null);
      const results = shotpoints.slice(n/2, -n/2).map( (shot, index) => {
        const shots = shotpoints.slice(index, index+n).map(i =>
          (i.errorfinal?.coordinates ?? i.errorraw?.coordinates)[1]
        ).filter(i => i !== null);
        if (!shots.length) {
          // We are outside the preplot
          // Nothing to see here, move along
          return true;
        }
        const mean = shots.reduce( (a, b) => a+b ) / shots.length;
        return Math.abs(mean) <= parameters.inlineErrorRunningAverageValue ||
          `Running average inline error: shot ${shot.point}, ${mean.toFixed(1)} > ${parameters.inlineErrorRunningAverageValue}`
        return Math.abs(mean) <= parameters.inlineErrorRunningAverageValue || [
          shot.point,
          {
            remarks: `Running average inline error: ${mean.toFixed(2)} > ${parameters.inlineErrorRunningAverageValue}`,
            labels: [ "QC", "QCNav" ]
          }
        ]
      }).filter(i => i !== true);

      results.length == 0 || results.join("\n");
      results.length == 0 || {
        remarks: "Sequence exceeds inline error running average limit",
        shots: Object.fromEntries(results)
      }
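# A note on the convention these checks appear to follow (inferred from the
# hunks above rather than documented here): a check expression evaluates to
# true to pass, to a string to raise a sequence-level remark, or to an object
# of the form { remarks, shots: { [point]: { remarks, labels } } } to attach
# per-shot QC entries carrying labels such as "QC", "QCAcq" or "QCNav".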
lib/www/client/source/package-lock.json (generated, 13052 lines; diff suppressed because it is too large)
@@ -3,11 +3,11 @@
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "serve": "vue-cli-service serve",
    "serve": "vue-cli-service serve --host=0.0.0.0",
    "build": "vue-cli-service build"
  },
  "dependencies": {
    "@mdi/font": "^5.6.55",
    "@mdi/font": "^7.2.96",
    "core-js": "^3.6.5",
    "d3": "^7.0.1",
    "jwt-decode": "^3.0.0",
@@ -16,7 +16,7 @@
    "leaflet-realtime": "^2.2.0",
    "leaflet.markercluster": "^1.4.1",
    "marked": "^2.0.3",
    "plotly.js-dist": "^2.5.0",
    "plotly.js-dist": "^2.27.0",
    "suncalc": "^1.8.0",
    "typeface-roboto": "0.0.75",
    "vue": "^2.6.12",
@@ -31,7 +31,7 @@
    "@vue/cli-plugin-router": "~4.4.0",
    "@vue/cli-plugin-vuex": "~4.4.0",
    "@vue/cli-service": "^4.5.13",
    "sass": "^1.26.11",
    "sass": "~1.32",
    "sass-loader": "^8.0.0",
    "stylus": "^0.54.8",
    "stylus-loader": "^3.0.2",
@@ -26,7 +26,7 @@
<style lang="stylus">
@import '../node_modules/typeface-roboto/index.css'
@import '../node_modules/@mdi/font/css/materialdesignicons.css'

.markdown.v-textarea textarea
  font-family monospace
  line-height 1.1 !important
@@ -35,7 +35,7 @@
</style>

<script>
import { mapActions } from 'vuex';
import { mapActions, mapGetters } from 'vuex';
import DougalNavigation from './components/navigation';
import DougalFooter from './components/footer';

@@ -53,7 +53,8 @@ export default {

  computed: {
    snackText () { return this.$store.state.snack.snackText },
    snackColour () { return this.$store.state.snack.snackColour }
    snackColour () { return this.$store.state.snack.snackColour },
    ...mapGetters(["serverEvent"])
  },

  watch: {
@@ -66,7 +67,7 @@ export default {
    snackText (newVal) {
      this.snack = !!newVal;
    },

    snack (newVal) {
      // When the snack is hidden (one way or another), clear
      // the text so that if we receive the same message again
@@ -75,17 +76,25 @@ export default {
      if (!newVal) {
        this.$store.commit('setSnackText', "");
      }
    },

    async serverEvent (event) {
      if (event.channel == "project" && event.payload?.schema == "public") {
        // Projects changed in some way or another
        await this.refreshProjects();
      }
    }
  },

  methods: {
    ...mapActions(["setCredentials"])
    ...mapActions(["setCredentials", "refreshProjects"])
  },

  mounted () {
  async mounted () {
    // Local Storage values are always strings
    this.$vuetify.theme.dark = localStorage.getItem("darkTheme") == "true";
    this.setCredentials()
    await this.setCredentials();
    this.refreshProjects();
  }

};
@@ -0,0 +1,48 @@
<template>
  <v-tabs :value="tab" show-arrows>
    <v-tab v-for="tab, index in tabs" :key="index" link :to="tabLink(tab.href)" v-text="tab.text"></v-tab>
  </v-tabs>
</template>

<script>
import { mapActions, mapGetters } from 'vuex';

export default {
  name: 'DougalAppBarExtensionProject',
  data() {
    return {
      tabs: [
        { href: "summary", text: "Summary" },
        { href: "lines", text: "Lines" },
        { href: "plan", text: "Plan" },
        { href: "sequences", text: "Sequences" },
        { href: "calendar", text: "Calendar" },
        { href: "log", text: "Log" },
        { href: "qc", text: "QC" },
        { href: "graphs", text: "Graphs" },
        { href: "map", text: "Map" }
      ]
    };
  },

  computed: {

    page () {
      return this.$route.path.split(/\/+/)[3];
    },

    tab () {
      return this.tabs.findIndex(t => t.href == this.page);
    },

  },

  methods: {

    tabLink (href) {
      return `/projects/${this.$route.params.project}/${href}`;
    }

  }
}
</script>
@@ -188,9 +188,9 @@ export default {
    labelToItem (k) {
      return {
        text: k,
        icon: this.labels[k].view?.icon,
        colour: this.labels[k].view?.colour,
        title: this.labels[k].view?.description
        icon: this.labels?.[k]?.view?.icon,
        colour: this.labels?.[k]?.view?.colour,
        title: this.labels?.[k]?.view?.description
      };
    },

@@ -44,7 +44,7 @@
    <template v-slot:activator="{ on, attrs }">
      <v-text-field
        v-model="tsDate"
        :disabled="!!(sequence || point || entrySequence || entryPoint)"
        :disabled="!!(entrySequence || entryPoint)"
        label="Date"
        suffix="UTC"
        prepend-icon="mdi-calendar"
@@ -64,7 +64,7 @@
    <v-col>
      <v-text-field
        v-model="tsTime"
        :disabled="!!(sequence || point || entrySequence || entryPoint)"
        :disabled="!!(entrySequence || entryPoint)"
        label="Time"
        suffix="UTC"
        prepend-icon="mdi-clock-outline"
@@ -123,28 +123,11 @@

    <v-row dense>
      <v-col cols="12">
        <v-combobox
          ref="remarks"
          v-model="entryRemarks"
          :disabled="loading"
          :search-input.sync="entryRemarksInput"
          :items="remarksAvailable"
          :filter="searchRemarks"
          item-text="text"
          return-object
          label="Remarks"
          prepend-icon="mdi-text-box-outline"
          append-outer-icon="mdi-magnify"
          @click:append-outer="(e) => remarksMenu = e"
        ></v-combobox>

        <dougal-context-menu
          :value="remarksMenu"
          @input="handleRemarksMenu"
          :items="presetRemarks"
          absolute
        ></dougal-context-menu>

        <dougal-event-select
          v-bind.sync="entryRemarks"
          :preset-remarks="presetRemarks"
          @update:labels="(v) => this.entryLabels = v"
        ></dougal-event-select>
      </v-col>
    </v-row>

@@ -255,6 +238,15 @@
    >
      Cancel
    </v-btn>
    <v-btn v-if="!id && (entrySequence || entryPoint)"
      color="info"
      text
      title="Enter an event by time"
      @click="timed"
    >
      <v-icon left small>mdi-clock-outline</v-icon>
      Timed
    </v-btn>
    <v-spacer></v-spacer>
    <v-btn
      :disabled="!canSave"
@@ -280,6 +272,7 @@
<script>
import { mapActions } from 'vuex';
import DougalContextMenu from '@/components/context-menu';
import DougalEventSelect from '@/components/event-select';

function stringSort (a, b) {
  return a == b
@@ -298,6 +291,7 @@ function flattenRemarks(items, keywords=[], labels=[]) {
    if (!item.items) {
      result.push({
        text: item.text,
        properties: item.properties,
        labels: labels.concat(item.labels??[]),
        keywords
      })
@@ -332,7 +326,8 @@ export default {
  name: 'DougalEventEdit',

  components: {
    DougalContextMenu
    DougalContextMenu,
    DougalEventSelect
  },

  props: {
@@ -344,6 +339,7 @@ export default {
    sequence: { type: Number },
    point: { type: Number },
    remarks: { type: String },
    meta: { type: Object },
    labels: { type: Array, default: () => [] },
    latitude: { type: Number },
    longitude: { type: Number },
@@ -361,18 +357,11 @@ export default {
    entrySequence: null,
    entryPoint: null,
    entryRemarks: null,
    entryRemarksInput: null,
    entryLatitude: null,
    entryLongitude: null
  }),

  computed: {
    remarksAvailable () {
      return this.entryRemarksInput == this.entryRemarks?.text ||
        this.entryRemarksInput == this.entryRemarks
        ? []
        : flattenRemarks(this.presetRemarks);
    },

    allSelected () {
      return this.entryLabels.length === this.items.length
@@ -487,11 +476,8 @@ export default {

      this.entrySequence = this.sequence;
      this.entryPoint = this.point;
      this.entryRemarks = this.remarks;
      this.entryLabels = [...(this.labels??[])];

      // Focus remarks field
      this.$nextTick(() => this.$refs.remarks.focus());
      this.makeEntryRemarks();
    }
  },

@@ -562,22 +548,13 @@ export default {
      };
    },

    searchRemarks (item, queryText, itemText) {
      const needle = queryText.toLowerCase();
      const text = item.text.toLowerCase();
      const keywords = item.keywords.map(i => i.toLowerCase());
      const labels = item.labels.map(i => i.toLowerCase());
      return text.includes(needle) ||
        keywords.some(i => i.includes(needle)) ||
        labels.some(i => i.includes(needle));
    },

    handleRemarksMenu (event) {
      if (typeof event == 'boolean') {
        this.remarksMenu = event;
      } else {
        this.entryRemarks = event;
        this.remarksMenu = false;
    makeEntryRemarks () {
      this.entryRemarks = {
        template: null,
        schema: {},
        values: [],
        ...this.meta?.structured_values,
        text: this.remarks
      }
    },

@@ -626,16 +603,40 @@ export default {
      }
    },

    timed () {
      const tstamp = (new Date()).toISOString();
      this.entrySequence = null;
      this.entryPoint = null;
      this.tsDate = tstamp.substr(0, 10);
      this.tsTime = tstamp.substr(11, 8);
    },

    close () {
      this.entryLabels = this.selectedLabels.map(this.labelToItem)
      this.$emit("input", false);
    },

    save () {
      // In case the focus goes directly from the remarks field
      // to the Save button.

      let meta;

      if (this.entryRemarks.values?.length) {
        meta = {
          structured_values: {
            template: this.entryRemarks.template,
            schema: this.entryRemarks.schema,
            values: this.entryRemarks.values
          }
        };
      }

      const data = {
        id: this.id,
        remarks: this.entryRemarksText,
        labels: this.entryLabels
        labels: this.entryLabels,
        meta
      };

      /* NOTE This is the purist way.
lib/www/client/source/src/components/event-properties.vue (new file, 142 lines)
@@ -0,0 +1,142 @@
<template>
  <v-card flat>
    <v-card-subtitle v-text="text">
    </v-card-subtitle>
    <v-card-text style="max-height:350px;overflow:scroll;">
      <v-form>
        <template v-for="key in fieldKeys">
          <template v-if="schema[key].enum">
            <v-select v-if="schema[key].type == 'number'" :key="key"
              v-model.number="fieldValues[key]"
              :items="schema[key].enum"
              :label="schema[key].title"
              :hint="schema[key].description"
              @input="updateFieldValue(key, Number($event))"
            ></v-select>
            <v-select v-else :key="key"
              v-model="fieldValues[key]"
              :items="schema[key].enum"
              :label="schema[key].title"
              :hint="schema[key].description"
              @input="updateFieldValue(key, $event)"
            ></v-select>
          </template>
          <template v-else>
            <v-text-field v-if="schema[key].type == 'number'" :key="key"
              v-model.number="fieldValues[key]"
              type="number"
              :min="schema[key].minimum"
              :max="schema[key].maximum"
              :step="schema[key].multiplier"
              :label="schema[key].title"
              :hint="schema[key].description"
              @input="updateFieldValue(key, Number($event))"
            >
            </v-text-field>
            <v-text-field v-else-if="schema[key].type == 'string'" :key="key"
              v-model="fieldValues[key]"
              :label="schema[key].title"
              :hint="schema[key].description"
              @input="updateFieldValue(key, $event)"
            >
            </v-text-field>
            <v-checkbox v-else-if="schema[key].type == 'boolean'" :key="key"
              v-model="fieldValues[key]"
              :label="schema[key].title"
              :hint="schema[key].description"
              @change="updateFieldValue(key, $event)"
            >
            </v-checkbox>
            <v-text-field v-else :key="key"
              v-model="fieldValues[key]"
              :label="schema[key].title"
              :hint="schema[key].description"
              @input="updateFieldValue(key, $event)"
            >
            </v-text-field>
          </template>
        </template>
      </v-form>
    </v-card-text>
  </v-card>
</template>

<script>
import { mapActions, mapGetters } from 'vuex';

export default {
  name: "DougalEventPropertiesEdit",

  components: {
  },

  props: {
    value: String,
    template: String,
    schema: Object,
    values: Array
  },

  data () {
    return {
    }
  },

  computed: {

    fieldKeys () {
      return Object.entries(this.schema).sort((a, b) => a[1].title > b[1].title ? 1 : -1).map(i => i[0]);
    },
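    // `values` is positional: the i-th element pairs with the i-th key of
    // `schema`, falling back to that field's declared default when unset;
    // updateFieldValue() below re-emits the whole array in the same order.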
    fieldValues () {
      const keys = Object.keys(this.schema ?? this.values);
      return Object.fromEntries(
        keys.map( (k, idx) =>
          [ k, this.values?.[idx] ?? this.schema[k].default ]));
    },

    /*
    fields () {
      // TODO Remove this and rename fields → schema
      return this.schema;
    },
    */

    text () {
      if (this.template) {
        const rx = /{{([a-z_][a-z0-9_]*)}}/ig;
        return this.template.replace(rx, (match, p1) => this.fieldValues[p1] ?? "(n/a)");
      }
    }

  },

  watch: {

    values () {
      this.$emit("input", this.text);
    },

    template () {
      this.$emit("input", this.text);
    },

    schema () {
      this.$emit("input", this.text);
    }

  },

  methods: {
    updateFieldValue(key, ev) {
      const values = {...this.fieldValues};
      values[key] = ev;
      this.$emit("update:values", Object.values(values));
    }
  },

  mounted () {
  }
}

</script>
lib/www/client/source/src/components/event-select.vue (new file, 163 lines)
@@ -0,0 +1,163 @@
<template>
  <div>
    <v-combobox
      ref="remarks"
      :value="text"
      @input="handleComboBox"
      :search-input.sync="entryRemarksInput"
      :items="remarksAvailable"
      :filter="searchRemarks"
      item-text="text"
      return-object
      label="Remarks"
      hint="Placeholders: @DMS@, @DEG@, @EN@, @WD@, @BSP@, @CMG@, …"
      prepend-icon="mdi-text-box-outline"
      append-outer-icon="mdi-magnify"
      @click:append-outer="(e) => remarksMenu = e"
    ></v-combobox>

    <dougal-context-menu
      :value="remarksMenu"
      @input="handleRemarksMenu"
      :items="presetRemarks"
      absolute
    ></dougal-context-menu>

    <v-expansion-panels v-if="haveProperties"
      class="px-8"
      :value="0"
    >
      <v-expansion-panel>
        <v-expansion-panel-header>Properties</v-expansion-panel-header>
        <v-expansion-panel-content>
          <dougal-event-properties-edit
            :value="text"
            @input="$emit('update:text', $event)"
            :template="template"
            :schema="schema"
            :values="values"
            @update:values="$emit('update:values', $event)"
          >
          </dougal-event-properties-edit>
        </v-expansion-panel-content>
      </v-expansion-panel>
    </v-expansion-panels>
  </div>
</template>

<script>
import { mapActions, mapGetters } from 'vuex';
import DougalContextMenu from '@/components/context-menu';
import DougalEventPropertiesEdit from '@/components/event-properties';

export default {
  name: "DougalEventSelect",

  components: {
    DougalContextMenu,
    DougalEventPropertiesEdit
  },

  props: {
    text: String,
    template: String,
    schema: Object,
    values: Array,
    presetRemarks: Array
  },

  data () {
    return {
      entryRemarksInput: null,
      remarksMenu: false,
    }
  },

  computed: {
    remarksAvailable () {
      return this.entryRemarksInput == this.text
        ? []
        : this.flattenRemarks(this.presetRemarks);
    },

    haveProperties () {
      for (const key in this.schema) {
        return true;
      }
      return false;
    }

  },

  watch: {
  },

  methods: {

    flattenRemarks (items, keywords=[], labels=[]) {
      const result = [];

      if (items) {
        for (const item of items) {
          if (!item.items) {
            result.push({
              text: item.text,
              properties: item.properties,
              labels: labels.concat(item.labels??[]),
              keywords
            })
          } else {
            const k = [...keywords, item.text];
            const l = [...labels, ...(item.labels??[])];
            result.push(...this.flattenRemarks(item.items, k, l))
          }
        }
      }
      return result;
    },

    searchRemarks (item, queryText, itemText) {
      const needle = queryText.toLowerCase();
      const text = item.text.toLowerCase();
      const keywords = item.keywords.map(i => i.toLowerCase());
      const labels = item.labels.map(i => i.toLowerCase());
      return text.includes(needle) ||
        keywords.some(i => i.includes(needle)) ||
        labels.some(i => i.includes(needle));
    },

    handleComboBox (event) {
      if (typeof event == "object") {
        this.$emit("update:text", event.text);
        this.$emit("update:template", event.template ?? event.text);
        this.$emit("update:schema", event.properties);
        this.$emit("update:labels", event.labels);
      } else {
        this.$emit("update:text", event);
        this.$emit("update:template", null);
        this.$emit("update:schema", null);
        this.$emit("update:labels", []);
      }
    },

    handleRemarksMenu (event) {
      if (typeof event == 'boolean') {
        this.remarksMenu = event;
      } else {
        this.$emit("update:text", event.text);
        this.$emit("update:template", event.template ?? event.text);
        this.$emit("update:schema", event.properties);
        this.$emit("update:labels", event.labels);
        this.remarksMenu = false;
      }
    },
  },

  mounted () {
    // Focus remarks field
    this.$nextTick(() => this.$refs.remarks.focus());
  }
}

</script>
@@ -11,7 +11,7 @@

<v-icon v-if="serverConnected" class="mr-6" small title="Connected to server">mdi-lan-connect</v-icon>
<v-icon v-else class="mr-6" small color="red" title="Server connection lost (we'll reconnect automatically when the server comes back)">mdi-lan-disconnect</v-icon>

<dougal-notifications-control class="mr-6"></dougal-notifications-control>

<div title="Night mode">
@@ -31,7 +31,7 @@
  font-family: "Bank Gothic Medium";
  src: local("Bank Gothic Medium"), url("/fonts/bank-gothic-medium.woff");
}

.brand {
  font-family: "Bank Gothic Medium";
}
@@ -56,7 +56,7 @@ export default {
      const date = new Date();
      return date.getUTCFullYear();
    },

    ...mapState({serverConnected: state => state.notify.serverConnected})
  }
};
@@ -50,7 +50,7 @@ import unpack from '@/lib/unpack.js';

export default {
  name: 'DougalGraphArraysIJScatter',

  props: [ "data", "settings" ],

  data () {
@@ -62,15 +62,15 @@ export default {
      histogram: false
    };
  },

  computed: {

    //...mapGetters(['apiUrl'])

  },

  watch: {

    data (newVal, oldVal) {
      if (newVal === null) {
        this.busy = true;
@@ -79,46 +79,46 @@ export default {
        this.plot();
      }
    },

    settings () {
      for (const key in this.settings) {
        this[key] = this.settings[key];
      }
    },

    histogram () {
      this.plot();
      this.$emit("update:settings", {[`${this.$options.name}.histogram`]: this.histogram});
    },

    scatterplot () {
      this.plot();
      this.$emit("update:settings", {[`${this.$options.name}.scatterplot`]: this.scatterplot});
    }
  },

  methods: {

    plot () {

      this.plotSeries();

      if (this.histogram) {
        this.plotHistogram();
      }

      if (this.scatterplot) {
        this.plotScatter();
      }
    },

    plotSeries () {
      if (!this.data) {
        return;
      }

      function transform (d, idx=0, otherParams={}) {
        const errortype = d.errorfinal ? "errorfinal" : "errorraw";
        const coords = unpack(unpack(d, errortype), "coordinates");
@@ -141,7 +141,7 @@ export default {
        };
        return data;
      }

      const data = [
        transform(this.data.items, 1, {
          xaxis: 'x',
@@ -155,7 +155,7 @@ export default {
        })
      ];
      this.busy = false;

      const layout = {
        //autosize: true,
        title: {text: "Inline / crossline error – sequence %{meta.sequence}"},
@@ -177,25 +177,25 @@ export default {
        },
        meta: this.data.meta
      };

      const config = {
        editable: false,
        displaylogo: false
      };

      this.graph[0] = Plotly.newPlot(this.$refs.graph0, data, layout, config);
    },

    plotScatter () {

      console.log("plot");

      if (!this.data) {
        console.log("missing data");
        return;
      }
      console.log("Will plot sequence", this.data.meta.project, this.data.meta.sequence);

      function transform (d) {
        const errortype = d.errorfinal ? "errorfinal" : "errorraw";
        const coords = unpack(unpack(d, errortype), "coordinates");
@@ -217,10 +217,10 @@ export default {
        }];
        return data;
      }

      const data = transform(this.data.items);
      this.busy = false;

      const layout = {
        //autosize: true,
        //title: {text: "Inline / crossline error – sequence %{meta.sequence}"},
@@ -235,22 +235,22 @@ export default {
        },
        meta: this.data.meta
      };

      const config = {
        editable: false,
        displaylogo: false
      };

      this.graph[1] = Plotly.newPlot(this.$refs.graph1, data, layout, config);
    },

    plotHistogram () {

      if (!this.data) {
        console.log("missing data");
        return;
      }

      function transform (d, idx=0, otherParams={}) {
        const errortype = d.errorfinal ? "errorfinal" : "errorraw";
        const coords = unpack(unpack(d, errortype), "coordinates");
@@ -271,7 +271,7 @@ export default {
        };
        return data;
      }

      const data = [
        transform(this.data.items, 0, {
          xaxis: 'x',
@@ -284,7 +284,7 @@ export default {
          name: 'Inline'
        })
      ];

      const layout = {
        //autosize: true,
        //title: {text: "Inline / crossline error – sequence %{meta.sequence}"},
@@ -308,7 +308,7 @@ export default {
        },
        meta: this.data.meta
      };

      const config = {
        editable: false,
        displaylogo: false
@@ -319,12 +319,12 @@ export default {

      this.graph[2] = Plotly.newPlot(this.$refs.graph2, data, layout, config);
    },

    replot () {
      if (!this.graph.length) {
        return;
      }

      console.log("Replotting");
      this.graph.forEach( (graph, idx) => {
        const ref = this.$refs["graph"+idx];
@@ -334,23 +334,23 @@ export default {
        });
      });
    },

  },

  async mounted () {

    if (this.data) {
      this.plot();
    } else {
      this.busy = true;
    }

    this.resizeObserver = new ResizeObserver(this.replot)
    this.resizeObserver.observe(this.$refs.graph0);
    this.resizeObserver.observe(this.$refs.graph1);
    this.resizeObserver.observe(this.$refs.graph2);
  },

  beforeDestroy () {
    if (this.resizeObserver) {
      this.resizeObserver.unobserve(this.$refs.graph2);
|
||||
<v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
|
||||
<v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
|
||||
</v-card-title>
|
||||
|
||||
|
||||
<v-container fluid fill-height>
|
||||
<v-row>
|
||||
<v-col>
|
||||
@@ -49,7 +49,7 @@ import * as aes from '@/lib/graphs/aesthetics.js';
|
||||
|
||||
export default {
|
||||
name: 'DougalGraphGunsDepth',
|
||||
|
||||
|
||||
props: [ "data", "settings" ],
|
||||
|
||||
data () {
|
||||
@@ -62,16 +62,16 @@ export default {
|
||||
violinplot: false
|
||||
};
|
||||
},
|
||||
|
||||
|
||||
computed: {
|
||||
//...mapGetters(['apiUrl'])
|
||||
},
|
||||
|
||||
|
||||
watch: {
|
||||
|
||||
|
||||
data (newVal, oldVal) {
|
||||
console.log("data changed");
|
||||
|
||||
|
||||
if (newVal === null) {
|
||||
this.busy = true;
|
||||
} else {
|
||||
@@ -79,42 +79,42 @@ export default {
|
||||
this.plot();
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
settings () {
|
||||
for (const key in this.settings) {
|
||||
this[key] = this.settings[key];
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
shotpoint () {
|
||||
if (this.shotpoint) {
|
||||
this.replot();
|
||||
}
|
||||
this.$emit("update:settings", {[`${this.$options.name}.shotpoint`]: this.shotpoint});
|
||||
},
|
||||
|
||||
|
||||
violinplot () {
|
||||
if (this.violinplot) {
|
||||
this.plotViolin();
|
||||
}
|
||||
this.$emit("update:settings", {[`${this.$options.name}.violinplot`]: this.violinplot});
|
||||
}
|
||||
|
||||
|
||||
},
|
||||
|
||||
|
||||
methods: {
|
||||
|
||||
|
||||
plot () {
|
||||
this.plotSeries();
|
||||
if (this.violinplot) {
|
||||
this.plotViolin();
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
async plotSeries () {
|
||||
|
||||
|
||||
function transformSeries (d, src_number, otherParams={}) {
|
||||
|
||||
|
||||
const meta = src_number
|
||||
? unpack(d, "meta").filter( s => s.src_number == src_number )
|
||||
: unpack(d, "meta");
|
||||
@@ -122,11 +122,11 @@ export default {
|
||||
const gunDepths = guns.map(s => s.map(g => g[10]));
|
||||
const gunDepthsSorted = gunDepths.map(s => d3a.sort(s));
|
||||
const gunsAvgDepth = gunDepths.map( (s, sidx) => d3a.mean(s) );
|
||||
|
||||
|
||||
const x = src_number
|
||||
? unpack(d.filter(s => s.meta.src_number == src_number), "point")
|
||||
: unpack(d, "point");
|
||||
|
||||
|
||||
const tracesGunDepths = [{
|
||||
type: "scatter",
|
||||
mode: "lines",
|
||||
@@ -150,7 +150,7 @@ export default {
|
||||
y: gunDepthsSorted.map(s => d3a.quantileSorted(s, 0.75)),
|
||||
...aes.gunArrays[src_number || 1].max
|
||||
}];
|
||||
|
||||
|
||||
const tracesGunsDepthsIndividual = {
|
||||
//name: `Array ${src_number} outliers`,
|
||||
type: "scatter",
|
||||
@@ -166,22 +166,22 @@ export default {
|
||||
).flat(),
|
||||
...aes.gunArrays[src_number || 1].out
|
||||
};
|
||||
|
||||
|
||||
const data = [ ...tracesGunDepths, tracesGunsDepthsIndividual ]
|
||||
return data;
|
||||
}
|
||||
|
||||
|
||||
if (!this.data) {
|
||||
console.log("missing data");
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
const sources = [ ...new Set(unpack(this.data.items, "meta").map( s => s.src_number ))];
|
||||
const data = sources.map( src_number => transformSeries(this.data.items, src_number) ).flat();
|
||||
console.log("Sources", sources);
|
||||
console.log(data);
|
||||
this.busy = false;
|
||||
|
||||
|
||||
const layout = {
|
||||
//autosize: true,
|
||||
title: {text: "Gun depths – sequence %{meta.sequence}"},
|
||||
@@ -198,12 +198,12 @@ export default {
|
||||
},
|
||||
meta: this.data.meta
|
||||
};
|
||||
|
||||
|
||||
const config = {
|
||||
editable: false,
|
||||
displaylogo: false
|
||||
};
|
||||
|
||||
|
||||
this.graph = Plotly.newPlot(this.$refs.graphSeries, data, layout, config);
|
||||
this.$refs.graphSeries.on('plotly_hover', (d) => {
|
||||
const point = d.points[0].x;
|
||||
@@ -220,7 +220,7 @@ export default {
|
||||
groups: unpack(guns, 0)
|
||||
}],
|
||||
}];
|
||||
|
||||
|
||||
const layout = {
|
||||
title: {text: "Gun depths – shot %{meta.point}"},
|
||||
height: 300,
|
||||
@@ -236,19 +236,19 @@ export default {
|
||||
point
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
const config = { displaylogo: false };
|
||||
|
||||
|
||||
Plotly.react(this.$refs.graphBar, data, layout, config);
|
||||
});
|
||||
},
|
||||
|
||||
|
||||
async plotViolin () {
|
||||
|
||||
|
||||
function transformViolin (d, opts = {}) {
|
||||
|
||||
|
||||
const styles = [];
|
||||
|
||||
|
||||
unpack(unpack(d, "meta"), "guns").flat().forEach(i => {
|
||||
const gunId = i[1];
|
||||
const arrayId = i[2];
|
||||
@@ -256,7 +256,7 @@ export default {
|
||||
styles[gunId] = Object.assign({target: gunId}, aes.gunArrayViolins[arrayId]);
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
const data = {
|
||||
type: 'violin',
|
||||
x: unpack(unpack(unpack(d, "meta"), "guns").flat(), 1), // Gun number
|
||||
@@ -277,21 +277,21 @@ export default {
|
||||
styles: styles.filter(i => !!i)
|
||||
}]
|
||||
}
|
||||
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
console.log("plot violin");
|
||||
if (!this.data) {
|
||||
console.log("missing data");
|
||||
return;
|
||||
}
|
||||
console.log("Will plot sequence", this.data.meta.project, this.data.meta.sequence);
|
||||
|
||||
|
||||
const data = [ transformViolin(this.data.items) ];
|
||||
this.busy = false;
|
||||
|
||||
|
||||
const layout = {
|
||||
//autosize: true,
|
||||
showlegend: false,
|
||||
@@ -307,21 +307,21 @@ export default {
|
||||
},
|
||||
meta: this.data.meta
|
||||
};
|
||||
|
||||
|
||||
const config = {
|
||||
editable: false,
|
||||
displaylogo: false
|
||||
};
|
||||
|
||||
|
||||
this.graph = Plotly.newPlot(this.$refs.graphViolin, data, layout, config);
|
||||
},
|
||||
|
||||
|
||||
|
||||
replot () {
|
||||
if (!this.graph) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
console.log("Replotting");
|
||||
Object.values(this.$refs).forEach( ref => {
|
||||
if (ref.data) {
|
||||
@@ -333,25 +333,25 @@ export default {
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
|
||||
...mapActions(["api"])
|
||||
|
||||
|
||||
},
|
||||
|
||||
|
||||
mounted () {
|
||||
|
||||
|
||||
if (this.data) {
|
||||
this.plot();
|
||||
} else {
|
||||
this.busy = true;
|
||||
}
|
||||
|
||||
|
||||
this.resizeObserver = new ResizeObserver(this.replot)
|
||||
this.resizeObserver.observe(this.$refs.graphSeries);
|
||||
this.resizeObserver.observe(this.$refs.graphViolin);
|
||||
this.resizeObserver.observe(this.$refs.graphBar);
|
||||
},
|
||||
|
||||
|
||||
beforeDestroy () {
|
||||
if (this.resizeObserver) {
|
||||
this.resizeObserver.unobserve(this.$refs.graphBar);
|
||||
|
||||
@@ -3,7 +3,7 @@
<v-card-title class="headline">
Gun details
</v-card-title>

<v-container fluid fill-height>
<v-row>
<v-col>
@@ -37,7 +37,7 @@ import * as aes from '@/lib/graphs/aesthetics.js';

export default {
name: 'DougalGraphGunsDepth',

props: [ "data" ],

data () {
@@ -54,16 +54,16 @@ export default {
]
};
},

computed: {
//...mapGetters(['apiUrl'])
},

watch: {

data (newVal, oldVal) {
console.log("data changed");

if (newVal === null) {
this.busy = true;
} else {
@@ -71,31 +71,31 @@ export default {
this.plot();
}
},

violinplot () {
if (this.violinplot) {
this.plotViolin();
}
}

},

methods: {

plot () {
this.plotHeat();
},

async plotHeat () {

if (!this.data) {
console.log("missing data");
return;
}

function transform (data, aspects=["Depth", "Pressure"]) {

const facets = [
// Mode
{
@@ -103,9 +103,9 @@ export default {
name: "Mode",
hovertemplate: "SP%{x}<br>%{y}<br>%{text}",
},

text: [ "Off", "Auto", "Manual", "Disabled" ],

conversion: (gun, shot) => {
switch (gun[3]) {
case "A":
@@ -119,16 +119,16 @@ export default {
}
}
},

// Detect
{
params: {
name: "Detect",
hovertemplate: "SP%{x}<br>%{y}<br>%{text}",
},

text: [ "Zero", "Peak", "Level" ],

conversion: (gun, shot) => {
switch (gun[4]) {
case "P":
@@ -140,41 +140,41 @@ export default {
}
}
},

// Autofire
{
params: {
name: "Autofire",
hovertemplate: "SP%{x}<br>%{y}<br>%{text}",
},

text: [ "False", "True" ],

conversion: (gun, shot) => {
return gun[5] ? 1 : 0;
}
},

// Aimpoint
{
params: {
name: "Aimpoint",
hovertemplate: "SP%{x}<br>%{y}<br>%{z} ms"
},

conversion: (gun, shot) => gun[7]
},

// Firetime
{
params: {
name: "Firetime",
hovertemplate: "SP%{x}<br>%{y}<br>%{z} ms"
},

conversion: (gun, shot) => gun[2] == shot.meta.src_number ? gun[8] : null
},

// Delta
{
params: {
@@ -187,7 +187,7 @@ export default {
zmin: -2,
zmax: 2
},

conversion: (gun, shot) => gun[2] == shot.meta.src_number ? gun[7]-gun[8] : null
},

@@ -197,7 +197,7 @@ export default {
name: "Delay",
hovertemplate: "SP%{x}<br>%{y}<br>%{z} ms"
},

conversion: (gun, shot) => gun[9]
},

@@ -207,7 +207,7 @@ export default {
name: "Depth",
hovertemplate: "SP%{x}<br>%{y}<br>%{z} m"
},

conversion: (gun, shot) => gun[10]
},

@@ -217,7 +217,7 @@ export default {
name: "Pressure",
hovertemplate: "SP%{x}<br>%{y}<br>%{z} psi"
},

conversion: (gun, shot) => gun[11]
},

@@ -227,7 +227,7 @@ export default {
name: "Volume",
hovertemplate: "SP%{x}<br>%{y}<br>%{z} in³"
},

conversion: (gun, shot) => gun[12]
},

@@ -237,14 +237,14 @@ export default {
name: "Filltime",
hovertemplate: "SP%{x}<br>%{y}<br>%{z} ms"
},

// NOTE that filltime is applicable to the *non* firing guns
conversion: (gun, shot) => gun[2] == shot.meta.src_number ? null : gun[13]
}

];

// Get gun numbers
const guns = [...new Set(data.map( s => s.meta.guns.map( g => g[1] ) ).flat())];

@@ -256,13 +256,13 @@ export default {
// ]
// }
const z = {};

// x is an array of shotpoints
const x = [];

// y is an array of gun numbers
const y = guns.map( gun => `G${gun}` );

// Build array of guns (i.e., populate z)
// We prefer to do this outside the shot-to-shot loop
// for efficiency
@@ -273,15 +273,15 @@ export default {
z[label][i] = [];
}
}

// Populate array of guns with shotpoint data
for (let shot of data) {
x.push(shot.point);

for (const facet of facets) {
const label = facet.params.name;
const facetGunsArray = z[label];

for (const gun of shot.meta.guns) {
const gunIndex = gun[1]-1;
const facetGun = facetGunsArray[gunIndex];
@@ -289,10 +289,10 @@ export default {
}
}
}

return aspects.map( (aspect, idx) => {
const facet = facets.find(el => el.params.name == aspect) || {};

const defaultParams = {
name: aspect,
type: "heatmap",
@@ -304,15 +304,15 @@ export default {
xaxis: "x",
yaxis: "y" + (idx > 0 ? idx+1 : "")
}

return Object.assign({}, defaultParams, facet.params);
});
}

const data = transform(this.data.items, this.aspects);
this.busy = false;

const layout = {
title: {text: "Gun details – sequence %{meta.sequence}"},
height: 200*this.aspects.length,
@@ -327,15 +327,15 @@ export default {
*/
//autosize: true,
// colorscale: "sequential",

xaxis: {
title: "Shotpoint",
showspikes: true
},

meta: this.data.meta
};

this.aspects.forEach ( (aspect, idx) => {
const num = idx+1;
const key = "yaxis" + num;
@@ -352,21 +352,21 @@ export default {
domain
}
});

const config = {
//editable: true,
displaylogo: false
};

this.graph = Plotly.newPlot(this.$refs.graphHeat, data, layout, config);

},

replot () {
if (!this.graph) {
return;
}

console.log("Replotting");
Object.values(this.$refs).forEach( ref => {
if (ref.data) {
@@ -378,23 +378,23 @@ export default {
}
});
},

...mapActions(["api"])

},

mounted () {

if (this.data) {
this.plot();
} else {
this.busy = true;
}

this.resizeObserver = new ResizeObserver(this.replot)
this.resizeObserver.observe(this.$refs.graphHeat);
},

beforeDestroy () {
if (this.resizeObserver) {
this.resizeObserver.unobserve(this.$refs.graphHeat);

@@ -6,7 +6,7 @@
<v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
<v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
</v-card-title>

<v-container fluid fill-height>
<v-row>
<v-col>
@@ -49,7 +49,7 @@ import * as aes from '@/lib/graphs/aesthetics.js';

export default {
name: 'DougalGraphGunsPressure',

props: [ "data", "settings" ],

data () {
@@ -62,16 +62,16 @@ export default {
violinplot: false
};
},

computed: {
//...mapGetters(['apiUrl'])
},

watch: {

data (newVal, oldVal) {
console.log("data changed");

if (newVal === null) {
this.busy = true;
} else {
@@ -79,42 +79,42 @@ export default {
this.plot();
}
},

settings () {
for (const key in this.settings) {
this[key] = this.settings[key];
}
},

shotpoint () {
if (this.shotpoint) {
this.replot();
}
this.$emit("update:settings", {[`${this.$options.name}.shotpoint`]: this.shotpoint});
},

violinplot () {
if (this.violinplot) {
this.plotViolin();
}
this.$emit("update:settings", {[`${this.$options.name}.violinplot`]: this.violinplot});
}

},

methods: {

plot () {
this.plotSeries();
if (this.violinplot) {
this.plotViolin();
}
},

async plotSeries () {

function transformSeries (d, src_number, otherParams={}) {

const meta = src_number
? unpack(d, "meta").filter( s => s.src_number == src_number )
: unpack(d, "meta");
@@ -126,12 +126,12 @@ export default {
const gunsWeightedAvgPressure = gunPressures.map( (s, sidx) =>
d3a.sum(s.map( (pressure, gidx) => pressure * gunPressureWeights[sidx][gidx] )) / d3a.sum(gunPressureWeights[sidx])
);

const manifold = unpack(meta, "manifold");
const x = src_number
? unpack(d.filter(s => s.meta.src_number == src_number), "point")
: unpack(d, "point");

const traceManifold = {
name: "Manifold",
type: "scatter",
@@ -140,7 +140,7 @@ export default {
x,
y: manifold,
};

const tracesGunPressures = [{
type: "scatter",
mode: "lines",
@@ -164,7 +164,7 @@ export default {
y: gunPressuresSorted.map(s => d3a.quantileSorted(s, 0.75)),
...aes.gunArrays[src_number || 1].max
}];

const tracesGunsPressuresIndividual = {
//name: `Array ${src_number} outliers`,
type: "scatter",
@@ -180,22 +180,22 @@ export default {
).flat(),
...aes.gunArrays[src_number || 1].out
};

const data = [ traceManifold, ...tracesGunPressures, tracesGunsPressuresIndividual ]
return data;
}

if (!this.data) {
console.log("missing data");
return;
}

const sources = [ ...new Set(unpack(this.data.items, "meta").map( s => s.src_number ))];
const data = sources.map( src_number => transformSeries(this.data.items, src_number) ).flat();
console.log("Sources", sources);
console.log(data);
this.busy = false;

const layout = {
//autosize: true,
title: {text: "Gun pressures – sequence %{meta.sequence}"},
@@ -212,12 +212,12 @@ export default {
},
meta: this.data.meta
};

const config = {
editable: false,
displaylogo: false
};

this.graph = Plotly.newPlot(this.$refs.graphSeries, data, layout, config);
this.$refs.graphSeries.on('plotly_hover', (d) => {
const point = d.points[0].x;
@@ -237,7 +237,7 @@ export default {
groups: unpack(guns, 0)
}],
}];

const layout = {
title: {text: "Gun pressures – shot %{meta.point}"},
height: 300,
@@ -253,19 +253,19 @@ export default {
point
}
};

const config = { displaylogo: false };

Plotly.react(this.$refs.graphBar, data, layout, config);
});
},

async plotViolin () {

function transformViolin (d, opts = {}) {

const styles = [];

unpack(unpack(d, "meta"), "guns").flat().forEach(i => {
const gunId = i[1];
const arrayId = i[2];
@@ -273,7 +273,7 @@ export default {
styles[gunId] = Object.assign({target: gunId}, aes.gunArrayViolins[arrayId]);
}
});

const data = {
type: 'violin',
x: unpack(unpack(unpack(d, "meta"), "guns").flat(), 1), // Gun number
@@ -294,21 +294,21 @@ export default {
styles: styles.filter(i => !!i)
}]
}

return data;
}

console.log("plot violin");
if (!this.data) {
console.log("missing data");
return;
}
console.log("Will plot sequence", this.data.meta.project, this.data.meta.sequence);

const data = [ transformViolin(this.data.items) ];
this.busy = false;

const layout = {
//autosize: true,
showlegend: false,
@@ -324,21 +324,21 @@ export default {
},
meta: this.data.meta
};

const config = {
editable: false,
displaylogo: false
};

this.graph = Plotly.newPlot(this.$refs.graphViolin, data, layout, config);
},

replot () {
if (!this.graph) {
return;
}

console.log("Replotting");
Object.values(this.$refs).forEach( ref => {
if (ref.data) {
@@ -350,25 +350,25 @@ export default {
}
});
},

...mapActions(["api"])

},

mounted () {

if (this.data) {
this.plot();
} else {
this.busy = true;
}

this.resizeObserver = new ResizeObserver(this.replot)
this.resizeObserver.observe(this.$refs.graphSeries);
this.resizeObserver.observe(this.$refs.graphViolin);
this.resizeObserver.observe(this.$refs.graphBar);
},

beforeDestroy () {
if (this.resizeObserver) {
this.resizeObserver.unobserve(this.$refs.graphBar);

@@ -6,7 +6,7 @@
<v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
<v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
</v-card-title>

<v-container fluid fill-height>
<v-row>
<v-col>
@@ -49,7 +49,7 @@ import * as aes from '@/lib/graphs/aesthetics.js';

export default {
name: 'DougalGraphGunsTiming',

props: [ "data", "settings" ],

data () {
@@ -62,16 +62,16 @@ export default {
violinplot: false
};
},

computed: {
//...mapGetters(['apiUrl'])
},

watch: {

data (newVal, oldVal) {
console.log("data changed");

if (newVal === null) {
this.busy = true;
} else {
@@ -79,42 +79,42 @@ export default {
this.plot();
}
},

settings () {
for (const key in this.settings) {
this[key] = this.settings[key];
}
},

shotpoint () {
if (this.shotpoint) {
this.replot();
}
this.$emit("update:settings", {[`${this.$options.name}.shotpoint`]: this.shotpoint});
},

violinplot () {
if (this.violinplot) {
this.plotViolin();
}
this.$emit("update:settings", {[`${this.$options.name}.violinplot`]: this.violinplot});
}

},

methods: {

plot () {
this.plotSeries();
if (this.violinplot) {
this.plotViolin();
}
},

async plotSeries () {

function transformSeries (d, src_number, otherParams={}) {

const meta = src_number
? unpack(d, "meta").filter( s => s.src_number == src_number )
: unpack(d, "meta");
@@ -122,11 +122,11 @@ export default {
const gunTimings = guns.map(s => s.map(g => g[9]));
const gunTimingsSorted = gunTimings.map(s => d3a.sort(s));
const gunsAvgTiming = gunTimings.map( (s, sidx) => d3a.mean(s) );

const x = src_number
? unpack(d.filter(s => s.meta.src_number == src_number), "point")
: unpack(d, "point");

const tracesGunTimings = [{
type: "scatter",
mode: "lines",
@@ -150,7 +150,7 @@ export default {
y: gunTimingsSorted.map(s => d3a.quantileSorted(s, 0.75)),
...aes.gunArrays[src_number || 1].max
}];

const tracesGunsTimingsIndividual = {
//name: `Array ${src_number} outliers`,
type: "scatter",
@@ -166,22 +166,22 @@ export default {
).flat(),
...aes.gunArrays[src_number || 1].out
};

const data = [ ...tracesGunTimings, tracesGunsTimingsIndividual ]
return data;
}

if (!this.data) {
console.log("missing data");
return;
}

const sources = [ ...new Set(unpack(this.data.items, "meta").map( s => s.src_number ))];
const data = sources.map( src_number => transformSeries(this.data.items, src_number) ).flat();
console.log("Sources", sources);
console.log(data);
this.busy = false;

const layout = {
//autosize: true,
title: {text: "Gun timings – sequence %{meta.sequence}"},
@@ -198,12 +198,12 @@ export default {
},
meta: this.data.meta
};

const config = {
editable: false,
displaylogo: false
};

this.graph = Plotly.newPlot(this.$refs.graphSeries, data, layout, config);
this.$refs.graphSeries.on('plotly_hover', (d) => {
const point = d.points[0].x;
@@ -220,7 +220,7 @@ export default {
groups: unpack(guns, 0)
}],
}];

const layout = {
title: {text: "Gun timings – shot %{meta.point}"},
height: 300,
@@ -236,19 +236,19 @@ export default {
point
}
};

const config = { displaylogo: false };

Plotly.react(this.$refs.graphBar, data, layout, config);
});
},

async plotViolin () {

function transformViolin (d, opts = {}) {

const styles = [];

unpack(unpack(d, "meta"), "guns").flat().forEach(i => {
const gunId = i[1];
const arrayId = i[2];
@@ -256,7 +256,7 @@ export default {
styles[gunId] = Object.assign({target: gunId}, aes.gunArrayViolins[arrayId]);
}
});

const data = {
type: 'violin',
x: unpack(unpack(unpack(d, "meta"), "guns").flat(), 1), // Gun number
@@ -277,21 +277,21 @@ export default {
styles: styles.filter(i => !!i)
}]
}

return data;
}

console.log("plot violin");
if (!this.data) {
console.log("missing data");
return;
}
console.log("Will plot sequence", this.data.meta.project, this.data.meta.sequence);

const data = [ transformViolin(this.data.items) ];
this.busy = false;

const layout = {
//autosize: true,
showlegend: false,
@@ -307,21 +307,21 @@ export default {
},
meta: this.data.meta
};

const config = {
editable: false,
displaylogo: false
};

this.graph = Plotly.newPlot(this.$refs.graphViolin, data, layout, config);
},

replot () {
if (!this.graph) {
return;
}

console.log("Replotting");
Object.values(this.$refs).forEach( ref => {
if (ref.data) {
@@ -333,25 +333,25 @@ export default {
}
});
},

...mapActions(["api"])

},

mounted () {

if (this.data) {
this.plot();
} else {
this.busy = true;
}

this.resizeObserver = new ResizeObserver(this.replot)
this.resizeObserver.observe(this.$refs.graphSeries);
this.resizeObserver.observe(this.$refs.graphViolin);
this.resizeObserver.observe(this.$refs.graphBar);
},

beforeDestroy () {
if (this.resizeObserver) {
this.resizeObserver.unobserve(this.$refs.graphBar);

@@ -1,21 +1,21 @@
<template>

<v-dialog v-model="open">

<template v-slot:activator="{ on, attrs }">
<v-btn icon v-bind="attrs" v-on="on" title="Configure visible aspects">
<v-icon small>mdi-wrench-outline</v-icon>
</v-btn>

</template>

<v-card>
<v-list nav subheader>

<v-subheader>Visualisations</v-subheader>

<v-list-item-group v-model="aspectsVisible" multiple>

<v-list-item value="DougalGraphGunsPressure">
<template v-slot:default="{ active }">
<v-list-item-action>
@@ -28,7 +28,7 @@
</v-list-item-content>
</template>
</v-list-item>

<v-list-item value="DougalGraphGunsTiming">
<template v-slot:default="{ active }">
<v-list-item-action>
@@ -41,7 +41,7 @@
</v-list-item-content>
</template>
</v-list-item>

<v-list-item value="DougalGraphGunsDepth">
<template v-slot:default="{ active }">
<v-list-item-action>
@@ -54,7 +54,7 @@
</v-list-item-content>
</template>
</v-list-item>

<v-list-item value="DougalGraphGunsHeatmap">
<template v-slot:default="{ active }">
<v-list-item-action>
@@ -67,7 +67,7 @@
</v-list-item-content>
</template>
</v-list-item>

<v-list-item value="DougalGraphArraysIJScatter">
<template v-slot:default="{ active }">
<v-list-item-action>
@@ -83,14 +83,14 @@

</v-list-item-group>
</v-list>

<v-divider></v-divider>
<v-card-actions>
<v-btn v-if="user" color="warning" text @click="save" :title="'Save as preference for user '+user.name+' on this computer (other users may have other defaults).'">Save as default</v-btn>
<v-spacer></v-spacer>
<v-btn color="primary" text @click="open=false">Close</v-btn>
</v-card-actions>

</v-card>
</v-dialog>

@@ -102,20 +102,20 @@ import { mapActions, mapGetters } from 'vuex';

export default {
name: "DougalGraphSettingsSequence",

props: [
"aspects"
],

data () {
return {
open: false,
aspectsVisible: this.aspects || []
}
},

watch: {

aspects () {
// Update the aspects selection list iff the list
// is not currently open.
@@ -123,19 +123,19 @@ export default {
this.aspectsVisible = this.aspects;
}
}

},

computed: {
...mapGetters(['user', 'writeaccess', 'loading', 'serverEvent'])
},

methods: {
save () {
this.open = false;
this.$nextTick( () => this.$emit("update:aspects", {aspects: [...this.aspectsVisible]}) );
},

reset () {
this.aspectsVisible = this.aspects || [];
}

@@ -0,0 +1,290 @@
<template>
<div ref="graph"
class="graph-container"
></div>
</template>

<style scoped>
.graph-container {
width: 100%;
height: 100%;
}
</style>

<script>
import Plotly from 'plotly.js-dist';
import unpack from '@/lib/unpack.js';

export default {
name: "DougalGraphProjectSequenceInlineCrossline",

props: {
items: Array,
gunDataFormat: { type: String, default: "smsrc" },
facet: { type: String, default: "scatter" }
},

data () {
return {
plotted: false,
resizeObserver: null
};
},

computed: {

config () {
switch (this.facet) {
case "scatter":
default:
return {
editable: false,
displayLogo: false
};
}
},

layout () {
const base = {
font: {
color: this.$vuetify.theme.isDark ? "#fff" : undefined
}
};

switch (this.facet) {
case "scatter":
return {
...base,
autocolorscale: true,
title: {text: `Preplot deviation <span style="font-size:smaller;">(x̅: %{data[0].meta.avg_x} ±%{data[0].meta.std_x} m; y̅: %{data[0].meta.avg_y} ±%{data[0].meta.std_y} m)</span>`},
xaxis: {
title: "Crossline (m)"
},
yaxis: {
title: "Inline (m)"
},
plot_bgcolor:"rgba(0,0,0,0)",
paper_bgcolor:"rgba(0,0,0,0)"
};

case "crossline":
return {
...base,
autocolorscale: true,
title: {text: `Crossline deviation <span style="font-size:smaller;">(x̅: %{data[0].meta.avg_x} ±%{data[0].meta.std_x} m)</span>`},
xaxis: {
title: "Shotpoint"
},
yaxis: {
title: "Crossline (m)"
},
plot_bgcolor:"rgba(0,0,0,0)",
paper_bgcolor:"rgba(0,0,0,0)"
};

case "2dhist":
return {
...base,
showlegend: true,
title: {text: `Preplot deviation <span style="font-size:smaller;">(x̅: %{data[0].meta.avg_x} ±%{data[0].meta.std_x} m; y̅: %{data[0].meta.avg_y} ±%{data[0].meta.std_y} m)</span>`},
xaxis: {
title: "Crossline (m)",
showgrid: true,
zeroline: true
},
yaxis: {
title: "Inline (m)",
showgrid: true,
zeroline: true
},
plot_bgcolor:"rgba(0,0,0,0)",
paper_bgcolor:"rgba(0,0,0,0)"
};

case "c-o":
return {
...base,
showlegend: true,
title: {text: `Final vs raw <span style="font-size:smaller;">(x̅: %{data[0].meta.avg_x} ±%{data[0].meta.std_x} m; y̅: %{data[0].meta.avg_y} ±%{data[0].meta.std_y} m)</span>`},
xaxis: {
title: "Crossline (m)",
showgrid: true,
zeroline: true
},
yaxis: {
title: "Inline (m)",
showgrid: true,
zeroline: true
},
plot_bgcolor:"rgba(0,0,0,0)",
paper_bgcolor:"rgba(0,0,0,0)"
};
}
},

data () {
if (!this.items?.length) {
return [];
}

let x, y, avg_x, avg_y, std_x, std_y;

const items = this.items.sort( (a, b) => a.point - b.point );
const meta = unpack(items, "meta");
const src_number = unpack(unpack(unpack(meta, "raw"), this.gunDataFormat), "src_number");

if (this.facet == "c-o") {
const _items = items.filter(i => i.errorfinal && i.errorraw);
const εf = unpack(unpack(_items, "errorfinal"), "coordinates");
const εr = unpack(unpack(_items, "errorraw"), "coordinates");

x = εf.map( (f, idx) => f[0] - εr[idx][0] )
y = εf.map( (f, idx) => f[1] - εr[idx][1] )

} else {
const coords = unpack(unpack(items, ((row) => row?.errorfinal ? row.errorfinal : row.errorraw)), "coordinates");

x = unpack(coords, 0);
y = unpack(coords, 1);

}

// No chance of overflow
avg_x = (x.reduce((acc, cur) => acc + cur, 0) / x.length).toFixed(2);
avg_y = (y.reduce((acc, cur) => acc + cur, 0) / y.length).toFixed(2);
std_x = Math.sqrt(x.reduce((acc, cur) => (cur-avg_x)**2 + acc, 0) / x.length).toFixed(2);
std_y = Math.sqrt(y.reduce((acc, cur) => (cur-avg_y)**2 + acc, 0) / y.length).toFixed(2);

if (this.facet == "scatter") {

const data = [{
type: "scatter",
mode: "markers",
x,
y,
meta: { avg_x, avg_y, std_x, std_y},
transforms: [{
type: "groupby",
groups: src_number,
styles: [
{target: 1, value: {line: {color: "green"}}},
{target: 2, value: {line: {color: "red"}}},
{target: 3, value: {line: {color: "blue"}}}
]
}],
}];

return data;

} else if (this.facet == "crossline") {

const s = unpack(items, "point");

const data = [{
type: "scatter",
x: s,
y: x,
meta: { avg_x, avg_y, std_x, std_y},
_transforms: [{
type: "groupby",
groups: src_number,
styles: [
{target: 1, value: {line: {color: "green"}}},
{target: 2, value: {line: {color: "red"}}},
{target: 3, value: {line: {color: "blue"}}}
]
}],
}];

return data;

} else if (this.facet == "2dhist" || this.facet == "c-o") {

const bottomValue = this.$vuetify.theme.isDark
? ['0.0', 'rgba(0,0,0,0)']
: ['0.0', 'rgb(165,0,38)'];
const topValue = this.$vuetify.theme.isDark
? ['1.0', 'rgb(49,54,149)']
: ['1.0', 'rgba(0,0,0,0)'];

const colourscale = this.facet == "c-o"
? [bottomValue, [0.1, 'rgb(0,0,0)'], [0.9, 'rgb(255,255,255)'], topValue]
: [
bottomValue,
['0.111111111111', 'rgb(215,48,39)'],
['0.222222222222', 'rgb(244,109,67)'],
['0.333333333333', 'rgb(253,174,97)'],
['0.444444444444', 'rgb(254,224,144)'],
['0.555555555556', 'rgb(224,243,248)'],
['0.666666666667', 'rgb(171,217,233)'],
['0.777777777778', 'rgb(116,173,209)'],
['0.888888888889', 'rgb(69,117,180)'],
topValue
];

const data = [{
type: "histogram2dcontour",
ncontours: 20,
colorscale: colourscale,
showscale: false,
reversescale: !this.$vuetify.theme.isDark,
contours: {
coloring: this.facet == "c-o" ? "fill" : "heatmap",
},
x,
y,
meta: { avg_x, avg_y, std_x, std_y}
}];

return data;

}
}
},

watch: {
items (cur, prev) {
if (cur != prev) {
this.plot();
}
},

"$vuetify.theme.isDark" () {
this.plot();
}
},

methods: {

plot () {
Plotly.newPlot(this.$refs.graph, this.data, this.layout, this.config);
this.plotted = true;
},

replot () {
if (this.plotted) {
const ref = this.$refs.graph;
Plotly.relayout(ref, {
width: ref.clientWidth,
height: ref.clientHeight
});
}
}

},

mounted () {
this.resizeObserver = new ResizeObserver(this.replot)
this.resizeObserver.observe(this.$refs.graph);
},

beforeDestroy () {
if (this.resizeObserver) {
this.resizeObserver.unobserve(this.$refs.graph);
}
}

}

</script>

@@ -0,0 +1,196 @@
<template>
<div ref="graph"
class="graph-container"
></div>
</template>

<style scoped>
.graph-container {
width: 100%;
height: 100%;
}
</style>

<script>
import Plotly from 'plotly.js-dist';
import unpack from '@/lib/unpack.js';

export default {
name: "DougalGraphProjectSequenceShotpointTiming",

props: {
items: Array,
gunDataFormat: { type: String, default: "smsrc" },
facet: { type: String, default: "bars" }
},

data () {
return {
plotted: false,
resizeObserver: null
};
},

computed: {

config () {
return {
editable: false,
displayLogo: false
};
},

layout () {
return {
font: {
color: this.$vuetify.theme.isDark ? "#fff" : undefined
},
title: {text: "Shotpoint timing %{data[0].meta.subtitle}"},
xaxis: {
title: "Shotpoint"
},
yaxis: {
title: "Time (s)"
},
plot_bgcolor:"rgba(0,0,0,0)",
paper_bgcolor:"rgba(0,0,0,0)"
};
},

data () {

const items = this.items.map(i => {
return {
point: i.point,
tstamp: new Date(i.tstamp)
}
}).sort( (a, b) => a.tstamp - b.tstamp );
const x = [...unpack(items, "point")];
const y = items.map( (i, idx, ary) => (ary[idx+1]?.tstamp - i.tstamp)/1000 );
const src_number = unpack(this.items, ["meta", "raw", this.gunDataFormat, "src_number"]);

// We're dealing with intervals not points
x.pop(); y.pop(); src_number.pop();
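// Illustrative sketch (hypothetical numbers, not from the source): with shots
// at t = 0 s, 10 s and 20 s for points [101, 102, 103], y is [10, 10, NaN]
// before the pops (the last shot has no successor); dropping the final
// element leaves one interval per consecutive pair of shots.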

const meta = {};

const stats = this.stats(x, y, src_number);

// We need to do the subtitle here rather than in layout as layout knows nothing
// about the number of arrays

if (stats.src_ids.size == 1) { // src_ids is a Set, so .size rather than .length
meta.subtitle = `<span style="font-size:smaller;">(μ = ${stats.avg.all.toFixed(2)} ±${stats.std.all.toFixed(2)} s)</span>`;
} else {
meta.subtitle = `<span style="font-size:smaller;">(μ = ${stats.avg.all.toFixed(2)} ±${stats.std.all.toFixed(2)} s)</span>`;
const per_source = [];
for (const key in stats.avg) {
if (key == "all") continue;
const s = `μ<sub>${key}</sub> = ${stats.avg[key].toFixed(2)} ±${stats.std[key].toFixed(2)} s`;
per_source.push(s);
}
meta.subtitle += `<br><span style="font-size:smaller;">` + per_source.join("; ") + "</span>";
}

const trace0 = {
type: "bar",
x,
y,
transforms: [{
type: "groupby",
groups: src_number,
styles: [
{target: 1, value: {line: {color: "green"}}},
{target: 2, value: {line: {color: "red"}}},
{target: 3, value: {line: {color: "blue"}}}
]
}],
meta
};

switch (this.facet) {
case "lines":
trace0.type = "scatter";
break;
case "area":
trace0.type = "scatter";
trace0.fill = "tozeroy";
break;
case "bars":
default:
// Nothing
}

return [trace0]
}
},

watch: {
items (cur, prev) {
if (cur != prev) {
this.plot();
}
},

"$vuetify.theme.isDark" () {
this.plot();
}
},

methods: {

plot () {
Plotly.newPlot(this.$refs.graph, this.data, this.layout, this.config);
this.plotted = true;
},

replot () {
if (this.plotted) {
const ref = this.$refs.graph;
Plotly.relayout(ref, {
width: ref.clientWidth,
height: ref.clientHeight
});
}
},

stats (x, y, src_number) {
const avg = {};
const std = {};

const avg_all = (y.reduce((acc, cur) => acc + cur, 0) / y.length);
const std_all = Math.sqrt(y.reduce((acc, cur) => (cur-avg_all)**2 + acc, 0) / y.length);

avg.all = avg_all;
std.all = std_all;

const src_ids = new Set(src_number);

for (const src of src_ids) {
const v = y.filter((i, idx) => src_number[idx] == src);
const μ = (v.reduce((acc, cur) => acc + cur, 0) / v.length);
const σ = Math.sqrt(v.reduce((acc, cur) => (cur-μ)**2 + acc, 0) / v.length);
avg[src] = μ;
std[src] = σ;
}

return { avg, std, src_ids };
}
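// A minimal sketch (hypothetical numbers, not from the source) of what
// stats() returns for two sources:
//
//   this.stats([1, 2, 3, 4], [10, 12, 20, 22], [1, 1, 2, 2])
//   // → { avg: { all: 16, 1: 11, 2: 21 },
//   //     std: { all: ≈5.10, 1: 1, 2: 1 },
//   //     src_ids: Set { 1, 2 } }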

},

mounted () {
this.resizeObserver = new ResizeObserver(this.replot)
this.resizeObserver.observe(this.$refs.graph);
},

beforeDestroy () {
if (this.resizeObserver) {
this.resizeObserver.unobserve(this.$refs.graph);
}
}

}

</script>

@@ -2,8 +2,8 @@
<div class="line-status" v-if="sequences.length == 0">
<slot name="empty"></slot>
</div>
<div class="line-status" v-else-if="sequenceHref">
<router-link v-for="sequence in sequences"
<div class="line-status" v-else-if="sequenceHref || plannedSequenceHref || pendingReshootHref">
<router-link v-for="sequence in sequences" :key="sequence.sequence" v-if="sequenceHref"
class="sequence"
:class="sequence.status"
:style="style(sequence)"
@@ -11,15 +11,41 @@
:to="sequenceHref(sequence)"
>
</router-link>
<router-link v-for="sequence in plannedSequences" :key="sequence.sequence" v-if="plannedSequenceHref"
class="sequence planned"
:style="style(sequence)"
:title="title(sequence, 'planned')"
:to="plannedSequenceHref(sequence)"
>
</router-link>
<router-link v-for="(line, key) in pendingReshoots" :key="key" v-if="pendingReshootHref"
class="sequence reshoot"
:style="style(line)"
:title="title(line, 'reshoot')"
:to="pendingReshootHref(line)"
>
</router-link>
</div>
<div class="line-status" v-else>
<div v-for="sequence in sequences"
<div v-for="sequence in sequences" :key="sequence.sequence"
class="sequence"
:class="sequence.status"
:style="style(sequence)"
:title="title(sequence)"
>
</div>
<div v-for="sequence in plannedSequences" :key="sequence.sequence"
class="sequence planned"
:style="style(sequence)"
:title="title(sequence, 'planned')"
>
</div>
<div v-for="(line, key) in pendingReshoots" :key="key"
class="sequence reshoot"
:style="style(line)"
:title="title(line, 'reshoot')"
>
</div>
</div>
</template>

@@ -32,12 +58,12 @@
min-height 16px
background-color #d3d3d314
border-radius 4px

.sequence
flex 1 1 auto
opacity 0.5
border-radius 4px

&.ntbp
background-color red
&.raw
@@ -48,19 +74,25 @@
background-color blue
&.planned
background-color magenta
&.reshoot
background repeating-linear-gradient(-45deg, rgba(255,0,255,0.302), brown 5px, rgba(247, 247, 247, 0.1) 5px, rgba(242, 241, 241, 0.08) 10px), repeating-linear-gradient(45deg, rgba(255,0,255,0.302), brown 5px, rgba(247, 247, 247, 0.1) 5px, rgba(242, 241, 241, 0.08) 10px)
</style>

<script>

export default {
name: 'DougalLineStatus',

props: {
preplot: Object,
sequences: Array,
"sequence-href": Function
"sequence-href": Function,
"planned-sequences": Array,
"planned-sequence-href": Function,
"pending-reshoots": Array,
"pending-reshoot-href": Function
},

methods: {
style (s) {
const values = {};
@@ -68,46 +100,50 @@ export default {
? s.fsp_final
: s.status == "ntbp"
? (s.fsp_final || s.fsp)
: s.fsp; /* status == "raw" */
: s.fsp; /* status == "raw" or planned sequence or pending reshoot */

const lsp = s.status == "final"
? s.lsp_final
: s.status == "ntbp"
? (s.lsp_final || s.lsp)
: s.lsp; /* status == "raw" */
: s.lsp; /* status == "raw" or planned sequence or pending reshoot */

const pp0 = Math.min(this.preplot.fsp, this.preplot.lsp);
const pp1 = Math.max(this.preplot.fsp, this.preplot.lsp);
const len = pp1-pp0;
const sp0 = Math.max(Math.min(fsp, lsp), pp0);
const sp1 = Math.min(Math.max(fsp, lsp), pp1);

const left = (sp0-pp0)/len;
const right = 1-((sp1-pp0)/len);

values["margin-left"] = left*100 + "%";
values["margin-right"] = right*100 + "%";

return values;
},
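// Worked example (hypothetical values, not from the source): with a preplot
// spanning fsp 1000 to lsp 2000 and a sequence shot from 1250 to 1750,
// len = 1000, sp0 = 1250 and sp1 = 1750, so left = 0.25 and right = 0.25,
// i.e. the bar is inset 25% from each end of the preplot track.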

title (s) {
const status = s.status == "final"
? "Final"
: s.status == "raw"
? "Acquired"
: s.status == "ntbp"
? "NTBP"
: s.status == "planned"
? "Planned"
: s.status;

const remarks = "\n"+[s.remarks, s.remarks_final].join("\n").trim()

return `Sequence ${s.sequence} – ${status} (${s.fsp_final || s.fsp}−${s.lsp_final || s.lsp})${remarks}`;

title (s, type) {
if (s.status || type == "planned") {
const status = s.status == "final"
? "Final"
: s.status == "raw"
? "Acquired"
: s.status == "ntbp"
? "NTBP"
: type == "planned"
? "Planned"
: s.status;

const remarks = "\n"+[s.remarks, s.remarks_final].join("\n").trim()

return `Sequence ${s.sequence} – ${status} (${s.fsp_final || s.fsp}−${s.lsp_final || s.lsp})${remarks}`;
} else if (type == "reshoot") {
return `Pending reshoot (${s.fsp}‒${s.lsp})${s.remarks? "\n"+s.remarks : ""}`;
}
}
}

}

</script>

@@ -3,6 +3,7 @@
<v-app-bar
app
clipped-left
elevation="1"
>
<v-img src="/wgp-logo.png"
contain
@@ -12,7 +13,7 @@
<v-toolbar-title class="mx-2" @click="$router.push('/')" style="cursor: pointer;">Dougal</v-toolbar-title>

<v-spacer></v-spacer>

<v-menu bottom offset-y>
<template v-slot:activator="{on, attrs}">
<v-hover v-slot="{hover}">
@@ -29,17 +30,17 @@
</v-btn>
</v-hover>
</template>

<v-list dense>
<v-list-item :href="`/settings/equipment`">
<v-list-item-title>Equipment list</v-list-item-title>
<v-list-item-action><v-icon small>mdi-view-list</v-icon></v-list-item-action>
</v-list-item>
</v-list>

</v-menu>

<v-breadcrumbs :items="path"></v-breadcrumbs>

<template v-if="$route.name != 'Login'">
@@ -71,17 +72,10 @@

</v-menu>

<!--
<v-btn small text class="ml-2" title="Log out" link to="/?logout=1">
<v-icon small>mdi-logout</v-icon>
</v-btn>
-->
</template>
</template>
<template v-slot:extension v-if="$route.matched.find(i => i.name == 'Project')">
<v-tabs :value="tab" show-arrows align-with-title>
<v-tab v-for="tab, index in tabs" :key="index" link :to="tabLink(tab.href)" v-text="tab.text"></v-tab>
</v-tabs>
<template v-slot:extension v-if="appBarExtension">
<div :is="appBarExtension"></div>
</template>
</v-app-bar>

@@ -95,24 +89,17 @@ export default {
data() {
return {
drawer: false,
tabs: [
{ href: "summary", text: "Summary" },
{ href: "lines", text: "Lines" },
{ href: "plan", text: "Plan" },
{ href: "sequences", text: "Sequences" },
{ href: "calendar", text: "Calendar" },
{ href: "log", text: "Log" },
{ href: "qc", text: "QC" },
{ href: "graphs", text: "Graphs" },
{ href: "map", text: "Map" }
],
path: []
};
},

computed: {
tab () {
return this.tabs.findIndex(t => t.href == this.$route.path.split(/\/+/)[3]);

appBarExtension () {
return this.$route.matched
.filter(i => i.meta?.appBarExtension)
.map(i => i.meta.appBarExtension)
.pop()?.component;
},

...mapGetters(['user', 'loading'])
@@ -131,9 +118,6 @@ export default {
},

methods: {
tabLink (href) {
return `/projects/${this.$route.params.project}/${href}`;
},

breadcrumbs () {
this.path = this.$route.matched

lib/www/client/source/src/components/qc-acceptance.vue (new file, 135 lines)
@@ -0,0 +1,135 @@
<template>

<v-hover v-slot:default="{hover}" v-if="!isEmpty(item)">
<span>
<v-btn v-if="!isAccepted(item)"
:class="{'text--disabled': !hover}"
icon
small
color="primary"
:title="isMultiple(item) ? 'Accept all' : 'Accept'"
@click.stop="accept(item)">
<v-icon small :color="isAccepted(item) ? 'green' : ''">
{{ isMultiple(item) ? 'mdi-check-all' : 'mdi-check' }}
</v-icon>
</v-btn>
<v-btn v-if="someAccepted(item)"
:class="{'text--disabled': !hover}"
icon
small
color="primary"
:title="isMultiple(item) ? 'Restore all' : 'Restore'"
@click.stop="unaccept(item)">
<v-icon small>
{{ isMultiple(item) ? 'mdi-restore' : 'mdi-restore' }}
</v-icon>
</v-btn>
</span>
</v-hover>

</template>

<script>

export default {
name: 'DougalQcAcceptance',

props: {
item: { type: Object }
},

methods: {

isAccepted (item) {
if (item._children) {
return item._children.every(child => this.isAccepted(child));
}

if (item.labels) {
return item.labels.includes("QCAccepted");
}

return false;
},

someAccepted (item) {
if (item._children) {
return item._children.some(child => this.someAccepted(child));
}

if (item.labels) {
return item.labels.includes("QCAccepted");
}

return false;
},

isEmpty (item) {
return item._children?.length === 0;
},

isMultiple (item) {
return item._children?.length;
},

action (action, item) {
const items = [];

const iterate = (item) => {
if (item._kind == "point") {

if (this.isAccepted(item)) {
if (action == "unaccept") {
items.push(item);
}
} else {
if (action == "accept") {
items.push(item);
}
}

} else if (item._kind == "sequence" || item._kind == "test") {

if (item._children) {

for (const child of item._children) {
iterate(child);
}

}

if (item._shots) {

for (const child of item._shots) { // fixed: iterate _shots here; the _children branch above already covers children
iterate(child);
}

}

}

}

iterate(item);
return items;
},

accept (item) {
const items = this.action('accept', item);
if (items.length) {
this.$emit('accept', items);
}
},

unaccept (item) {
const items = this.action('unaccept', item);
if (items.length) {
this.$emit('unaccept', items);
}
}

}

}

</script>

@@ -1,5 +1,5 @@

export default function FormatTimestamp (str) {
const d = new Date(str);
if (isNaN(d)) {

@@ -1,4 +1,33 @@

/** Unpacks attributes from array items.
*
* At its simplest, given an array of objects,
* the call unpack(rows, "x") returns an array
* of the "x" attribute of every item in rows.
*
* `key` may also be:
*
* - a function with the signature
*   (Object) => any
*   the result of applying the function to
*   the object will be used as the unpacked
*   value.
*
* - an array of strings, functions or other
*   arrays. In this case, it does a recursive
*   fold operation. NOTE: it mutates `key`.
*
*/
export default function unpack(rows, key) {
if (typeof key === "function") {
return rows && rows.map( row => key(row) );
} else if (Array.isArray(key)) {
const car = key.shift();
if (key.length) {
return unpack(unpack(rows, car), key);
} else {
return unpack(rows, car);
}
} else {
return rows && rows.map( row => row?.[key] );
}
};
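// Usage sketches (assumed data shapes, not from the source):
//
//   unpack([{x: 1}, {x: 2}], "x")                    // → [1, 2]
//   unpack([{a: {b: 1}}], row => row.a.b)            // → [1]
//   unpack([{a: {b: 1}}, {a: {b: 2}}], ["a", "b"])   // → [1, 2]
//
// Note that the array form shifts elements off `key`, so pass a copy if
// the path is needed again afterwards.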
@@ -1,4 +1,4 @@

function withParentProps(item, parent, childrenKey, prop, currentValue) {
if (!Array.isArray(parent)) {
@@ -29,43 +29,43 @@ function withParentProps(item, parent, childrenKey, prop, currentValue) {
function dms (lat, lon) {
const λh = lat < 0 ? "S" : "N";
const φh = lon < 0 ? "W" : "E";

const λn = Math.abs(lat);
const φn = Math.abs(lon);

const λi = Math.trunc(λn);
const φi = Math.trunc(φn);

const λf = λn - λi;
const φf = φn - φi;

const λs = ((λf*3600)%60).toFixed(1);
const φs = ((φf*3600)%60).toFixed(1);

const λm = Math.trunc(λf*60);
const φm = Math.trunc(φf*60);

const λ =
String(λi).padStart(2, "0") + "°" +
String(λm).padStart(2, "0") + "'" +
String(λs).padStart(4, "0") + '" ' +
λh;

const φ =
String(φi).padStart(3, "0") + "°" +
String(φm).padStart(2, "0") + "'" +
String(φs).padStart(4, "0") + '" ' +
φh;

return λ+" "+φ;
}
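// Example with illustrative coordinates (not from the source):
//
//   dms(58.4372, -3.0906)   // → 58°26'13.9" N 003°05'26.2" W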

function geometryAsString (item, opts = {}) {
const key = "key" in opts ? opts.key : "geometry";
const formatDMS = opts.dms;

let str = "";

if (key in item) {
const geometry = item[key];
if (geometry && "coordinates" in geometry) {
@@ -76,7 +76,7 @@ function geometryAsString (item, opts = {}) {
str = `${geometry.coordinates[1].toFixed(6)}, ${geometry.coordinates[0].toFixed(6)}`;
}
}

if (str) {
if (opts.url) {
if (typeof opts.url === 'string') {
@@ -88,7 +88,7 @@ function geometryAsString (item, opts = {}) {
}
}
}

return str;
}

@@ -117,10 +117,10 @@ function geometryAsString (item, opts = {}) {
* not exist or is not searched for.
*/
function preferencesλ (preferences) {

return function (key, defaults={}) {
const keys = Object.keys(preferences).filter(str => str.startsWith(key+".") || str == key);

const settings = {...defaults};
for (const str of keys) {
const k = str == key ? str : str.substring(key.length+1);
@@ -130,7 +130,7 @@ function preferencesλ (preferences) {

return settings;
}

}
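// A usage sketch (hypothetical keys; assumes the loop body elided by the
// hunk above assigns preferences[str] to settings[k]):
//
//   const prefs = preferencesλ({"GunsPressure.violinplot": true});
//   prefs("GunsPressure");   // → { violinplot: true }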
@@ -16,7 +16,7 @@ import Log from '../views/Log.vue'
import QC from '../views/QC.vue'
import Graphs from '../views/Graphs.vue'
import Map from '../views/Map.vue'

import DougalAppBarExtensionProject from '../components/app-bar-extension-project'

Vue.use(VueRouter)

@@ -100,7 +100,10 @@ Vue.use(VueRouter)
text: (ctx) => ctx.$store.state.project.projectName || "…",
href: (ctx) => `/projects/${ctx.$store.state.project.projectId || ctx.$route.params.project || ""}/`
}
]
],
appBarExtension: {
component: DougalAppBarExtensionProject
}
},
children: [
{

@@ -4,7 +4,13 @@ import Vuex from 'vuex'
import api from './modules/api'
import user from './modules/user'
import snack from './modules/snack'
import projects from './modules/projects'
import project from './modules/project'
import event from './modules/event'
import label from './modules/label'
import sequence from './modules/sequence'
import plan from './modules/plan'
import line from './modules/line'
import notify from './modules/notify'

Vue.use(Vuex)
@@ -14,7 +20,13 @@ export default new Vuex.Store({
api,
user,
snack,
projects,
project,
event,
label,
sequence,
plan,
line,
notify
}
})

@@ -16,7 +16,7 @@ async function api ({state, commit, dispatch}, [resource, init = {}, cb]) {
const url = /^https?:\/\//i.test(resource) ? resource : (state.apiUrl + resource);
const res = await fetch(url, init);
if (typeof cb === 'function') {
cb(null, res);
await cb(null, res);
}
if (res.ok) {

@@ -35,7 +35,14 @@ async function api ({state, commit, dispatch}, [resource, init = {}, cb]) {
throw err;
}
} else {
await dispatch('showSnack', [res.statusText, "warning"]);
let message = res.statusText;
if (res.headers.get("Content-Type").match(/^application\/json/i)) {
|
||||
const body = await res.json();
if (body.message) {
message = body.message;
}
}
await dispatch('showSnack', [message, "warning"]);
}
} catch (err) {
if (err && err.name == "AbortError") return;

lib/www/client/source/src/store/modules/event/actions.js (new file, 129 lines)
@@ -0,0 +1,129 @@

/** Fetch events from server
*/
async function refreshEvents ({commit, dispatch, state, rootState}, [modifiedAfter] = []) {

if (!modifiedAfter) {
modifiedAfter = state.timestamp;
}

if (state.loading) {
commit('abortEventsLoading');
}

commit('setEventsLoading');
const pid = rootState.project.projectId;
const url = modifiedAfter
? `/project/${pid}/event/changes/${(new Date(modifiedAfter)).toISOString()}?unique=t`
: `/project/${pid}/event`;
const init = {
signal: state.loading.signal
};
const res = await dispatch('api', [url, init]);

if (res) {
if (modifiedAfter) {
commit('setModifiedEvents', res);
} else {
commit('setEvents', res);
}
commit('setEventsTimestamp');
}
commit('clearEventsLoading');

}

/** Return a subset of events from state.events
*/
async function getEvents ({commit, dispatch, state}, [projectId, {sequence, date0, date1, sortBy, sortDesc, itemsPerPage, page, text, label}]) {
let filteredEvents = [...state.events];

if (sortBy) {

sortBy.forEach( (key, idx) => {
filteredEvents.sort( (el0, el1) => {
const a = el0?.[key];
const b = el1?.[key];
if (a < b) {
return -1;
} else if (a > b) {
return 1;
} else if (a == b) {
return 0;
} else if (a && !b) {
return 1;
} else if (!a && b) {
return -1;
} else {
return 0;
}
});
if (sortDesc && sortDesc[idx] === true) {
filteredEvents.reverse();
}
});

}

if (sequence) {
filteredEvents = filteredEvents.filter( event => event.sequence == sequence );
}

if (date0 && date1) {
filteredEvents = filteredEvents.filter( event =>
event.tstamp.substr(0, 10) >= date0 && event.tstamp.substr(0, 10) <= date1
);
} else if (date0) {
filteredEvents = filteredEvents.filter( event => event.tstamp.substr(0, 10) == date0 );
}

if (text) {
const tstampFilter = (value, search, item) => {
return textFilter(value, search, item);
};

const numberFilter = (value, search, item) => {
return value == search;
};

const textFilter = (value, search, item) => {
return String(value).toLowerCase().includes(search.toLowerCase());
};

const searchFunctions = {
tstamp: tstampFilter,
sequence: numberFilter,
point: numberFilter,
remarks: textFilter,
labels: (value, search, item) => value.some(label => textFilter(label, search, item))
|
||||
};
|
||||
|
||||
filteredEvents = filteredEvents.filter ( event => {
|
||||
for (let key in searchFunctions) {
|
||||
const fn = searchFunctions[key];
|
||||
if (fn(event[key], text, event)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
});
|
||||
}
|
||||
|
||||
if (label) {
|
||||
filteredEvents = filteredEvents.filter( event => event.labels?.includes(label) );
|
||||
}
|
||||
|
||||
const count = filteredEvents.length;
|
||||
|
||||
if (itemsPerPage && itemsPerPage > 0) {
|
||||
const offset = (page > 0)
|
||||
? (page-1) * itemsPerPage
|
||||
: 0;
|
||||
|
||||
filteredEvents = filteredEvents.slice(offset, offset+itemsPerPage);
|
||||
}
|
||||
|
||||
return {events: filteredEvents, count};
|
||||
}
|
||||
|
||||
export default { refreshEvents, getEvents };
|
||||
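A sketch of driving getEvents from a paginated table; the option names come from the destructured parameters above, the values are illustrative:

const {events, count} = await this.$store.dispatch('getEvents', [
  projectId,
  {
    sortBy: ['tstamp'],   // sort keys, applied in order
    sortDesc: [true],     // per-key descending flags
    text: 'buoy',         // free-text search over the searchFunctions keys
    page: 2,              // 1-based page number
    itemsPerPage: 25
  }
]);
// `count` is the filtered total, so the pager can be sized even though
// only one page of events is returned.
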
lib/www/client/source/src/store/modules/event/getters.js (new file, 14 lines)
@@ -0,0 +1,14 @@

function events (state) {
  return state.events;
}

function eventCount (state) {
  return state.events?.length ?? 0;
}

function eventsLoading (state) {
  return !!state.loading;
}

export default { events, eventCount, eventsLoading };

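These getters map directly onto computed properties; a sketch using Vuex's mapGetters helper:

import { mapGetters } from 'vuex';

export default {
  computed: {
    // Global names, since the module is not namespaced.
    ...mapGetters(['events', 'eventCount', 'eventsLoading'])
  }
};
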
lib/www/client/source/src/store/modules/event/index.js (new file, 6 lines)
@@ -0,0 +1,6 @@
import state from './state'
import getters from './getters'
import actions from './actions'
import mutations from './mutations'

export default { state, getters, actions, mutations };

lib/www/client/source/src/store/modules/event/mutations.js (new file, 73 lines)
@@ -0,0 +1,73 @@

function setEvents (state, events) {
  // We don't need or want the events array to be reactive, since
  // it can be tens of thousands of items long.
  state.events = Object.freeze(events);
}

/** Selectively replace / insert / delete events
 * from state.events.
 *
 * modifiedEvents is the result of
 * /api/project/:project/event/changes?unique=t
 */
function setModifiedEvents (state, modifiedEvents) {
  const events = [...state.events];
  for (let evt of modifiedEvents) {
    const idx = events.findIndex(i => i.id == evt.id);
    if (idx != -1) {
      if (evt.is_deleted) {
        events.splice(idx, 1);
      } else {
        delete evt.is_deleted;
        events.splice(idx, 1, evt);
      }
    } else {
      if (!evt.is_deleted) {
        delete evt.is_deleted;
        events.unshift(evt);
      }
    }
  }
  setEvents(state, events);
}

function setEventsLoading (state, abortController = new AbortController()) {
  state.loading = abortController;
}

function clearEventsLoading (state) {
  state.loading = null;
}

function setEventsTimestamp (state, timestamp = new Date()) {
  if (timestamp === true) {
    const tstamp = state.events
      .map( event => event.modified_on )
      // A null seed keeps the reduce from throwing on an empty events array.
      .reduce( (acc, cur) => acc > cur ? acc : cur, null );
    state.timestamp = tstamp ? new Date(tstamp) : new Date();
  } else {
    state.timestamp = timestamp;
  }
}

function setEventsETag (state, etag) {
  state.etag = etag;
}

function abortEventsLoading (state) {
  if (state.loading) {
    state.loading.abort();
  }
  state.loading = null;
}

export default {
  setEvents,
  setModifiedEvents,
  setEventsLoading,
  clearEventsLoading,
  abortEventsLoading,
  setEventsTimestamp,
  setEventsETag
};

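A worked example of the setModifiedEvents merge semantics, with hypothetical payloads:

// state.events: [{id: 7, remarks: "old"}, {id: 9}]
// payload:      [{id: 7, remarks: "new", is_deleted: false},
//                {id: 9, is_deleted: true},
//                {id: 12, is_deleted: false}]
// Result: id 7 replaced in place, id 9 removed, id 12 prepended:
//               [{id: 12}, {id: 7, remarks: "new"}]
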
lib/www/client/source/src/store/modules/event/state.js (new file, 8 lines)
@@ -0,0 +1,8 @@
const state = () => ({
  events: Object.freeze([]),
  loading: null,
  timestamp: null,
  etag: null,
});

export default state;

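The frozen initial value matters because Vue 2 skips reactive conversion of frozen objects; updates therefore swap the whole array, which is still observed at the `events` property itself. A sketch:

// Individual events are not reactive (frozen), so "mutation" means
// replacing the array; the assignment itself is still picked up by Vue.
state.events = Object.freeze([...state.events, newEvent]);  // illustrative
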
lib/www/client/source/src/store/modules/label/actions.js (new file, 106 lines)
@@ -0,0 +1,106 @@

/** Fetch labels from server
 */
async function refreshLabels ({commit, dispatch, state, rootState}) {

  if (state.loading) {
    commit('abortLabelsLoading');
  }

  commit('setLabelsLoading');
  const pid = rootState.project.projectId;
  const url = `/project/${pid}/label`;
  const init = {
    signal: state.loading.signal
  };
  const res = await dispatch('api', [url, init]);

  if (res) {
    commit('setLabels', res);
    commit('setLabelsTimestamp');
  }
  commit('clearLabelsLoading');
}

/** Return a subset of labels from state.labels.
 *
 * Note that, unlike other actions in the get* family,
 * the return value is not isomorphic to the state.
 *
 * While state.labels is an object, getLabels() returns
 * an array with each item having the shape:
 *
 * { label: "labelName", view: {…}, model: {…} }
 *
 * This is intended to be useful, for instance, for a table
 * of labels.
 */
async function getLabels ({commit, dispatch, state}, [projectId, {sortBy, sortDesc, itemsPerPage, page, text, label}]) {

  let filteredLabels = Object.entries(state.labels).map(i => {
    return {
      label: i[0],
      ...i[1]
    }
  });

  if (sortBy) {

    sortBy.forEach( (key, idx) => {
      filteredLabels.sort( (el0, el1) => {
        // Items were mapped to {label, view, model} objects above.
        const a = key == "label" ? el0.label : el0.view?.[key];
        const b = key == "label" ? el1.label : el1.view?.[key];
        if (a < b) {
          return -1;
        } else if (a > b) {
          return 1;
        } else if (a == b) {
          return 0;
        } else if (a && !b) {
          return 1;
        } else if (!a && b) {
          return -1;
        } else {
          return 0;
        }
      });
      if (sortDesc && sortDesc[idx] === true) {
        filteredLabels.reverse();
      }
    });

  }

  if (label) {
    // `item` avoids shadowing the `label` filter argument.
    filteredLabels = filteredLabels.filter( item => item.label == label );
  }

  if (text) {
    const textFilter = (value, search, item) => {
      return String(value).toLowerCase().includes(search.toLowerCase());
    };

    // Match against the label name or its description.
    filteredLabels = filteredLabels.filter( item => {
      return textFilter(item.label, text, item) || textFilter(item.view.description, text, item);
    });
  }

  const count = filteredLabels.length;

  if (itemsPerPage && itemsPerPage > 0) {
    const offset = (page > 0)
      ? (page-1) * itemsPerPage
      : 0;

    filteredLabels = filteredLabels.slice(offset, offset+itemsPerPage);
  }

  return {labels: filteredLabels, count};
}

export default { refreshLabels, getLabels };

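A sketch of the state-to-result mapping in getLabels, with a hypothetical state.labels object:

// Hypothetical state.labels:
// {
//   "ntbp":  { view: { description: "Not to be processed" }, model: { user: true } },
//   "swell": { view: { description: "Swell noise" },         model: { user: true } }
// }
// getLabels then yields rows of the documented shape:
//   { label: "ntbp", view: {…}, model: {…} }
// plus a `count` of rows after filtering, before pagination.
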
lib/www/client/source/src/store/modules/label/getters.js (new file, 22 lines)
@@ -0,0 +1,22 @@

function labels (state) {
  return state.labels;
}

/** Return labels that can be added by users.
 *
 * As opposed to system labels.
 */
function userLabels (state) {
  return Object.fromEntries(Object.entries(state.labels).filter(i => i[1].model.user));
}

function labelCount (state) {
  // state.labels is an object keyed by label name, not an array.
  return Object.keys(state.labels ?? {}).length;
}

function labelsLoading (state) {
  return !!state.loading;
}

export default { labels, userLabels, labelCount, labelsLoading };

lib/www/client/source/src/store/modules/label/index.js (new file, 6 lines)
@@ -0,0 +1,6 @@
import state from './state'
import getters from './getters'
import actions from './actions'
import mutations from './mutations'

export default { state, getters, actions, mutations };

lib/www/client/source/src/store/modules/label/mutations.js (new file, 49 lines)
@@ -0,0 +1,49 @@

function setLabels (state, labels) {
  // We don't need or want the labels object to be reactive, since
  // it can grow large.
  state.labels = Object.freeze(labels);
}

function setLabelsLoading (state, abortController = new AbortController()) {
  state.loading = abortController;
}

// This assumes that we know any transactions have finished or we
// don't care about aborting.
function clearLabelsLoading (state) {
  state.loading = null;
}

function setLabelsTimestamp (state, timestamp = new Date()) {
  // NOTE: There is no `modified_on` property in the labels
  // result or in the database schema, but we could add
  // one.
  if (timestamp === true) {
    // state.labels is an object, so take its values before mapping.
    const tstamp = Object.values(state.labels)
      .map( i => i.modified_on )
      .reduce( (acc, cur) => acc > cur ? acc : cur, null );
    state.timestamp = tstamp ? new Date(tstamp) : new Date();
  } else {
    state.timestamp = timestamp;
  }
}

function setLabelsETag (state, etag) {
  state.etag = etag;
}

function abortLabelsLoading (state) {
  if (state.loading) {
    state.loading.abort();
  }
  state.loading = null;
}

export default {
  setLabels,
  setLabelsLoading,
  clearLabelsLoading,
  abortLabelsLoading,
  setLabelsTimestamp,
  setLabelsETag
};

lib/www/client/source/src/store/modules/label/state.js (new file, 8 lines)
@@ -0,0 +1,8 @@
const state = () => ({
  // Labels are keyed by name, so the empty initial value is an object.
  labels: Object.freeze({}),
  loading: null,
  timestamp: null,
  etag: null,
});

export default state;

lib/www/client/source/src/store/modules/line/actions.js (new file, 117 lines)
@@ -0,0 +1,117 @@

/** Fetch lines from server
 */
async function refreshLines ({commit, dispatch, state, rootState}) {

  if (state.loading) {
    commit('abortLinesLoading');
  }

  commit('setLinesLoading');
  const pid = rootState.project.projectId;
  const url = `/project/${pid}/line`;
  const init = {
    signal: state.loading.signal
  };
  const res = await dispatch('api', [url, init]);

  if (res) {
    commit('setLines', res);
    commit('setLinesTimestamp');
  }
  commit('clearLinesLoading');
}

/** Return a subset of lines from state.lines
 */
async function getLines ({commit, dispatch, state}, [projectId, {line, fsp, lsp, incr, sortBy, sortDesc, itemsPerPage, page, text}]) {
  let filteredLines = [...state.lines];

  if (sortBy) {

    sortBy.forEach( (key, idx) => {
      filteredLines.sort( (el0, el1) => {
        const a = el0?.[key];
        const b = el1?.[key];
        if (a < b) {
          return -1;
        } else if (a > b) {
          return 1;
        } else if (a == b) {
          return 0;
        } else if (a && !b) {
          return 1;
        } else if (!a && b) {
          return -1;
        } else {
          return 0;
        }
      });
      if (sortDesc && sortDesc[idx] === true) {
        filteredLines.reverse();
      }
    });

  }

  if (line) {
    // `item` avoids shadowing the `line` filter argument.
    filteredLines = filteredLines.filter( item => item.line == line );
  }

  if (fsp) {
    filteredLines = filteredLines.filter( item => item.fsp == fsp );
  }

  if (lsp) {
    filteredLines = filteredLines.filter( item => item.lsp == lsp );
  }

  if (text) {
    const numberFilter = (value, search, item) => {
      return value == search;
    };

    const textFilter = (value, search, item) => {
      return String(value).toLowerCase().includes(search.toLowerCase());
    };

    const incrFilter = (value, search, item) => {
      const inc = /^(incr(ement)?|↑|\+)/i;
      const dec = /^(decr(ement)?|↓|-)/i;
      return (inc.test(search) && value) || (dec.test(search) && !value);
    };

    const searchFunctions = {
      line: numberFilter,
      fsp: numberFilter,
      lsp: numberFilter,
      remarks: textFilter,
      incr: incrFilter,
      ntba: (value, search, item) => search.toLowerCase() == "ntba" && value
    };

    filteredLines = filteredLines.filter( line => {
      for (let key in searchFunctions) {
        const fn = searchFunctions[key];
        if (fn(line[key], text, line)) {
          return true;
        }
      }
      return false;
    });
  }

  const count = filteredLines.length;

  if (itemsPerPage && itemsPerPage > 0) {
    const offset = (page > 0)
      ? (page-1) * itemsPerPage
      : 0;

    filteredLines = filteredLines.slice(offset, offset+itemsPerPage);
  }

  return {lines: filteredLines, count};
}

export default { refreshLines, getLines };

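The free-text search doubles as a small query language for the boolean columns; a sketch (the meaning of the ntba flag is assumed from the column name):

// "incr", "increment", "↑" or "+…" match incrementing lines;
// "decr", "decrement", "↓" or "-…" match decrementing ones;
// the literal text "ntba" matches lines with the ntba flag set.
const {lines} = await this.$store.dispatch('getLines', [projectId, { text: 'incr' }]);
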
lib/www/client/source/src/store/modules/line/getters.js (new file, 14 lines)
@@ -0,0 +1,14 @@

function lines (state) {
  return state.lines;
}

function lineCount (state) {
  return state.lines?.length ?? 0;
}

function linesLoading (state) {
  return !!state.loading;
}

export default { lines, lineCount, linesLoading };

lib/www/client/source/src/store/modules/line/index.js (new file, 6 lines)
@@ -0,0 +1,6 @@
import state from './state'
import getters from './getters'
import actions from './actions'
import mutations from './mutations'

export default { state, getters, actions, mutations };

lib/www/client/source/src/store/modules/line/mutations.js (new file, 49 lines)
@@ -0,0 +1,49 @@

function setLines (state, lines) {
  // We don't need or want the lines array to be reactive, since
  // it can be tens of thousands of items long.
  state.lines = Object.freeze(lines);
}

function setLinesLoading (state, abortController = new AbortController()) {
  state.loading = abortController;
}

// This assumes that we know any transactions have finished or we
// don't care about aborting.
function clearLinesLoading (state) {
  state.loading = null;
}

function setLinesTimestamp (state, timestamp = new Date()) {
  // NOTE: There is no `modified_on` property in the lines
  // result or in the database schema, but we could perhaps add
  // one.
  if (timestamp === true) {
    const tstamp = state.lines
      .map( line => line.modified_on )
      .reduce( (acc, cur) => acc > cur ? acc : cur, null );
    state.timestamp = tstamp ? new Date(tstamp) : new Date();
  } else {
    state.timestamp = timestamp;
  }
}

function setLinesETag (state, etag) {
  state.etag = etag;
}

function abortLinesLoading (state) {
  if (state.loading) {
    state.loading.abort();
  }
  state.loading = null;
}

export default {
  setLines,
  setLinesLoading,
  clearLinesLoading,
  abortLinesLoading,
  setLinesTimestamp,
  setLinesETag
};

lib/www/client/source/src/store/modules/line/state.js (new file, 8 lines)
@@ -0,0 +1,8 @@
const state = () => ({
  lines: Object.freeze([]),
  loading: null,
  timestamp: null,
  etag: null,
});

export default state;

lib/www/client/source/src/store/modules/plan/actions.js (new file, 114 lines)
@@ -0,0 +1,114 @@

/** Fetch the plan from server
 */
async function refreshPlan ({commit, dispatch, state, rootState}) {

  if (state.loading) {
    commit('abortPlanLoading');
  }

  commit('setPlanLoading');
  const pid = rootState.project.projectId;
  const url = `/project/${pid}/plan`;
  const init = {
    signal: state.loading.signal
  };
  const res = await dispatch('api', [url, init]);

  if (res) {
    commit('setPlan', res);
    commit('setPlanTimestamp');
  }
  commit('clearPlanLoading');
}

/** Return a subset of sequences from state.sequences
 */
async function getPlannedSequences ({commit, dispatch, state}, [projectId, {sequence, date0, date1, sortBy, sortDesc, itemsPerPage, page, text}]) {
  let filteredPlannedSequences = [...state.sequences];

  if (sortBy) {

    sortBy.forEach( (key, idx) => {
      filteredPlannedSequences.sort( (el0, el1) => {
        const a = el0?.[key];
        const b = el1?.[key];
        if (a < b) {
          return -1;
        } else if (a > b) {
          return 1;
        } else if (a == b) {
          return 0;
        } else if (a && !b) {
          return 1;
        } else if (!a && b) {
          return -1;
        } else {
          return 0;
        }
      });
      if (sortDesc && sortDesc[idx] === true) {
        filteredPlannedSequences.reverse();
      }
    });

  }

  if (sequence) {
    // `item` avoids shadowing the `sequence` filter argument.
    filteredPlannedSequences = filteredPlannedSequences.filter( item => item.sequence == sequence );
  }

  if (date0 && date1) {
    filteredPlannedSequences = filteredPlannedSequences.filter( item =>
      // ts0/ts1 are Date objects (see the transform in plan/mutations.js).
      item.ts0.toISOString().substr(0, 10) >= date0 && item.ts1.toISOString().substr(0, 10) <= date1
    );
  } else if (date0) {
    filteredPlannedSequences = filteredPlannedSequences.filter( item =>
      item.ts0.toISOString().substr(0, 10) == date0 || item.ts1.toISOString().substr(0, 10) == date0
    );
  }

  if (text) {
    const tstampFilter = (value, search, item) => {
      return textFilter(value.toISOString(), search, item);
    };

    const numberFilter = (value, search, item) => {
      return value == search;
    };

    const textFilter = (value, search, item) => {
      return String(value).toLowerCase().includes(search.toLowerCase());
    };

    const searchFunctions = {
      sequence: numberFilter,
      line: numberFilter,
      remarks: textFilter,
      ts0: tstampFilter,
      ts1: tstampFilter
    };

    filteredPlannedSequences = filteredPlannedSequences.filter( sequence => {
      for (let key in searchFunctions) {
        const fn = searchFunctions[key];
        if (fn(sequence[key], text, sequence)) {
          return true;
        }
      }
      return false;
    });
  }

  const count = filteredPlannedSequences.length;

  if (itemsPerPage && itemsPerPage > 0) {
    const offset = (page > 0)
      ? (page-1) * itemsPerPage
      : 0;

    filteredPlannedSequences = filteredPlannedSequences.slice(offset, offset+itemsPerPage);
  }

  return {sequences: filteredPlannedSequences, count};
}

export default { refreshPlan, getPlannedSequences };

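A sketch of the date filtering, assuming the Date-valued ts0/ts1 produced by the transform in plan/mutations.js:

// Single day: sequences whose window touches 2021-06-01 (illustrative date).
const day = await this.$store.dispatch('getPlannedSequences',
  [projectId, { date0: '2021-06-01' }]);

// Inclusive range: windows contained between the two dates.
const week = await this.$store.dispatch('getPlannedSequences',
  [projectId, { date0: '2021-06-01', date1: '2021-06-07' }]);
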
lib/www/client/source/src/store/modules/plan/getters.js (new file, 18 lines)
@@ -0,0 +1,18 @@

function planRemarks (state) {
  return state.remarks;
}

function plannedSequences (state) {
  return state.sequences;
}

function plannedSequenceCount (state) {
  return state.sequences?.length ?? 0;
}

function plannedSequencesLoading (state) {
  return !!state.loading;
}

export default { planRemarks, plannedSequences, plannedSequenceCount, plannedSequencesLoading };

lib/www/client/source/src/store/modules/plan/index.js (new file, 6 lines)
@@ -0,0 +1,6 @@
import state from './state'
import getters from './getters'
import actions from './actions'
import mutations from './mutations'

export default { state, getters, actions, mutations };

lib/www/client/source/src/store/modules/plan/mutations.js (new file, 59 lines)
@@ -0,0 +1,59 @@

function transform (item) {
  item.ts0 = new Date(item.ts0);
  item.ts1 = new Date(item.ts1);
  return item;
}

// ATTENTION: This relies on the new planner endpoint
// as per issue #281.

function setPlan (state, plan) {
  // We don't need or want the planned sequences array to be reactive
  state.sequences = Object.freeze(plan.sequences.map(transform));
  state.remarks = plan.remarks;
}

function setPlanLoading (state, abortController = new AbortController()) {
  state.loading = abortController;
}

// This assumes that we know any transactions have finished or we
// don't care about aborting.
function clearPlanLoading (state) {
  state.loading = null;
}

function setPlanTimestamp (state, timestamp = new Date()) {
  // NOTE: There is no `modified_on` property in the plan
  // result or in the database schema, but we should probably add
  // one.
  if (timestamp === true) {
    // The plan lives in state.sequences; there is no state.plan key.
    const tstamp = state.sequences
      .map( item => item.modified_on )
      .reduce( (acc, cur) => acc > cur ? acc : cur, null );
    state.timestamp = tstamp ? new Date(tstamp) : new Date();
  } else {
    state.timestamp = timestamp;
  }
}

function setPlanETag (state, etag) {
  state.etag = etag;
}

function abortPlanLoading (state) {
  if (state.loading) {
    state.loading.abort();
  }
  state.loading = null;
}

export default {
  setPlan,
  setPlanLoading,
  clearPlanLoading,
  abortPlanLoading,
  setPlanTimestamp,
  setPlanETag
};

lib/www/client/source/src/store/modules/plan/state.js (new file, 9 lines)
@@ -0,0 +1,9 @@
const state = () => ({
  sequences: Object.freeze([]),
  remarks: null,
  loading: null,
  timestamp: null,
  etag: null,
});

export default state;

@@ -1,13 +1,19 @@

async function getProject ({commit, dispatch}, projectId) {
const res = await dispatch('api', [`/project/${String(projectId).toLowerCase()}`]);
const res = await dispatch('api', [`/project/${String(projectId).toLowerCase()}/configuration`]);
if (res) {
commit('setProjectName', res.name);
commit('setProjectId', res.pid);
commit('setProjectId', res.id?.toLowerCase());
commit('setProjectSchema', res.schema);
commit('setProjectConfiguration', res);
const recentProjects = JSON.parse(localStorage.getItem("recentProjects") || "[]")
recentProjects.unshift(res);
localStorage.setItem("recentProjects", JSON.stringify(recentProjects.slice(0, 3)));
} else {
commit('setProjectName', null);
commit('setProjectId', null);
commit('setProjectSchema', null);
commit('setProjectConfiguration', {});
}
}

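The persisted recent-projects list can be read back elsewhere, for instance for a landing-page menu; a sketch assuming the same storage key and that configurations carry the id and name fields committed above:

const recent = JSON.parse(localStorage.getItem("recentProjects") || "[]");
for (const p of recent) {        // at most 3 entries, newest first
  console.log(p.id, p.name);
}
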
@@ -0,0 +1,18 @@

function projectId (state) {
  return state.projectId;
}

function projectName (state) {
  return state.projectName;
}

function projectSchema (state) {
  return state.projectSchema;
}

function projectConfiguration (state) {
  return state.projectConfiguration;
}

export default { projectId, projectName, projectSchema, projectConfiguration };

Some files were not shown because too many files have changed in this diff.