Mirror of https://gitlab.com/wgp/dougal/software.git
Synced 2025-12-06 06:27:07 +00:00

Compare commits: 339 commits (76-add-con...673c60a359)
@@ -274,11 +274,27 @@ class Datastore:
        with self.conn.cursor() as cursor:
            cursor.execute("BEGIN;")

            # Check which preplot lines we actually have already imported,
            # as the line info file may contain extra lines.

            qry = """
                SELECT line, class
                FROM preplot_lines
                ORDER BY line, class;
            """
            cursor.execute(qry)
            preplot_lines = cursor.fetchall()

            hash = self.add_file(filepath, cursor)
            count = 0
            for line in lines:
                count += 1
                print(f"\u001b[2KSaving line {count} / {len(lines)}", end="\r", flush=True)

                if (line["sail_line"], "V") not in preplot_lines:
                    print(f"\u001b[2KSkipping line {count} / {len(lines)}", end="\n", flush=True)
                    continue

                print(f"\u001b[2KSaving line {count} / {len(lines)} ", end="\n", flush=True)

                sail_line = line["sail_line"]
                incr = line.get("incr", True)
@@ -23,6 +23,7 @@ transform = {
}

def parse_line (line, fields, fixed = None):
    # print("parse_line", line, fields, fixed)
    data = dict()

    if fixed:
@@ -51,6 +52,7 @@ def parse_line (line, fields, fixed = None):

        data[key] = value

    # print("parse_line data =", data)
    return data

@@ -73,6 +73,12 @@ if __name__ == '__main__':

    lineNameInfo = final_p111.get("lineNameInfo")
    pattern = final_p111.get("pattern")
    if not lineNameInfo:
        if not pattern:
            print("ERROR! Missing final.p111.lineNameInfo in project configuration. Cannot import final P111")
            raise Exception("Missing final.p111.lineNameInfo")
        else:
            print("WARNING! No `lineNameInfo` in project configuration (final.p111). You should add it to the settings.")
    rx = None
    if pattern and pattern.get("regex"):
        rx = re.compile(pattern["regex"])
@@ -114,27 +120,27 @@ if __name__ == '__main__':
        file_info = dict(zip(pattern["captures"], match.groups()))
        file_info["meta"] = {}

        if lineNameInfo:
            basename = os.path.basename(physical_filepath)
            fields = lineNameInfo.get("fields", {})
            fixed = lineNameInfo.get("fixed")
            try:
                parsed_line = fwr.parse_line(basename, fields, fixed)
            except ValueError as err:
                parsed_line = "Line format error: " + str(err)
            if type(parsed_line) == str:
                print(parsed_line, file=sys.stderr)
                print("This file will be ignored!")
                continue
        if lineNameInfo:
            basename = os.path.basename(physical_filepath)
            fields = lineNameInfo.get("fields", {})
            fixed = lineNameInfo.get("fixed")
            try:
                parsed_line = fwr.parse_line(basename, fields, fixed)
            except ValueError as err:
                parsed_line = "Line format error: " + str(err)
            if type(parsed_line) == str:
                print(parsed_line, file=sys.stderr)
                print("This file will be ignored!")
                continue

            file_info = {}
            file_info["sequence"] = parsed_line["sequence"]
            file_info["line"] = parsed_line["line"]
            del(parsed_line["sequence"])
            del(parsed_line["line"])
            file_info["meta"] = {
                "fileInfo": parsed_line
            }
            file_info = {}
            file_info["sequence"] = parsed_line["sequence"]
            file_info["line"] = parsed_line["line"]
            del(parsed_line["sequence"])
            del(parsed_line["line"])
            file_info["meta"] = {
                "fileInfo": parsed_line
            }

        if pending:
            print("Skipping / removing final file because marked as PENDING", logical_filepath)
@@ -41,6 +41,12 @@ if __name__ == '__main__':

    lineNameInfo = raw_p111.get("lineNameInfo")
    pattern = raw_p111.get("pattern")
    if not lineNameInfo:
        if not pattern:
            print("ERROR! Missing raw.p111.lineNameInfo in project configuration. Cannot import raw P111")
            raise Exception("Missing raw.p111.lineNameInfo")
        else:
            print("WARNING! No `lineNameInfo` in project configuration (raw.p111). You should add it to the settings.")
    rx = None
    if pattern and pattern.get("regex"):
        rx = re.compile(pattern["regex"])
@@ -96,14 +102,15 @@ if __name__ == '__main__':
                print("This file will be ignored!")
                continue

            file_info = {}
            file_info["sequence"] = parsed_line["sequence"]
            file_info["line"] = parsed_line["line"]
            del(parsed_line["sequence"])
            del(parsed_line["line"])
            file_info["meta"] = {
                "fileInfo": parsed_line
            }
            file_info = {}
            file_info["sequence"] = parsed_line["sequence"]
            file_info["line"] = parsed_line["line"]
            del(parsed_line["sequence"])
            del(parsed_line["line"])
            file_info["meta"] = {
                "fileInfo": parsed_line
            }

        p111_data = p111.from_file(physical_filepath)

        print("Saving")
@@ -1,5 +1,7 @@
#!/bin/bash

# Maximum runtime in seconds before killing an overdue instance (here: 15 minutes)
MAX_RUNTIME_SECONDS=$((15 * 60))

DOUGAL_ROOT=${DOUGAL_ROOT:-$(dirname "$0")/..}

@@ -80,8 +82,9 @@ function run () {
  # DESCRIPTION=""
  SERVICE="deferred_imports"

  $BINDIR/send_alert.py -t "$TITLE" -s "$SERVICE" -l "critical" \
    -O "$(cat $STDOUTLOG)" -E "$(cat $STDERRLOG)"
  # Disable GitLab alerts. They're just not very practical
  # $BINDIR/send_alert.py -t "$TITLE" -s "$SERVICE" -l "critical" \
  #   -O "$(cat $STDOUTLOG)" -E "$(cat $STDERRLOG)"

  exit 2
}
@@ -97,14 +100,37 @@ function cleanup () {
}

if [[ -f $LOCKFILE ]]; then
  PID=$(cat "$LOCKFILE")
  if pgrep -F "$LOCKFILE"; then
    print_warning $(printf "The previous process is still running (%d)" $PID)
    exit 1
  else
    rm "$LOCKFILE"
    print_warning $(printf "Previous process (%d) not found. Must have died unexpectedly" $PID)
  fi
  PID=$(cat "$LOCKFILE")
  if kill -0 "$PID" 2>/dev/null; then # Check if process is running
    # Get elapsed time in D-HH:MM:SS format and convert to seconds
    ELAPSED_STR=$(ps -p "$PID" -o etime= | tr -d '[:space:]')
    if [ -n "$ELAPSED_STR" ]; then
      # Convert D-HH:MM:SS to seconds
      ELAPSED_SECONDS=$(echo "$ELAPSED_STR" | awk -F'[-:]' '{
        seconds = 0
        if (NF == 4) { seconds += $1 * 86400 }       # Days
        if (NF >= 3) { seconds += $(NF-2) * 3600 }   # Hours
        if (NF >= 2) { seconds += $(NF-1) * 60 }     # Minutes
        seconds += $NF                               # Seconds
        print seconds
      }')
      if [ "$ELAPSED_SECONDS" -gt "$MAX_RUNTIME_SECONDS" ]; then
        # Kill the overdue process (SIGTERM; use -9 for SIGKILL if needed)
        kill "$PID" 2>/dev/null
        print_warning $(printf "Killed overdue process (%d) that ran for %s (%d seconds)" "$PID" "$ELAPSED_STR" "$ELAPSED_SECONDS")
        rm "$LOCKFILE"
      else
        print_warning $(printf "Previous process is still running (%d) for %s (%d seconds)" "$PID" "$ELAPSED_STR" "$ELAPSED_SECONDS")
        exit 1
      fi
    else
      print_warning $(printf "Could not retrieve elapsed time for process (%d)" "$PID")
      exit 1
    fi
  else
    rm "$LOCKFILE"
    print_warning $(printf "Previous process (%d) not found. Must have died unexpectedly" "$PID")
  fi
fi

echo "$$" > "$LOCKFILE" || {
89  bin/update_comparisons.js  Executable file
@@ -0,0 +1,89 @@
#!/usr/bin/node

const cmp = require('../lib/www/server/lib/comparisons');

async function purgeComparisons () {
  const groups = await cmp.groups();
  const comparisons = await cmp.getGroup();

  const pids = new Set(Object.values(groups).flat().map( p => p.pid ));
  const comparison_pids = new Set(comparisons.map( c => [ c.baseline_pid, c.monitor_pid ] ).flat());

  for (const pid of comparison_pids) {
    if (!pids.has(pid)) {
      console.log(`${pid} is no longer part of a group. Deleting comparisons`);

      const staleComps = comparisons.filter( c => c.baseline_pid == pid || c.monitor_pid == pid );
      for (const c of staleComps) {
        console.log(`Deleting comparison ${c.baseline_pid} → ${c.monitor_pid}`);
        await cmp.remove(c.baseline_pid, c.monitor_pid);
      }
    }
  }
}


async function main () {

  console.log("Looking for unreferenced comparisons to purge");
  await purgeComparisons();

  console.log("Retrieving project groups");
  const groups = await cmp.groups();

  if (!Object.keys(groups??{})?.length) {
    console.log("No groups found");
    return 0;
  }

  console.log(`Found ${Object.keys(groups)?.length} groups: ${Object.keys(groups).join(", ")}`);

  for (const groupName of Object.keys(groups)) {
    const projects = groups[groupName];

    console.log(`Fetching saved comparisons for ${groupName}`);

    const comparisons = await cmp.getGroup(groupName);

    if (!comparisons || !comparisons.length) {
      console.log(`No comparisons found for ${groupName}`);
      continue;
    }

    // Check if there are any projects that have been modified since the last comparison
    // or if there are any pairs that are no longer part of the group

    const outdated = comparisons.filter( c => {
      const baseline_tstamp = projects.find( p => p.pid === c.baseline_pid )?.tstamp;
      const monitor_tstamp = projects.find( p => p.pid === c.monitor_pid )?.tstamp;
      return (c.tstamp < baseline_tstamp) || (c.tstamp < monitor_tstamp) ||
        baseline_tstamp == null || monitor_tstamp == null;
    });

    for (const comparison of outdated) {
      console.log(`Removing stale comparison: ${comparison.baseline_pid} → ${comparison.monitor_pid}`);
      await cmp.remove(comparison.baseline_pid, comparison.monitor_pid);
    }

    if (projects?.length < 2) {
      console.log(`Group ${groupName} has fewer than two projects. No comparisons are possible`);
      continue;
    }

    // Re-run the comparisons that are not in the database. They may
    // be missing either because they were not there to start with
    // or because we just removed them due to being stale

    console.log(`Recalculating group ${groupName}`);
    await cmp.saveGroup(groupName);
  }

  console.log("Comparisons update done");
  return 0;
}

if (require.main === module) {
  main();
} else {
  module.exports = main;
}
@@ -2,6 +2,9 @@
--
-- New schema version: 0.5.2
--
-- WARNING: This update is buggy and does not give the desired
-- results. Schema version 0.5.4 fixes this.
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
@@ -0,0 +1,145 @@
-- Fix preplot_lines_summary view
--
-- New schema version: 0.5.4
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- Fixes upgrade 35 (0.5.2). The original description of 0.5.2 is included
-- below for ease of reference:
--
-- Following introduction of `preplot_saillines` (0.5.0), the incr and
-- ntba statuses are stored in a separate table, not in `preplot_lines`
-- (TODO: a future upgrade should remove those columns from `preplot_lines`)
--
-- Now any views referencing `incr` and `ntba` must be updated to point to
-- the new location of those attributes.
--
-- This update fixes #312.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE VIEW preplot_lines_summary
  AS
  WITH summary AS (
    SELECT DISTINCT pp.line,
      pp.class,
      first_value(pp.point) OVER w AS p0,
      last_value(pp.point) OVER w AS p1,
      count(pp.point) OVER w AS num_points,
      st_distance(first_value(pp.geometry) OVER w, last_value(pp.geometry) OVER w) AS length,
      st_azimuth(first_value(pp.geometry) OVER w, last_value(pp.geometry) OVER w) * 180::double precision / pi() AS azimuth0,
      st_azimuth(last_value(pp.geometry) OVER w, first_value(pp.geometry) OVER w) * 180::double precision / pi() AS azimuth1
    FROM preplot_points pp
    WHERE pp.class = 'V'::bpchar
    WINDOW w AS (PARTITION BY pp.line ORDER BY pp.point ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
  )
  SELECT DISTINCT psl.sailline AS line,
    CASE
      WHEN psl.incr THEN s.p0
      ELSE s.p1
    END AS fsp,
    CASE
      WHEN psl.incr THEN s.p1
      ELSE s.p0
    END AS lsp,
    s.num_points,
    s.length,
    CASE
      WHEN psl.incr THEN s.azimuth0
      ELSE s.azimuth1
    END AS azimuth,
    psl.incr,
    psl.remarks
  FROM summary s
  JOIN preplot_saillines psl ON psl.sailline_class = s.class AND s.line = psl.sailline
  ORDER BY psl.sailline, psl.incr;

  ALTER TABLE preplot_lines_summary
    OWNER TO postgres;
  COMMENT ON VIEW preplot_lines_summary
    IS 'Summarises ''V'' (vessel sailline) preplot lines.';

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.5.4' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.5.3' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
      HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.4"}')
  ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.5.4"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
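A quick sanity check after applying this patch is to query the rebuilt view in one of the survey schemas. This is only an illustrative sketch: "survey_example" is a placeholder schema name, not something created by the upgrade.

-- Hypothetical verification query; substitute one of your survey_% schemas.
SET search_path TO survey_example, public;
SELECT line, fsp, lsp, num_points, length, azimuth, incr
FROM preplot_lines_summary
ORDER BY line;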
110  etc/db/upgrades/upgrade38-v0.6.0-add-keystore-table.sql  Normal file
@@ -0,0 +1,110 @@
-- Add keystore table
--
-- New schema version: 0.6.0
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade only affects the `public` schema.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update adds a `keystore` table, intended for storing arbitrary
-- key / value pairs which, unlike the `info` tables, are not meant to
-- be directly accessible via the API. Its main purpose as of this writing
-- is to store user definitions (see #176, #177, #180).
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  CREATE TABLE IF NOT EXISTS keystore (
    type TEXT NOT NULL,       -- A class of data to be stored
    key TEXT NOT NULL,        -- A key that is unique for the class and access type
    last_modified TIMESTAMP   -- To detect update conflicts
      DEFAULT CURRENT_TIMESTAMP,
    data jsonb,
    PRIMARY KEY (type, key)   -- Composite primary key
  );

  -- Create a function to update the last_modified timestamp
  CREATE OR REPLACE FUNCTION update_last_modified()
  RETURNS TRIGGER AS $$
  BEGIN
    NEW.last_modified = CURRENT_TIMESTAMP;
    RETURN NEW;
  END;
  $$ LANGUAGE plpgsql;

  -- Create a trigger that calls the function before each update
  CREATE OR REPLACE TRIGGER update_keystore_last_modified
    BEFORE UPDATE ON keystore
    FOR EACH ROW
    EXECUTE FUNCTION update_last_modified();

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.0' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.5.4' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
      HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.0"}')
  ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.6.0"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
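As a rough illustration of how the new table is meant to be used (the 'example' type and key below are placeholders, not part of the upgrade), a value can be upserted and read back like this; the BEFORE UPDATE trigger refreshes last_modified automatically on the conflict-update path.

-- Hypothetical usage sketch only.
INSERT INTO public.keystore (type, key, data)
VALUES ('example', 'some-key', '{"note": "hello"}'::jsonb)
ON CONFLICT (type, key) DO UPDATE SET data = EXCLUDED.data;

SELECT data, last_modified FROM public.keystore
WHERE type = 'example' AND key = 'some-key';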
108  etc/db/upgrades/upgrade39-v0.6.1-add-default-user.sql  Normal file
@@ -0,0 +1,108 @@
-- Add default user
--
-- New schema version: 0.6.1
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade only affects the `public` schema.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update adds a default user to the system (see #176, #177, #180).
-- The default user can only be invoked by connecting from localhost.
--
-- This user has full access to every project via the organisations
-- permissions wildcard: `{"*": {read: true, write: true, edit: true}}`
-- and can be used to bootstrap the system by creating other users
-- and assigning organisational permissions.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  INSERT INTO keystore (type, key, data)
  VALUES ('user', '6f1e7159-4ca0-4ae4-ab4e-89078166cc10', '
    {
      "id": "6f1e7159-4ca0-4ae4-ab4e-89078166cc10",
      "ip": "127.0.0.0/24",
      "name": "☠️",
      "colour": "red",
      "active": true,
      "organisations": {
        "*": {
          "read": true,
          "write": true,
          "edit": true
        }
      }
    }
  '::jsonb)
  ON CONFLICT (type, key) DO NOTHING;

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.1' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.0' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
      HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.1"}')
  ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.6.1"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
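To confirm the bootstrap user landed (the key is the UUID hard-coded in the upgrade above), a query along these lines should return one row:

SELECT data->>'name' AS name, data->'organisations' AS organisations
FROM public.keystore
WHERE type = 'user' AND key = '6f1e7159-4ca0-4ae4-ab4e-89078166cc10';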
@@ -0,0 +1,106 @@
-- Add organisations section to project configurations
--
-- New schema version: 0.6.2
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade only affects the `public` schema.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update adds an "organisations" section to the configuration,
-- with a default configured organisation of "WGP" with full access.
-- This is so that projects can be made accessible after migrating
-- to the new permissions architecture.
--
-- In addition, projects with an id starting with "eq" are assumed to
-- be Equinor projects, and an additional organisation is added with
-- read-only access. This is intended for clients, which should be
-- assigned to the "Equinor" organisation.
--
-- Finally, we assign the vessel to the "WGP" organisation (full access)
-- so that we can actually use administrative endpoints.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- Add "organisations" section to configurations, if not already present
  UPDATE projects
  SET
    meta = jsonb_set(meta, '{organisations}', '{"WGP": {"read": true, "write": true, "edit": true}}'::jsonb, true)
  WHERE meta->'organisations' IS NULL;

  -- Add (or overwrite!) "organisations.Equinor" giving read-only access (can be changed later via API)
  UPDATE projects
  SET
    meta = jsonb_set(meta, '{organisations, Equinor}', '{"read": true, "write": false, "edit": false}'::jsonb, true)
  WHERE pid LIKE 'eq%';

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.2' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.1' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
      HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.2"}')
  ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.6.2"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
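After applying, the migrated permissions can be inspected per project. This is only a verification sketch, not part of the upgrade:

SELECT pid, meta->'organisations' AS organisations
FROM public.projects
ORDER BY pid;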
109  etc/db/upgrades/upgrade41-v0.6.3-add-comparisons.sql  Normal file
@@ -0,0 +1,109 @@
-- Add comparisons schema and table
--
-- New schema version: 0.6.3
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade creates a new schema called `comparisons`.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update adds a `comparisons` table to a `comparisons` schema.
-- The `comparisons.comparisons` table holds 4D prospect comparison data.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- BEGIN

  CREATE SCHEMA IF NOT EXISTS comparisons
    AUTHORIZATION postgres;

  COMMENT ON SCHEMA comparisons
    IS 'Holds 4D comparison data and logic';

  CREATE TABLE IF NOT EXISTS comparisons.comparisons
  (
    type text COLLATE pg_catalog."default" NOT NULL,
    baseline_pid text COLLATE pg_catalog."default" NOT NULL,
    monitor_pid text COLLATE pg_catalog."default" NOT NULL,
    data bytea,
    meta jsonb NOT NULL DEFAULT '{}'::jsonb,
    CONSTRAINT comparisons_pkey PRIMARY KEY (baseline_pid, monitor_pid, type)
  )

  TABLESPACE pg_default;

  ALTER TABLE IF EXISTS comparisons.comparisons
    OWNER to postgres;

  -- END

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.3' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.2' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
      HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.3"}')
  ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.6.3"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
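For reference, a comparison row is keyed on (baseline_pid, monitor_pid, type). In practice rows are written by the comparison scripts (see bin/update_comparisons.js above); the values below are invented placeholders, shown only to illustrate the shape of the table:

-- Hypothetical sketch only; type and pids are placeholders.
INSERT INTO comparisons.comparisons (type, baseline_pid, monitor_pid, meta)
VALUES ('4d', 'baseline_project', 'monitor_project', '{"note": "example"}'::jsonb)
ON CONFLICT (baseline_pid, monitor_pid, type) DO NOTHING;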
169  etc/db/upgrades/upgrade42-v0.6.4-notify-exclude-columns.sql  Normal file
@@ -0,0 +1,169 @@
-- Allow notify() to exclude columns from notifications
--
-- New schema version: 0.6.4
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update modifies notify() to accept, as optional arguments, the
-- names of columns that are to be *excluded* from the notification.
-- It is intended for tables with large columns that are of no
-- particular interest in a notification.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- BEGIN

  CREATE OR REPLACE FUNCTION public.notify()
    RETURNS trigger
    LANGUAGE 'plpgsql'
    COST 100
    VOLATILE NOT LEAKPROOF
  AS $BODY$
  DECLARE
    channel text := TG_ARGV[0];
    pid text;
    payload text;
    notification text;
    payload_id integer;
    old_json jsonb;
    new_json jsonb;
    excluded_col text;
    i integer;
  BEGIN

    -- Fetch pid
    SELECT projects.pid INTO pid FROM projects WHERE schema = TG_TABLE_SCHEMA;

    -- Build old and new as jsonb, excluding specified columns if provided
    IF OLD IS NOT NULL THEN
      old_json := row_to_json(OLD)::jsonb;
      FOR i IN 1 .. TG_NARGS - 1 LOOP
        excluded_col := TG_ARGV[i];
        old_json := old_json - excluded_col;
      END LOOP;
    ELSE
      old_json := NULL;
    END IF;

    IF NEW IS NOT NULL THEN
      new_json := row_to_json(NEW)::jsonb;
      FOR i IN 1 .. TG_NARGS - 1 LOOP
        excluded_col := TG_ARGV[i];
        new_json := new_json - excluded_col;
      END LOOP;
    ELSE
      new_json := NULL;
    END IF;

    -- Build payload
    payload := json_build_object(
      'tstamp', CURRENT_TIMESTAMP,
      'operation', TG_OP,
      'schema', TG_TABLE_SCHEMA,
      'table', TG_TABLE_NAME,
      'old', old_json,
      'new', new_json,
      'pid', pid
    )::text;

    -- Handle large payloads
    IF octet_length(payload) < 1000 THEN
      PERFORM pg_notify(channel, payload);
    ELSE
      -- Store large payload and notify with ID (as before)
      INSERT INTO notify_payloads (payload) VALUES (payload) RETURNING id INTO payload_id;

      notification := json_build_object(
        'tstamp', CURRENT_TIMESTAMP,
        'operation', TG_OP,
        'schema', TG_TABLE_SCHEMA,
        'table', TG_TABLE_NAME,
        'pid', pid,
        'payload_id', payload_id
      )::text;

      PERFORM pg_notify(channel, notification);
      RAISE INFO 'Payload over limit';
    END IF;
    RETURN NULL;
  END;
  $BODY$;

  ALTER FUNCTION public.notify()
    OWNER TO postgres;

  -- END

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.4' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.3' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
      HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.4"}')
  ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.6.4"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
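A consumer of these notifications listens on the channel and, when the JSON carries a payload_id instead of old/new (the over-limit path above), fetches the stored payload by id. This is only an illustrative sketch, assuming notify_payloads resolves in the public schema as the function implies; the channel name and id are placeholders taken from a received notification:

LISTEN comparisons;
SELECT payload FROM public.notify_payloads WHERE id = 42;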
@@ -0,0 +1,96 @@
-- Add change-notification trigger to the comparisons table
--
-- New schema version: 0.6.5
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the `comparisons` schema only.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update attaches the notify() trigger to the comparisons.comparisons
-- table, excluding the large `data` column from the notification payload
-- (it relies on the notify() changes introduced in 0.6.4).
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- BEGIN

  CREATE OR REPLACE TRIGGER comparisons_tg
    AFTER INSERT OR DELETE OR UPDATE
    ON comparisons.comparisons
    FOR EACH ROW
    EXECUTE FUNCTION public.notify('comparisons', 'data');

  -- END

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.5' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.4' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
      HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.5"}')
  ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.6.5"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,157 @@
-- Add last_project_update() function
--
-- New schema version: 0.6.6
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This adds a last_project_update(pid) function. It takes a project ID
-- and returns the last known timestamp from that project. Timestamps
-- are derived from multiple sources:
--
--   - raw_shots table
--   - final_shots table
--   - events_log_full table
--   - info table where key = 'qc'
--   - files table, from the hashes (which contain the file's mtime)
--   - project configuration, looking for an _updatedOn property
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- BEGIN

  CREATE OR REPLACE FUNCTION public.last_project_update(p_pid text)
    RETURNS timestamp with time zone
    LANGUAGE plpgsql
  AS $function$
  DECLARE
    v_last_ts timestamptz := NULL;
    v_current_ts timestamptz;
    v_current_str text;
    v_current_unix numeric;
    v_sid_rec record;
  BEGIN
    -- From raw_shots, final_shots, info, and files
    FOR v_sid_rec IN SELECT schema FROM public.projects WHERE pid = p_pid
    LOOP
      -- From raw_shots
      EXECUTE 'SELECT max(tstamp) FROM ' || v_sid_rec.schema || '.raw_shots' INTO v_current_ts;
      IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
        v_last_ts := v_current_ts;
      END IF;

      -- From final_shots
      EXECUTE 'SELECT max(tstamp) FROM ' || v_sid_rec.schema || '.final_shots' INTO v_current_ts;
      IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
        v_last_ts := v_current_ts;
      END IF;

      -- From info where key = 'qc'
      EXECUTE 'SELECT value->>''updatedOn'' FROM ' || v_sid_rec.schema || '.info WHERE key = ''qc''' INTO v_current_str;
      IF v_current_str IS NOT NULL THEN
        v_current_ts := v_current_str::timestamptz;
        IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
          v_last_ts := v_current_ts;
        END IF;
      END IF;

      -- From files hash second part, only for valid colon-separated hashes
      EXECUTE 'SELECT max( split_part(hash, '':'', 2)::numeric ) FROM ' || v_sid_rec.schema || '.files WHERE hash ~ ''^[0-9]+:[0-9]+\\.[0-9]+:[0-9]+\\.[0-9]+:[0-9a-f]+$''' INTO v_current_unix;
      IF v_current_unix IS NOT NULL THEN
        v_current_ts := to_timestamp(v_current_unix);
        IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
          v_last_ts := v_current_ts;
        END IF;
      END IF;

      -- From event_log_full
      EXECUTE 'SELECT max(tstamp) FROM ' || v_sid_rec.schema || '.event_log_full' INTO v_current_ts;
      IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
        v_last_ts := v_current_ts;
      END IF;
    END LOOP;

    -- From projects.meta->_updatedOn
    SELECT (meta->>'_updatedOn')::timestamptz FROM public.projects WHERE pid = p_pid INTO v_current_ts;
    IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
      v_last_ts := v_current_ts;
    END IF;

    RETURN v_last_ts;
  END;
  $function$;

  -- END

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.6' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.5' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
      HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.6"}')
  ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.6.6"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
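Calling the new function is straightforward; the project id below is a placeholder:

SELECT public.last_project_update('example_pid') AS last_update;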
@@ -45,11 +45,13 @@
    name: "No fire"
    id: no_fire
    check: |
      const currentShot = currentItem;
      const gunData = currentItem._("raw_meta.smsrc");
      (gunData && gunData.guns && gunData.guns.length != gunData.num_active)
        ? `Source ${gunData.src_number}: No fire (${gunData.guns.length - gunData.num_active} guns)`
        : true;
      // const currentShot = currentItem;
      // const gunData = currentItem._("raw_meta.smsrc");
      // (gunData && gunData.guns && gunData.guns.length != gunData.num_active)
      //   ? `Source ${gunData.src_number}: No fire (${gunData.guns.length - gunData.num_active} guns)`
      //   : true;
      // Disabled due to changes in Smartsource software. It now returns all guns on every shot, not just active ones.
      true

  -
    name: "Pressure errors"
968
lib/modules/@dougal/binary/classes.js
Normal file
968
lib/modules/@dougal/binary/classes.js
Normal file
@@ -0,0 +1,968 @@
|
||||
const codeToType = {
|
||||
0: Int8Array,
|
||||
1: Uint8Array,
|
||||
2: Int16Array,
|
||||
3: Uint16Array,
|
||||
4: Int32Array,
|
||||
5: Uint32Array,
|
||||
7: Float32Array,
|
||||
8: Float64Array,
|
||||
9: BigInt64Array,
|
||||
10: BigUint64Array
|
||||
};
|
||||
|
||||
const typeToBytes = {
|
||||
Int8Array: 1,
|
||||
Uint8Array: 1,
|
||||
Int16Array: 2,
|
||||
Uint16Array: 2,
|
||||
Int32Array: 4,
|
||||
Uint32Array: 4,
|
||||
Float32Array: 4,
|
||||
Float64Array: 8,
|
||||
BigInt64Array: 8,
|
||||
BigUint64Array: 8
|
||||
};
|
||||
|
||||
function readTypedValue(view, offset, type) {
|
||||
switch (type) {
|
||||
case Int8Array: return view.getInt8(offset);
|
||||
case Uint8Array: return view.getUint8(offset);
|
||||
case Int16Array: return view.getInt16(offset, true);
|
||||
case Uint16Array: return view.getUint16(offset, true);
|
||||
case Int32Array: return view.getInt32(offset, true);
|
||||
case Uint32Array: return view.getUint32(offset, true);
|
||||
case Float32Array: return view.getFloat32(offset, true);
|
||||
case Float64Array: return view.getFloat64(offset, true);
|
||||
case BigInt64Array: return view.getBigInt64(offset, true);
|
||||
case BigUint64Array: return view.getBigUint64(offset, true);
|
||||
default: throw new Error(`Unsupported type: ${type.name}`);
|
||||
}
|
||||
}
|
||||
|
||||
function writeTypedValue(view, offset, value, type) {
|
||||
switch (type) {
|
||||
case Int8Array: view.setInt8(offset, value); break;
|
||||
case Uint8Array: view.setUint8(offset, value); break;
|
||||
case Int16Array: view.setInt16(offset, value, true); break;
|
||||
case Uint16Array: view.setUint16(offset, value, true); break;
|
||||
case Int32Array: view.setInt32(offset, value, true); break;
|
||||
case Uint32Array: view.setUint32(offset, value, true); break;
|
||||
case Float32Array: view.setFloat32(offset, value, true); break;
|
||||
case Float64Array: view.setFloat64(offset, value, true); break;
|
||||
case BigInt64Array: view.setBigInt64(offset, BigInt(value), true); break;
|
||||
case BigUint64Array: view.setBigUint64(offset, BigInt(value), true); break;
|
||||
default: throw new Error(`Unsupported type: ${type.name}`);
|
||||
}
|
||||
}
|
||||
|
||||
class DougalBinaryBundle extends ArrayBuffer {
|
||||
|
||||
static HEADER_LENGTH = 4; // Length of a bundle header
|
||||
|
||||
/** Clone an existing ByteArray into a DougalBinaryBundle
|
||||
*/
|
||||
static clone (buffer) {
|
||||
const clone = new DougalBinaryBundle(buffer.byteLength);
|
||||
const uint8Array = new Uint8Array(buffer);
|
||||
const uint8ArrayClone = new Uint8Array(clone);
|
||||
uint8ArrayClone.set(uint8Array);
|
||||
return clone;
|
||||
}
|
||||
|
||||
constructor (length, options) {
|
||||
super (length, options);
|
||||
}
|
||||
|
||||
/** Get the count of bundles in this ByteArray.
|
||||
*
|
||||
* Stops at the first non-bundle looking offset
|
||||
*/
|
||||
get bundleCount () {
|
||||
let count = 0;
|
||||
let currentBundleOffset = 0;
|
||||
const view = new DataView(this);
|
||||
|
||||
while (currentBundleOffset < this.byteLength) {
|
||||
|
||||
const currentBundleHeader = view.getUint32(currentBundleOffset, true);
|
||||
if ((currentBundleHeader & 0xff) !== 0x1c) {
|
||||
// This is not a bundle
|
||||
return count;
|
||||
}
|
||||
let currentBundleLength = currentBundleHeader >>> 8;
|
||||
|
||||
currentBundleOffset += currentBundleLength + DougalBinaryBundle.HEADER_LENGTH;
|
||||
count++;
|
||||
}
|
||||
|
||||
return count;
|
||||
}
|
||||
|
||||
|
||||
  /** Get the number of chunks in the bundles of this ByteArray
   */
  get chunkCount () {
    let count = 0;
    let bundleOffset = 0;
    const view = new DataView(this);

    while (bundleOffset < this.byteLength) {
      const header = view.getUint32(bundleOffset, true);
      if ((header & 0xFF) !== 0x1C) break;
      const length = header >>> 8;
      if (bundleOffset + 4 + length > this.byteLength) break;

      let chunkOffset = bundleOffset + 4; // relative to buffer start

      while (chunkOffset < bundleOffset + 4 + length) {
        const chunkType = view.getUint8(chunkOffset);
        if (chunkType !== 0x11 && chunkType !== 0x12) break;

        const cCount = view.getUint16(chunkOffset + 2, true);
        const ΔelemC = view.getUint8(chunkOffset + 10);
        const elemC = view.getUint8(chunkOffset + 11);

        let localOffset = 12; // header size

        localOffset += ΔelemC + elemC; // preface

        // initial values
        for (let k = 0; k < ΔelemC; k++) {
          const typeByte = view.getUint8(chunkOffset + 12 + k);
          const baseCode = typeByte & 0xF;
          const baseType = codeToType[baseCode];
          if (!baseType) throw new Error('Invalid base type code');
          localOffset += typeToBytes[baseType.name];
        }

        // pad after initial
        while (localOffset % 4 !== 0) localOffset++;

        if (chunkType === 0x11) { // Sequential
          // record data: Δelems incrs
          for (let k = 0; k < ΔelemC; k++) {
            const typeByte = view.getUint8(chunkOffset + 12 + k);
            const incrCode = typeByte >> 4;
            const incrType = codeToType[incrCode];
            if (!incrType) throw new Error('Invalid incr type code');
            localOffset += cCount * typeToBytes[incrType.name];
          }

          // elems
          for (let k = 0; k < elemC; k++) {
            const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
            const type = codeToType[typeCode];
            if (!type) throw new Error('Invalid elem type code');
            localOffset += cCount * typeToBytes[type.name];
          }
        } else { // Interleaved
          // Compute exact stride for interleaved record data
          let ΔelemStride = 0;
          for (let k = 0; k < ΔelemC; k++) {
            const typeByte = view.getUint8(chunkOffset + 12 + k);
            const incrCode = typeByte >> 4;
            const incrType = codeToType[incrCode];
            if (!incrType) throw new Error('Invalid incr type code');
            ΔelemStride += typeToBytes[incrType.name];
          }
          let elemStride = 0;
          for (let k = 0; k < elemC; k++) {
            const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
            const type = codeToType[typeCode];
            if (!type) throw new Error('Invalid elem type code');
            elemStride += typeToBytes[type.name];
          }
          const recordStride = ΔelemStride + elemStride;
          localOffset += cCount * recordStride;
        }

        // pad after record
        while (localOffset % 4 !== 0) localOffset++;

        chunkOffset += localOffset;
        count++;
      }

      bundleOffset += 4 + length;
    }

    return count;
  }

  /** Return an array of DougalBinaryChunkSequential or DougalBinaryChunkInterleaved instances
   */
  chunks () {
    const chunks = [];
    let bundleOffset = 0;
    const view = new DataView(this);

    while (bundleOffset < this.byteLength) {
      const header = view.getUint32(bundleOffset, true);
      if ((header & 0xFF) !== 0x1C) break;
      const length = header >>> 8;
      if (bundleOffset + 4 + length > this.byteLength) break;

      let chunkOffset = bundleOffset + 4;

      while (chunkOffset < bundleOffset + 4 + length) {
        const chunkType = view.getUint8(chunkOffset);
        if (chunkType !== 0x11 && chunkType !== 0x12) break;

        const cCount = view.getUint16(chunkOffset + 2, true);
        const ΔelemC = view.getUint8(chunkOffset + 10);
        const elemC = view.getUint8(chunkOffset + 11);

        let localOffset = 12;

        localOffset += ΔelemC + elemC;

        // initial values
        for (let k = 0; k < ΔelemC; k++) {
          const typeByte = view.getUint8(chunkOffset + 12 + k);
          const baseCode = typeByte & 0xF;
          const baseType = codeToType[baseCode];
          if (!baseType) throw new Error('Invalid base type code');
          localOffset += typeToBytes[baseType.name];
        }

        // pad after initial
        while (localOffset % 4 !== 0) localOffset++;

        if (chunkType === 0x11) { // Sequential
          // record data: Δelems incrs
          for (let k = 0; k < ΔelemC; k++) {
            const typeByte = view.getUint8(chunkOffset + 12 + k);
            const incrCode = typeByte >> 4;
            const incrType = codeToType[incrCode];
            if (!incrType) throw new Error('Invalid incr type code');
            localOffset += cCount * typeToBytes[incrType.name];
          }

          // elems
          for (let k = 0; k < elemC; k++) {
            const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
            const type = codeToType[typeCode];
            if (!type) throw new Error('Invalid elem type code');
            localOffset += cCount * typeToBytes[type.name];
          }
        } else { // Interleaved
          // Compute exact stride for interleaved record data
          let ΔelemStride = 0;
          for (let k = 0; k < ΔelemC; k++) {
            const typeByte = view.getUint8(chunkOffset + 12 + k);
            const incrCode = typeByte >> 4;
            const incrType = codeToType[incrCode];
            if (!incrType) throw new Error('Invalid incr type code');
            ΔelemStride += typeToBytes[incrType.name];
          }
          let elemStride = 0;
          for (let k = 0; k < elemC; k++) {
            const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
            const type = codeToType[typeCode];
            if (!type) throw new Error('Invalid elem type code');
            elemStride += typeToBytes[type.name];
          }
          const recordStride = ΔelemStride + elemStride;
          localOffset += cCount * recordStride;
        }

        // pad after record
        while (localOffset % 4 !== 0) localOffset++;

        switch (chunkType) {
          case 0x11:
            chunks.push(new DougalBinaryChunkSequential(this, chunkOffset, localOffset));
            break;
          case 0x12:
            chunks.push(new DougalBinaryChunkInterleaved(this, chunkOffset, localOffset));
            break;
          default:
            throw new Error('Invalid chunk type');
        }

        chunkOffset += localOffset;
      }

      bundleOffset += 4 + length;
    }

    return chunks;
  }

  /** Return a ByteArray containing all data from all
   * chunks including reconstructed i, j and incremental
   * values as follows:
   *
   * <i_0> <i_1> … <i_x>                     // i values (constant)
   * <j_0> <j_1> … <j_x>                     // j values (j0 + Δj * index)
   * <Δelem_0_0> <Δelem_0_1> … <Δelem_0_x>   // reconstructed Δelem0 (uses baseType)
   * <Δelem_1_0> <Δelem_1_1> … <Δelem_1_x>   // reconstructed Δelem1
   * …
   * <Δelem_y_0> <Δelem_y_1> … <Δelem_y_x>   // reconstructed Δelem_y
   * <elem_0_0> <elem_0_1> … <elem_0_x>      // First elem
   * <elem_1_0> <elem_1_1> … <elem_1_x>      // Second elem
   * …
   * <elem_z_0> <elem_z_1> … <elem_z_x>      // Last elem
   *
   * It does not matter whether the underlying chunks are
   * sequential or interleaved. This function will transform
   * as necessary.
   *
   */
  getDataSequentially () {
    const chunks = this.chunks();
    if (chunks.length === 0) return new ArrayBuffer(0);

    const firstChunk = chunks[0];
    const ΔelemC = firstChunk.ΔelemCount;
    const elemC = firstChunk.elemCount;

    // Check consistency across chunks
    for (const chunk of chunks) {
      if (chunk.ΔelemCount !== ΔelemC || chunk.elemCount !== elemC) {
        throw new Error('Inconsistent chunk structures');
      }
    }

    // Get types from first chunk
    const view = new DataView(firstChunk);
    const ΔelemBaseTypes = [];
    for (let k = 0; k < ΔelemC; k++) {
      const typeByte = view.getUint8(12 + k);
      const baseCode = typeByte & 0xF;
      const baseType = codeToType[baseCode];
      if (!baseType) throw new Error('Invalid base type code');
      ΔelemBaseTypes.push(baseType);
    }
    const elemTypes = [];
    for (let k = 0; k < elemC; k++) {
      const typeCode = view.getUint8(12 + ΔelemC + k);
      const type = codeToType[typeCode];
      if (!type) throw new Error('Invalid elem type code');
      elemTypes.push(type);
    }

    // Compute total records
    const totalN = chunks.reduce((sum, c) => sum + c.jCount, 0);

    // Compute sizes
    const size_i = totalN * 2; // Uint16 for i
    const size_j = totalN * 4; // Int32 for j
    let size_Δelems = 0;
    for (const t of ΔelemBaseTypes) {
      size_Δelems += totalN * typeToBytes[t.name];
    }
    let size_elems = 0;
    for (const t of elemTypes) {
      size_elems += totalN * typeToBytes[t.name];
    }
    const totalSize = size_i + size_j + size_Δelems + size_elems;

    const ab = new ArrayBuffer(totalSize);
    const dv = new DataView(ab);

    // Write i's
    let off = 0;
    for (const chunk of chunks) {
      const i = chunk.i;
      for (let idx = 0; idx < chunk.jCount; idx++) {
        dv.setUint16(off, i, true);
        off += 2;
      }
    }

    // Write j's
    off = size_i;
    for (const chunk of chunks) {
      const j0 = chunk.j0;
      const Δj = chunk.Δj;
      for (let idx = 0; idx < chunk.jCount; idx++) {
        const j = j0 + idx * Δj;
        dv.setInt32(off, j, true);
        off += 4;
      }
    }

    // Write Δelems
    off = size_i + size_j;
    for (let m = 0; m < ΔelemC; m++) {
      const type = ΔelemBaseTypes[m];
      const bytes = typeToBytes[type.name];
      for (const chunk of chunks) {
        const arr = chunk.Δelem(m);
        for (let idx = 0; idx < chunk.jCount; idx++) {
          writeTypedValue(dv, off, arr[idx], type);
          off += bytes;
        }
      }
    }

    // Write elems
    for (let m = 0; m < elemC; m++) {
      const type = elemTypes[m];
      const bytes = typeToBytes[type.name];
      for (const chunk of chunks) {
        const arr = chunk.elem(m);
        for (let idx = 0; idx < chunk.jCount; idx++) {
          writeTypedValue(dv, off, arr[idx], type);
          off += bytes;
        }
      }
    }

    return ab;
  }

  /** Return a ByteArray containing all data from all
   * chunks including reconstructed i, j and incremental
   * values, interleaved as follows:
   *
   * <i_0> <j_0> <Δelem_0_0> <Δelem_1_0> … <Δelem_y_0> <elem_0_0> <elem_1_0> … <elem_z_0>
   * <i_1> <j_1> <Δelem_0_1> <Δelem_1_1> … <Δelem_y_1> <elem_0_1> <elem_1_1> … <elem_z_1>
   * <i_x> <j_x> <Δelem_0_x> <Δelem_1_x> … <Δelem_y_x> <elem_0_x> <elem_1_x> … <elem_z_x>
   *
   * It does not matter whether the underlying chunks are
   * sequential or interleaved. This function will transform
   * as necessary.
   *
   */
  getDataInterleaved () {
    const chunks = this.chunks();
    if (chunks.length === 0) return new ArrayBuffer(0);

    const firstChunk = chunks[0];
    const ΔelemC = firstChunk.ΔelemCount;
    const elemC = firstChunk.elemCount;

    // Check consistency across chunks
    for (const chunk of chunks) {
      if (chunk.ΔelemCount !== ΔelemC || chunk.elemCount !== elemC) {
        throw new Error('Inconsistent chunk structures');
      }
    }

    // Get types from first chunk
    const view = new DataView(firstChunk);
    const ΔelemBaseTypes = [];
    for (let k = 0; k < ΔelemC; k++) {
      const typeByte = view.getUint8(12 + k);
      const baseCode = typeByte & 0xF;
      const baseType = codeToType[baseCode];
      if (!baseType) throw new Error('Invalid base type code');
      ΔelemBaseTypes.push(baseType);
    }
    const elemTypes = [];
    for (let k = 0; k < elemC; k++) {
      const typeCode = view.getUint8(12 + ΔelemC + k);
      const type = codeToType[typeCode];
      if (!type) throw new Error('Invalid elem type code');
      elemTypes.push(type);
    }

    // Compute total records
    const totalN = chunks.reduce((sum, c) => sum + c.jCount, 0);

    // Compute record size
    const recordSize = 2 + 4 + // i (Uint16) + j (Int32)
      ΔelemBaseTypes.reduce((sum, t) => sum + typeToBytes[t.name], 0) +
      elemTypes.reduce((sum, t) => sum + typeToBytes[t.name], 0);
    const totalSize = totalN * recordSize;

    const ab = new ArrayBuffer(totalSize);
    const dv = new DataView(ab);

    let off = 0;
    for (const chunk of chunks) {
      const i = chunk.i;
      const j0 = chunk.j0;
      const Δj = chunk.Δj;
      for (let idx = 0; idx < chunk.jCount; idx++) {
        dv.setUint16(off, i, true);
        off += 2;
        const j = j0 + idx * Δj;
        dv.setInt32(off, j, true);
        off += 4;
        for (let m = 0; m < ΔelemC; m++) {
          const type = ΔelemBaseTypes[m];
          const bytes = typeToBytes[type.name];
          const arr = chunk.Δelem(m);
          writeTypedValue(dv, off, arr[idx], type);
          off += bytes;
        }
        for (let m = 0; m < elemC; m++) {
          const type = elemTypes[m];
          const bytes = typeToBytes[type.name];
          const arr = chunk.elem(m);
          writeTypedValue(dv, off, arr[idx], type);
          off += bytes;
        }
      }
    }

    return ab;
  }

  get records () {
    const data = [];
    for (const record of this) {
      data.push(record.slice(1));
    }
    return data;
  }

  [Symbol.iterator]() {
    const chunks = this.chunks();
    let chunkIndex = 0;
    let chunkIterator = chunks.length > 0 ? chunks[0][Symbol.iterator]() : null;

    return {
      next() {
        if (!chunkIterator) {
          return { done: true };
        }

        let result = chunkIterator.next();
        while (result.done && chunkIndex < chunks.length - 1) {
          chunkIndex++;
          chunkIterator = chunks[chunkIndex][Symbol.iterator]();
          result = chunkIterator.next();
        }

        return result;
      }
    };
  }

}

class DougalBinaryChunkSequential extends ArrayBuffer {

  constructor (buffer, offset, length) {
    super(length);
    new Uint8Array(this).set(new Uint8Array(buffer, offset, length));
    this._ΔelemCaches = new Array(this.ΔelemCount);
    this._elemCaches = new Array(this.elemCount);
    this._ΔelemBlockOffsets = null;
    this._elemBlockOffsets = null;
    this._recordOffset = null;
  }

  _getRecordOffset() {
    if (this._recordOffset !== null) return this._recordOffset;
    const view = new DataView(this);
    const ΔelemC = this.ΔelemCount;
    const elemC = this.elemCount;

    let recordOffset = 12 + ΔelemC + elemC;
    for (let k = 0; k < ΔelemC; k++) {
      const tb = view.getUint8(12 + k);
      const bc = tb & 0xF;
      const bt = codeToType[bc];
      recordOffset += typeToBytes[bt.name];
    }
    while (recordOffset % 4 !== 0) recordOffset++;
    this._recordOffset = recordOffset;
    return recordOffset;
  }

  _initBlockOffsets() {
    if (this._ΔelemBlockOffsets !== null) return;
    const view = new DataView(this);
    const count = this.jCount;
    const ΔelemC = this.ΔelemCount;
    const elemC = this.elemCount;

    const recordOffset = this._getRecordOffset();

    this._ΔelemBlockOffsets = [];
    let o = recordOffset;
    for (let k = 0; k < ΔelemC; k++) {
      this._ΔelemBlockOffsets[k] = o;
      const tb = view.getUint8(12 + k);
      const ic = tb >> 4;
      const it = codeToType[ic];
      o += count * typeToBytes[it.name];
    }

    this._elemBlockOffsets = [];
    for (let k = 0; k < elemC; k++) {
      this._elemBlockOffsets[k] = o;
      const tc = view.getUint8(12 + ΔelemC + k);
      const t = codeToType[tc];
      o += count * typeToBytes[t.name];
    }
  }

  /** Return the user-defined value
   */
  get udv () {
    return new DataView(this).getUint8(1);
  }

  /** Return the number of j elements in this chunk
   */
  get jCount () {
    return new DataView(this).getUint16(2, true);
  }

  /** Return the i value in this chunk
   */
  get i () {
    return new DataView(this).getUint16(4, true);
  }

  /** Return the j0 value in this chunk
   */
  get j0 () {
    return new DataView(this).getUint16(6, true);
  }

  /** Return the Δj value in this chunk
   */
  get Δj () {
    return new DataView(this).getInt16(8, true);
  }

  /** Return the Δelem_count value in this chunk
   */
  get ΔelemCount () {
    return new DataView(this).getUint8(10);
  }

  /** Return the elem_count value in this chunk
   */
  get elemCount () {
    return new DataView(this).getUint8(11);
  }

  /** Return a TypedArray (e.g., Uint16Array, …) for the n-th Δelem in the chunk
   */
  Δelem (n) {
    if (this._ΔelemCaches[n]) return this._ΔelemCaches[n];

    if (n < 0 || n >= this.ΔelemCount) throw new Error(`Invalid Δelem index: ${n}`);
    const view = new DataView(this);
    const count = this.jCount;
    const ΔelemC = this.ΔelemCount;

    const typeByte = view.getUint8(12 + n);
    const baseCode = typeByte & 0xF;
    const incrCode = typeByte >> 4;
    const baseType = codeToType[baseCode];
    const incrType = codeToType[incrCode];
    if (!baseType || !incrType) throw new Error('Invalid type codes for Δelem');

    // Find offset for initial value of this Δelem
    let initialOffset = 12 + ΔelemC + this.elemCount;
    for (let k = 0; k < n; k++) {
      const tb = view.getUint8(12 + k);
      const bc = tb & 0xF;
      const bt = codeToType[bc];
      initialOffset += typeToBytes[bt.name];
    }

    let current = readTypedValue(view, initialOffset, baseType);

    // Advance to start of record data (after all initials and pad)
    const recordOffset = this._getRecordOffset();

    // Find offset for deltas of this Δelem (skip previous Δelems' delta blocks)
    this._initBlockOffsets();
    const deltaOffset = this._ΔelemBlockOffsets[n];

    // Reconstruct the array
    const arr = new baseType(count);
    const isBigInt = baseType === BigInt64Array || baseType === BigUint64Array;
    arr[0] = current;
    for (let idx = 1; idx < count; idx++) {
      let delta = readTypedValue(view, deltaOffset + idx * typeToBytes[incrType.name], incrType);
      if (isBigInt) {
        delta = BigInt(delta);
        current += delta;
      } else {
        current += delta;
      }
      arr[idx] = current;
    }

    this._ΔelemCaches[n] = arr;
    return arr;
  }

  /** Return a TypedArray (e.g., Uint16Array, …) for the n-th elem in the chunk
   */
  elem (n) {
    if (this._elemCaches[n]) return this._elemCaches[n];

    if (n < 0 || n >= this.elemCount) throw new Error(`Invalid elem index: ${n}`);
    const view = new DataView(this);
    const count = this.jCount;
    const ΔelemC = this.ΔelemCount;
    const elemC = this.elemCount;

    const typeCode = view.getUint8(12 + ΔelemC + n);
    const type = codeToType[typeCode];
    if (!type) throw new Error('Invalid type code for elem');

    // Find offset for this elem's data block
    this._initBlockOffsets();
    const elemOffset = this._elemBlockOffsets[n];

    // Create and populate the array
    const arr = new type(count);
    const bytes = typeToBytes[type.name];
    for (let idx = 0; idx < count; idx++) {
      arr[idx] = readTypedValue(view, elemOffset + idx * bytes, type);
    }

    this._elemCaches[n] = arr;
    return arr;
  }

  getRecord (index) {
    if (index < 0 || index >= this.jCount) throw new Error(`Invalid record index: ${index}`);

    const arr = [this.udv, this.i, this.j0 + index * this.Δj];

    for (let m = 0; m < this.ΔelemCount; m++) {
      const values = this.Δelem(m);
      arr.push(values[index]);
    }

    for (let m = 0; m < this.elemCount; m++) {
      const values = this.elem(m);
      arr.push(values[index]);
    }

    return arr;
  }

  [Symbol.iterator]() {
    let index = 0;
    const chunk = this;
    return {
      next() {
        if (index < chunk.jCount) {
          return { value: chunk.getRecord(index++), done: false };
        } else {
          return { done: true };
        }
      }
    };
  }

}

class DougalBinaryChunkInterleaved extends ArrayBuffer {
  constructor(buffer, offset, length) {
    super(length);
    new Uint8Array(this).set(new Uint8Array(buffer, offset, length));
    this._incrStrides = [];
    this._elemStrides = [];
    this._incrOffsets = [];
    this._elemOffsets = [];
    this._recordStride = 0;
    this._recordOffset = null;
    this._initStrides();
    this._ΔelemCaches = new Array(this.ΔelemCount);
    this._elemCaches = new Array(this.elemCount);
  }

  _getRecordOffset() {
    if (this._recordOffset !== null) return this._recordOffset;
    const view = new DataView(this);
    const ΔelemC = this.ΔelemCount;
    const elemC = this.elemCount;

    let recordOffset = 12 + ΔelemC + elemC;
    for (let k = 0; k < ΔelemC; k++) {
      const tb = view.getUint8(12 + k);
      const bc = tb & 0xF;
      const bt = codeToType[bc];
      recordOffset += typeToBytes[bt.name];
    }
    while (recordOffset % 4 !== 0) recordOffset++;
    this._recordOffset = recordOffset;
    return recordOffset;
  }

  _initStrides() {
    const view = new DataView(this);
    const ΔelemC = this.ΔelemCount;
    const elemC = this.elemCount;

    // Compute incr strides and offsets
    let incrOffset = 0;
    for (let k = 0; k < ΔelemC; k++) {
      const typeByte = view.getUint8(12 + k);
      const incrCode = typeByte >> 4;
      const incrType = codeToType[incrCode];
      if (!incrType) throw new Error('Invalid incr type code');
      this._incrOffsets.push(incrOffset);
      const bytes = typeToBytes[incrType.name];
      this._incrStrides.push(bytes);
      incrOffset += bytes;
      this._recordStride += bytes;
    }

    // Compute elem strides and offsets
    let elemOffset = incrOffset;
    for (let k = 0; k < elemC; k++) {
      const typeCode = view.getUint8(12 + ΔelemC + k);
      const type = codeToType[typeCode];
      if (!type) throw new Error('Invalid elem type code');
      this._elemOffsets.push(elemOffset);
      const bytes = typeToBytes[type.name];
      this._elemStrides.push(bytes);
      elemOffset += bytes;
      this._recordStride += bytes;
    }
  }

  get udv() {
    return new DataView(this).getUint8(1);
  }

  get jCount() {
    return new DataView(this).getUint16(2, true);
  }

  get i() {
    return new DataView(this).getUint16(4, true);
  }

  get j0() {
    return new DataView(this).getUint16(6, true);
  }

  get Δj() {
    return new DataView(this).getInt16(8, true);
  }

  get ΔelemCount() {
    return new DataView(this).getUint8(10);
  }

  get elemCount() {
    return new DataView(this).getUint8(11);
  }

  Δelem(n) {
    if (this._ΔelemCaches[n]) return this._ΔelemCaches[n];

    if (n < 0 || n >= this.ΔelemCount) throw new Error(`Invalid Δelem index: ${n}`);
    const view = new DataView(this);
    const count = this.jCount;
    const ΔelemC = this.ΔelemCount;

    const typeByte = view.getUint8(12 + n);
    const baseCode = typeByte & 0xF;
    const incrCode = typeByte >> 4;
    const baseType = codeToType[baseCode];
    const incrType = codeToType[incrCode];
    if (!baseType || !incrType) throw new Error('Invalid type codes for Δelem');

    // Find offset for initial value of this Δelem
    let initialOffset = 12 + ΔelemC + this.elemCount;
    for (let k = 0; k < n; k++) {
      const tb = view.getUint8(12 + k);
      const bc = tb & 0xF;
      const bt = codeToType[bc];
      initialOffset += typeToBytes[bt.name];
    }

    let current = readTypedValue(view, initialOffset, baseType);

    // Find offset to start of record data
    const recordOffset = this._getRecordOffset();

    // Use precomputed offset for this Δelem
    const deltaOffset = recordOffset + this._incrOffsets[n];

    // Reconstruct the array
    const arr = new baseType(count);
    const isBigInt = baseType === BigInt64Array || baseType === BigUint64Array;
    arr[0] = current;
    for (let idx = 1; idx < count; idx++) {
      let delta = readTypedValue(view, deltaOffset + idx * this._recordStride, incrType);
      if (isBigInt) {
        delta = BigInt(delta);
        current += delta;
      } else {
        current += delta;
      }
      arr[idx] = current;
    }

    this._ΔelemCaches[n] = arr;
    return arr;
  }

  elem(n) {
    if (this._elemCaches[n]) return this._elemCaches[n];

    if (n < 0 || n >= this.elemCount) throw new Error(`Invalid elem index: ${n}`);
    const view = new DataView(this);
    const count = this.jCount;
    const ΔelemC = this.ΔelemCount;

    const typeCode = view.getUint8(12 + ΔelemC + n);
    const type = codeToType[typeCode];
    if (!type) throw new Error('Invalid type code for elem');

    // Find offset to start of record data
    const recordOffset = this._getRecordOffset();

    // Use precomputed offset for this elem (relative to start of record data)
    const elemOffset = recordOffset + this._elemOffsets[n];

    // Create and populate the array
    const arr = new type(count);
    const bytes = typeToBytes[type.name];
    for (let idx = 0; idx < count; idx++) {
      arr[idx] = readTypedValue(view, elemOffset + idx * this._recordStride, type);
    }

    this._elemCaches[n] = arr;
    return arr;
  }

  getRecord (index) {
    if (index < 0 || index >= this.jCount) throw new Error(`Invalid record index: ${index}`);

    const arr = [this.udv, this.i, this.j0 + index * this.Δj];

    for (let m = 0; m < this.ΔelemCount; m++) {
      const values = this.Δelem(m);
      arr.push(values[index]);
    }

    for (let m = 0; m < this.elemCount; m++) {
      const values = this.elem(m);
      arr.push(values[index]);
    }

    return arr;
  }

  [Symbol.iterator]() {
    let index = 0;
    const chunk = this;
    return {
      next() {
        if (index < chunk.jCount) {
          return { value: chunk.getRecord(index++), done: false };
        } else {
          return { done: true };
        }
      }
    };
  }

}

module.exports = { DougalBinaryBundle, DougalBinaryChunkSequential, DougalBinaryChunkInterleaved }
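
// Usage sketch: a hand-assembled, single-chunk sequential bundle run through
// the classes above. The byte values (i = 7, j0 = 100, Δj = 2, two records,
// one Uint16 elem) are arbitrary illustration data, not taken from real use.
if (require.main === module) {
  const demo = new ArrayBuffer(24);
  const dv = new DataView(demo);
  dv.setUint32(0, 0x1c | (20 << 8), true); // bundle header: marker 0x1c, 20-byte payload
  dv.setUint8(4, 0x11);                    // chunk type: sequential
  dv.setUint8(5, 0);                       // udv
  dv.setUint16(6, 2, true);                // record count
  dv.setUint16(8, 7, true);                // i
  dv.setUint16(10, 100, true);             // j0
  dv.setInt16(12, 2, true);                // Δj
  dv.setUint8(14, 0);                      // Δelem_count
  dv.setUint8(15, 1);                      // elem_count
  dv.setUint8(16, 3);                      // elem type code: Uint16Array
  // bytes 17–19 pad the preface to a 4-byte boundary
  dv.setUint16(20, 500, true);             // elem value, record 0
  dv.setUint16(22, 501, true);             // elem value, record 1

  const bundle = DougalBinaryBundle.clone(demo);
  console.log(bundle.bundleCount, bundle.chunkCount); // 1 1
  console.log(bundle.records); // [ [ 7, 100, 500 ], [ 7, 102, 501 ] ]
}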
327	lib/modules/@dougal/binary/decode.js	Normal file
@@ -0,0 +1,327 @@
const codeToType = {
  0: Int8Array,
  1: Uint8Array,
  2: Int16Array,
  3: Uint16Array,
  4: Int32Array,
  5: Uint32Array,
  7: Float32Array,
  8: Float64Array,
  9: BigInt64Array,
  10: BigUint64Array
};

const typeToBytes = {
  Int8Array: 1,
  Uint8Array: 1,
  Int16Array: 2,
  Uint16Array: 2,
  Int32Array: 4,
  Uint32Array: 4,
  Float32Array: 4,
  Float64Array: 8,
  BigInt64Array: 8,
  BigUint64Array: 8
};

function sequential(binary) {
  if (!(binary instanceof Uint8Array) || binary.length < 4) {
    throw new Error('Invalid binary input');
  }

  const view = new DataView(binary.buffer, binary.byteOffset, binary.byteLength);
  let offset = 0;

  // Initialize result (assuming single i value for simplicity; extend for multiple i values if needed)
  const result = { i: null, j: [], Δelems: [], elems: [] };

  // Process bundles
  while (offset < binary.length) {
    // Read bundle header
    if (offset + 4 > binary.length) throw new Error('Incomplete bundle header');

    const bundleHeader = view.getUint32(offset, true);
    if ((bundleHeader & 0xFF) !== 0x1C) throw new Error('Invalid bundle marker');
    const bundleLength = bundleHeader >>> 8;
    offset += 4;
    const bundleEnd = offset + bundleLength;

    if (bundleEnd > binary.length) throw new Error('Bundle length exceeds input size');

    // Process chunks in bundle
    while (offset < bundleEnd) {
      // Read chunk header
      if (offset + 12 > bundleEnd) throw new Error('Incomplete chunk header');
      const chunkType = view.getUint8(offset);
      if (chunkType !== 0x11) throw new Error(`Unsupported chunk type: ${chunkType}`);
      offset += 1; // Skip chunk type byte
      offset += 1; // Skip udv
      const count = view.getUint16(offset, true); offset += 2;
      if (count > 65535) throw new Error('Chunk count exceeds 65535');
      const iValue = view.getUint16(offset, true); offset += 2;
      const j0 = view.getUint16(offset, true); offset += 2;
      const Δj = view.getInt16(offset, true); offset += 2;
      const ΔelemCount = view.getUint8(offset++); // Δelem_count
      const elemCount = view.getUint8(offset++); // elem_count

      // Set i value (assuming all chunks share the same i)
      if (result.i === null) result.i = iValue;
      else if (result.i !== iValue) throw new Error('Multiple i values not supported');

      // Read preface (element types)
      const ΔelemTypes = [];
      for (let i = 0; i < ΔelemCount; i++) {
        if (offset >= bundleEnd) throw new Error('Incomplete Δelem types');
        const typeByte = view.getUint8(offset++);
        const baseCode = typeByte & 0x0F;
        const incrCode = typeByte >> 4;
        if (!codeToType[baseCode] || !codeToType[incrCode]) {
          throw new Error(`Invalid type code in Δelem: ${typeByte}`);
        }
        ΔelemTypes.push({ baseType: codeToType[baseCode], incrType: codeToType[incrCode] });
      }
      const elemTypes = [];
      for (let i = 0; i < elemCount; i++) {
        if (offset >= bundleEnd) throw new Error('Incomplete elem types');
        const typeCode = view.getUint8(offset++);
        if (!codeToType[typeCode]) throw new Error(`Invalid type code in elem: ${typeCode}`);
        elemTypes.push(codeToType[typeCode]);
      }

      // Initialize Δelems and elems arrays if first chunk
      if (!result.Δelems.length && ΔelemCount > 0) {
        result.Δelems = Array(ΔelemCount).fill().map(() => []);
      }
      if (!result.elems.length && elemCount > 0) {
        result.elems = Array(elemCount).fill().map(() => []);
      }

      // Read initial values for Δelems
      const initialValues = [];
      for (const { baseType } of ΔelemTypes) {
        if (offset + typeToBytes[baseType.name] > bundleEnd) {
          throw new Error('Incomplete initial values');
        }
        initialValues.push(readTypedValue(view, offset, baseType));
        offset += typeToBytes[baseType.name];
      }
      // Skip padding
      while (offset % 4 !== 0) {
        if (offset >= bundleEnd) throw new Error('Incomplete padding after initial values');
        offset++;
      }

      // Reconstruct j values
      for (let idx = 0; idx < count; idx++) {
        result.j.push(j0 + idx * Δj);
      }

      // Read record data (non-interleaved)
      for (let i = 0; i < ΔelemCount; i++) {
        let current = initialValues[i];
        const values = result.Δelems[i];
        const incrType = ΔelemTypes[i].incrType;
        const isBigInt = typeof current === 'bigint';
        for (let idx = 0; idx < count; idx++) {
          if (offset + typeToBytes[incrType.name] > bundleEnd) {
            throw new Error('Incomplete Δelem data');
          }
          let delta = readTypedValue(view, offset, incrType);
          if (idx === 0) {
            values.push(isBigInt ? Number(current) : current);
          } else {
            if (isBigInt) {
              delta = BigInt(delta);
              current += delta;
              values.push(Number(current));
            } else {
              current += delta;
              values.push(current);
            }
          }
          offset += typeToBytes[incrType.name];
        }
      }
      for (let i = 0; i < elemCount; i++) {
        const values = result.elems[i];
        const type = elemTypes[i];
        const isBigInt = type === BigInt64Array || type === BigUint64Array;
        for (let idx = 0; idx < count; idx++) {
          if (offset + typeToBytes[type.name] > bundleEnd) {
            throw new Error('Incomplete elem data');
          }
          let value = readTypedValue(view, offset, type);
          values.push(isBigInt ? Number(value) : value);
          offset += typeToBytes[type.name];
        }
      }
      // Skip padding
      while (offset % 4 !== 0) {
        if (offset >= bundleEnd) throw new Error('Incomplete padding after record data');
        offset++;
      }
    }
  }

  return result;
}

function interleaved(binary) {
  if (!(binary instanceof Uint8Array) || binary.length < 4) {
    throw new Error('Invalid binary input');
  }

  const view = new DataView(binary.buffer, binary.byteOffset, binary.byteLength);
  let offset = 0;

  // Initialize result (assuming single i value for simplicity; extend for multiple i values if needed)
  const result = { i: null, j: [], Δelems: [], elems: [] };

  // Process bundles
  while (offset < binary.length) {
    // Read bundle header
    if (offset + 4 > binary.length) throw new Error('Incomplete bundle header');

    const bundleHeader = view.getUint32(offset, true);
    if ((bundleHeader & 0xFF) !== 0x1C) throw new Error('Invalid bundle marker');
    const bundleLength = bundleHeader >>> 8;
    offset += 4;
    const bundleEnd = offset + bundleLength;

    if (bundleEnd > binary.length) throw new Error('Bundle length exceeds input size');

    // Process chunks in bundle
    while (offset < bundleEnd) {
      // Read chunk header
      if (offset + 12 > bundleEnd) throw new Error('Incomplete chunk header');
      const chunkType = view.getUint8(offset);
      if (chunkType !== 0x12) throw new Error(`Unsupported chunk type: ${chunkType}`);
      offset += 1; // Skip chunk type byte
      offset += 1; // Skip udv
      const count = view.getUint16(offset, true); offset += 2;
      if (count > 65535) throw new Error('Chunk count exceeds 65535');
      const iValue = view.getUint16(offset, true); offset += 2;
      const j0 = view.getUint16(offset, true); offset += 2;
      const Δj = view.getInt16(offset, true); offset += 2;
      const ΔelemCount = view.getUint8(offset++); // Δelem_count
      const elemCount = view.getUint8(offset++); // elem_count

      // Set i value (assuming all chunks share the same i)
      if (result.i === null) result.i = iValue;
      else if (result.i !== iValue) throw new Error('Multiple i values not supported');

      // Read preface (element types)
      const ΔelemTypes = [];
      for (let i = 0; i < ΔelemCount; i++) {
        if (offset >= bundleEnd) throw new Error('Incomplete Δelem types');
        const typeByte = view.getUint8(offset++);
        const baseCode = typeByte & 0x0F;
        const incrCode = typeByte >> 4;
        if (!codeToType[baseCode] || !codeToType[incrCode]) {
          throw new Error(`Invalid type code in Δelem: ${typeByte}`);
        }
        ΔelemTypes.push({ baseType: codeToType[baseCode], incrType: codeToType[incrCode] });
      }
      const elemTypes = [];
      for (let i = 0; i < elemCount; i++) {
        if (offset >= bundleEnd) throw new Error('Incomplete elem types');
        const typeCode = view.getUint8(offset++);
        if (!codeToType[typeCode]) throw new Error(`Invalid type code in elem: ${typeCode}`);
        elemTypes.push(codeToType[typeCode]);
      }

      // Initialize Δelems and elems arrays if first chunk
      if (!result.Δelems.length && ΔelemCount > 0) {
        result.Δelems = Array(ΔelemCount).fill().map(() => []);
      }
      if (!result.elems.length && elemCount > 0) {
        result.elems = Array(elemCount).fill().map(() => []);
      }

      // Read initial values for Δelems
      const initialValues = [];
      for (const { baseType } of ΔelemTypes) {
        if (offset + typeToBytes[baseType.name] > bundleEnd) {
          throw new Error('Incomplete initial values');
        }
        initialValues.push(readTypedValue(view, offset, baseType));
        offset += typeToBytes[baseType.name];
      }
      // Skip padding
      while (offset % 4 !== 0) {
        if (offset >= bundleEnd) throw new Error('Incomplete padding after initial values');
        offset++;
      }

      // Reconstruct j values
      for (let idx = 0; idx < count; idx++) {
        result.j.push(j0 + idx * Δj);
      }

      // Read interleaved record data
      for (let idx = 0; idx < count; idx++) {
        // Read Δelems
        for (let i = 0; i < ΔelemCount; i++) {
          const values = result.Δelems[i];
          const incrType = ΔelemTypes[i].incrType;
          const isBigInt = typeof initialValues[i] === 'bigint';
          if (offset + typeToBytes[incrType.name] > bundleEnd) {
            throw new Error('Incomplete Δelem data');
          }
          let delta = readTypedValue(view, offset, incrType);
          offset += typeToBytes[incrType.name];
          if (idx === 0) {
            values.push(isBigInt ? Number(initialValues[i]) : initialValues[i]);
          } else {
            if (isBigInt) {
              delta = BigInt(delta);
              initialValues[i] += delta;
              values.push(Number(initialValues[i]));
            } else {
              initialValues[i] += delta;
              values.push(initialValues[i]);
            }
          }
        }
        // Read elems
        for (let i = 0; i < elemCount; i++) {
          const values = result.elems[i];
          const type = elemTypes[i];
          const isBigInt = type === BigInt64Array || type === BigUint64Array;
          if (offset + typeToBytes[type.name] > bundleEnd) {
            throw new Error('Incomplete elem data');
          }
          let value = readTypedValue(view, offset, type);
          values.push(isBigInt ? Number(value) : value);
          offset += typeToBytes[type.name];
        }
      }
      // Skip padding
      while (offset % 4 !== 0) {
        if (offset >= bundleEnd) throw new Error('Incomplete padding after record data');
        offset++;
      }
    }
  }

  return result;
}

function readTypedValue(view, offset, type) {
  switch (type) {
    case Int8Array: return view.getInt8(offset);
    case Uint8Array: return view.getUint8(offset);
    case Int16Array: return view.getInt16(offset, true);
    case Uint16Array: return view.getUint16(offset, true);
    case Int32Array: return view.getInt32(offset, true);
    case Uint32Array: return view.getUint32(offset, true);
    case Float32Array: return view.getFloat32(offset, true);
    case Float64Array: return view.getFloat64(offset, true);
    case BigInt64Array: return view.getBigInt64(offset, true);
    case BigUint64Array: return view.getBigUint64(offset, true);
    default: throw new Error(`Unsupported type: ${type.name}`);
  }
}

module.exports = { sequential, interleaved };
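
// Usage sketch: decoding a minimal, hand-assembled sequential bundle (one
// chunk, i = 7, j0 = 100, Δj = 2, a single Uint16 elem with two records).
// The byte values are illustrative only.
if (require.main === module) {
  const demo = new Uint8Array(24);
  const dv = new DataView(demo.buffer);
  dv.setUint32(0, 0x1c | (20 << 8), true); // bundle header: marker 0x1c, 20-byte payload
  dv.setUint8(4, 0x11);                    // sequential chunk
  dv.setUint8(5, 0);                       // udv
  dv.setUint16(6, 2, true);                // count
  dv.setUint16(8, 7, true);                // i
  dv.setUint16(10, 100, true);             // j0
  dv.setInt16(12, 2, true);                // Δj
  dv.setUint8(14, 0);                      // Δelem_count
  dv.setUint8(15, 1);                      // elem_count
  dv.setUint8(16, 3);                      // elem type code: Uint16Array
  dv.setUint16(20, 500, true);             // elem value, record 0
  dv.setUint16(22, 501, true);             // elem value, record 1

  console.log(sequential(demo));
  // → { i: 7, j: [ 100, 102 ], Δelems: [], elems: [ [ 500, 501 ] ] }
}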
380	lib/modules/@dougal/binary/encode.js	Normal file
@@ -0,0 +1,380 @@
const typeToCode = {
  Int8Array: 0,
  Uint8Array: 1,
  Int16Array: 2,
  Uint16Array: 3,
  Int32Array: 4,
  Uint32Array: 5,
  Float32Array: 7, // Float16 not natively supported in JS, use Float32
  Float64Array: 8,
  BigInt64Array: 9,
  BigUint64Array: 10
};

const typeToBytes = {
  Int8Array: 1,
  Uint8Array: 1,
  Int16Array: 2,
  Uint16Array: 2,
  Int32Array: 4,
  Uint32Array: 4,
  Float32Array: 4,
  Float64Array: 8,
  BigInt64Array: 8,
  BigUint64Array: 8
};

function sequential(json, iGetter, jGetter, Δelems = [], elems = [], udv = 0) {
  if (!Array.isArray(json) || !json.length) return new Uint8Array(0);
  if (typeof iGetter !== 'function' || typeof jGetter !== 'function') throw new Error('i and j must be getter functions');
  Δelems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`Δelems[${idx}].key must be a getter function`);
  });
  elems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`elems[${idx}].key must be a getter function`);
  });

  // Group records by i value
  const groups = new Map();
  for (const record of json) {
    const iValue = iGetter(record);
    if (iValue == null) throw new Error('Missing i value from getter');
    if (!groups.has(iValue)) groups.set(iValue, []);
    groups.get(iValue).push(record);
  }

  const maxBundleSize = 0xFFFFFF; // Max bundle length (24 bits)
  const buffers = [];

  // Process each group (i value)
  for (const [iValue, records] of groups) {
    // Sort records by j to ensure consistent order
    records.sort((a, b) => jGetter(a) - jGetter(b));
    const jValues = records.map(jGetter);
    if (jValues.some(v => v == null)) throw new Error('Missing j value from getter');

    // Split records into chunks based on Δj continuity
    const chunks = [];
    let currentChunk = [records[0]];
    let currentJ0 = jValues[0];
    let currentΔj = records.length > 1 ? jValues[1] - jValues[0] : 0;

    for (let idx = 1; idx < records.length; idx++) {
      const chunkIndex = chunks.reduce((sum, c) => sum + c.records.length, 0);
      const expectedJ = currentJ0 + (idx - chunkIndex) * currentΔj;
      if (jValues[idx] !== expectedJ || idx - chunkIndex >= 65536) {
        chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
        currentChunk = [records[idx]];
        currentJ0 = jValues[idx];
        currentΔj = idx + 1 < records.length ? jValues[idx + 1] - jValues[idx] : 0;
      } else {
        currentChunk.push(records[idx]);
      }
    }
    if (currentChunk.length > 0) {
      chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
    }

    // Calculate total size for all chunks in this group by simulating offsets
    const chunkSizes = chunks.map(({ records: chunkRecords }) => {
      if (chunkRecords.length > 65535) throw new Error(`Chunk size exceeds 65535 for i=${iValue}`);
      let simulatedOffset = 0; // Relative to chunk start
      simulatedOffset += 12; // Header
      simulatedOffset += Δelems.length + elems.length; // Preface
      simulatedOffset += Δelems.reduce((sum, e) => sum + typeToBytes[e.baseType.name], 0); // Initial values
      while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after initial
      simulatedOffset += chunkRecords.length * (
        Δelems.reduce((sum, e) => sum + typeToBytes[e.incrType.name], 0) +
        elems.reduce((sum, e) => sum + typeToBytes[e.type.name], 0)
      ); // Record data
      while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after record
      return simulatedOffset;
    });
    const totalChunkSize = chunkSizes.reduce((sum, size) => sum + size, 0);

    // Start a new bundle if needed
    const lastBundle = buffers[buffers.length - 1];
    if (!lastBundle || lastBundle.offset + totalChunkSize > maxBundleSize) {
      buffers.push({ offset: 4, buffer: null, view: null });
    }

    // Initialize DataView for current bundle
    const currentBundle = buffers[buffers.length - 1];
    if (!currentBundle.buffer) {
      const requiredSize = totalChunkSize + 4;
      currentBundle.buffer = new ArrayBuffer(requiredSize);
      currentBundle.view = new DataView(currentBundle.buffer);
    }

    // Process each chunk
    for (const { records: chunkRecords, j0, Δj } of chunks) {
      const chunkSize = chunkSizes.shift();

      // Ensure buffer is large enough
      if (currentBundle.offset + chunkSize > currentBundle.buffer.byteLength) {
        const newSize = currentBundle.offset + chunkSize;
        const newBuffer = new ArrayBuffer(newSize);
        new Uint8Array(newBuffer).set(new Uint8Array(currentBundle.buffer));
        currentBundle.buffer = newBuffer;
        currentBundle.view = new DataView(newBuffer);
      }

      // Write chunk header
      let offset = currentBundle.offset;
      currentBundle.view.setUint8(offset++, 0x11); // Chunk type
      currentBundle.view.setUint8(offset++, udv); // udv
      currentBundle.view.setUint16(offset, chunkRecords.length, true); offset += 2; // count
      currentBundle.view.setUint16(offset, iValue, true); offset += 2; // i
      currentBundle.view.setUint16(offset, j0, true); offset += 2; // j0
      currentBundle.view.setInt16(offset, Δj, true); offset += 2; // Δj
      currentBundle.view.setUint8(offset++, Δelems.length); // Δelem_count
      currentBundle.view.setUint8(offset++, elems.length); // elem_count

      // Write chunk preface (element types)
      for (const elem of Δelems) {
        const baseCode = typeToCode[elem.baseType.name];
        const incrCode = typeToCode[elem.incrType.name];
        currentBundle.view.setUint8(offset++, (incrCode << 4) | baseCode);
      }
      for (const elem of elems) {
        currentBundle.view.setUint8(offset++, typeToCode[elem.type.name]);
      }

      // Write initial values for Δelems
      for (const elem of Δelems) {
        const value = elem.key(chunkRecords[0]);
        if (value == null) throw new Error('Missing Δelem value from getter');
        writeTypedValue(currentBundle.view, offset, value, elem.baseType);
        offset += typeToBytes[elem.baseType.name];
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Write record data (non-interleaved)
      for (const elem of Δelems) {
        let prev = elem.key(chunkRecords[0]);
        for (let idx = 0; idx < chunkRecords.length; idx++) {
          const value = idx === 0 ? 0 : elem.key(chunkRecords[idx]) - prev;
          writeTypedValue(currentBundle.view, offset, value, elem.incrType);
          offset += typeToBytes[elem.incrType.name];
          prev = elem.key(chunkRecords[idx]);
        }
      }
      for (const elem of elems) {
        for (const record of chunkRecords) {
          const value = elem.key(record);
          if (value == null) throw new Error('Missing elem value from getter');
          writeTypedValue(currentBundle.view, offset, value, elem.type);
          offset += typeToBytes[elem.type.name];
        }
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Update bundle offset
      currentBundle.offset = offset;
    }

    // Update bundle header
    currentBundle.view.setUint32(0, 0x1C | ((currentBundle.offset - 4) << 8), true);
  }

  // Combine buffers into final Uint8Array
  const finalLength = buffers.reduce((sum, b) => sum + b.offset, 0);
  const result = new Uint8Array(finalLength);
  let offset = 0;
  for (const { buffer, offset: bundleOffset } of buffers) {
    result.set(new Uint8Array(buffer, 0, bundleOffset), offset);
    offset += bundleOffset;
  }

  return result;
}

function interleaved(json, iGetter, jGetter, Δelems = [], elems = [], udv = 0) {
  if (!Array.isArray(json) || !json.length) return new Uint8Array(0);
  if (typeof iGetter !== 'function' || typeof jGetter !== 'function') throw new Error('i and j must be getter functions');
  Δelems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`Δelems[${idx}].key must be a getter function`);
  });
  elems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`elems[${idx}].key must be a getter function`);
  });

  // Group records by i value
  const groups = new Map();
  for (const record of json) {
    const iValue = iGetter(record);
    if (iValue == null) throw new Error('Missing i value from getter');
    if (!groups.has(iValue)) groups.set(iValue, []);
    groups.get(iValue).push(record);
  }

  const maxBundleSize = 0xFFFFFF; // Max bundle length (24 bits)
  const buffers = [];

  // Process each group (i value)
  for (const [iValue, records] of groups) {
    // Sort records by j to ensure consistent order
    records.sort((a, b) => jGetter(a) - jGetter(b));
    const jValues = records.map(jGetter);
    if (jValues.some(v => v == null)) throw new Error('Missing j value from getter');

    // Split records into chunks based on Δj continuity
    const chunks = [];
    let currentChunk = [records[0]];
    let currentJ0 = jValues[0];
    let currentΔj = records.length > 1 ? jValues[1] - jValues[0] : 0;

    for (let idx = 1; idx < records.length; idx++) {
      const chunkIndex = chunks.reduce((sum, c) => sum + c.records.length, 0);
      const expectedJ = currentJ0 + (idx - chunkIndex) * currentΔj;
      if (jValues[idx] !== expectedJ || idx - chunkIndex >= 65536) {
        chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
        currentChunk = [records[idx]];
        currentJ0 = jValues[idx];
        currentΔj = idx + 1 < records.length ? jValues[idx + 1] - jValues[idx] : 0;
      } else {
        currentChunk.push(records[idx]);
      }
    }
    if (currentChunk.length > 0) {
      chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
    }

    // Calculate total size for all chunks in this group by simulating offsets
    const chunkSizes = chunks.map(({ records: chunkRecords }) => {
      if (chunkRecords.length > 65535) throw new Error(`Chunk size exceeds 65535 for i=${iValue}`);
      let simulatedOffset = 0; // Relative to chunk start
      simulatedOffset += 12; // Header
      simulatedOffset += Δelems.length + elems.length; // Preface
      simulatedOffset += Δelems.reduce((sum, e) => sum + typeToBytes[e.baseType.name], 0); // Initial values
      while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after initial
      simulatedOffset += chunkRecords.length * (
        Δelems.reduce((sum, e) => sum + typeToBytes[e.incrType.name], 0) +
        elems.reduce((sum, e) => sum + typeToBytes[e.type.name], 0)
      ); // Interleaved record data
      while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after record
      return simulatedOffset;
    });
    const totalChunkSize = chunkSizes.reduce((sum, size) => sum + size, 0);

    // Start a new bundle if needed
    const lastBundle = buffers[buffers.length - 1];
    if (!lastBundle || lastBundle.offset + totalChunkSize > maxBundleSize) {
      buffers.push({ offset: 4, buffer: null, view: null });
    }

    // Initialize DataView for current bundle
    const currentBundle = buffers[buffers.length - 1];
    if (!currentBundle.buffer) {
      const requiredSize = totalChunkSize + 4;
      currentBundle.buffer = new ArrayBuffer(requiredSize);
      currentBundle.view = new DataView(currentBundle.buffer);
    }

    // Process each chunk
    for (const { records: chunkRecords, j0, Δj } of chunks) {
      const chunkSize = chunkSizes.shift();

      // Ensure buffer is large enough
      if (currentBundle.offset + chunkSize > currentBundle.buffer.byteLength) {
        const newSize = currentBundle.offset + chunkSize;
        const newBuffer = new ArrayBuffer(newSize);
        new Uint8Array(newBuffer).set(new Uint8Array(currentBundle.buffer));
        currentBundle.buffer = newBuffer;
        currentBundle.view = new DataView(newBuffer);
      }

      // Write chunk header
      let offset = currentBundle.offset;
      currentBundle.view.setUint8(offset++, 0x12); // Chunk type
      currentBundle.view.setUint8(offset++, udv); // udv
      currentBundle.view.setUint16(offset, chunkRecords.length, true); offset += 2; // count
      currentBundle.view.setUint16(offset, iValue, true); offset += 2; // i
      currentBundle.view.setUint16(offset, j0, true); offset += 2; // j0
      currentBundle.view.setInt16(offset, Δj, true); offset += 2; // Δj
      currentBundle.view.setUint8(offset++, Δelems.length); // Δelem_count
      currentBundle.view.setUint8(offset++, elems.length); // elem_count

      // Write chunk preface (element types)
      for (const elem of Δelems) {
        const baseCode = typeToCode[elem.baseType.name];
        const incrCode = typeToCode[elem.incrType.name];
        currentBundle.view.setUint8(offset++, (incrCode << 4) | baseCode);
      }
      for (const elem of elems) {
        currentBundle.view.setUint8(offset++, typeToCode[elem.type.name]);
      }

      // Write initial values for Δelems
      for (const elem of Δelems) {
        const value = elem.key(chunkRecords[0]);
        if (value == null) throw new Error('Missing Δelem value from getter');
        writeTypedValue(currentBundle.view, offset, value, elem.baseType);
        offset += typeToBytes[elem.baseType.name];
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Write interleaved record data
      const prevValues = Δelems.map(elem => elem.key(chunkRecords[0]));
      for (let idx = 0; idx < chunkRecords.length; idx++) {
        // Write Δelems increments
        for (let i = 0; i < Δelems.length; i++) {
          const elem = Δelems[i];
          const value = idx === 0 ? 0 : elem.key(chunkRecords[idx]) - prevValues[i];
          writeTypedValue(currentBundle.view, offset, value, elem.incrType);
          offset += typeToBytes[elem.incrType.name];
          prevValues[i] = elem.key(chunkRecords[idx]);
        }
        // Write elems
        for (const elem of elems) {
          const value = elem.key(chunkRecords[idx]);
          if (value == null) throw new Error('Missing elem value from getter');
          writeTypedValue(currentBundle.view, offset, value, elem.type);
          offset += typeToBytes[elem.type.name];
        }
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Update bundle offset
      currentBundle.offset = offset;
    }

    // Update bundle header
    currentBundle.view.setUint32(0, 0x1C | ((currentBundle.offset - 4) << 8), true);
  }

  // Combine buffers into final Uint8Array
  const finalLength = buffers.reduce((sum, b) => sum + b.offset, 0);
  const result = new Uint8Array(finalLength);
  let offset = 0;
  for (const { buffer, offset: bundleOffset } of buffers) {
    result.set(new Uint8Array(buffer, 0, bundleOffset), offset);
    offset += bundleOffset;
  }

  return result;
}

function writeTypedValue(view, offset, value, type) {
|
||||
switch (type) {
|
||||
case Int8Array: view.setInt8(offset, value); break;
|
||||
case Uint8Array: view.setUint8(offset, value); break;
|
||||
case Int16Array: view.setInt16(offset, value, true); break;
|
||||
case Uint16Array: view.setUint16(offset, value, true); break;
|
||||
case Int32Array: view.setInt32(offset, value, true); break;
|
||||
case Uint32Array: view.setUint32(offset, value, true); break;
|
||||
case Float32Array: view.setFloat32(offset, value, true); break;
|
||||
case Float64Array: view.setFloat64(offset, value, true); break;
|
||||
case BigInt64Array: view.setBigInt64(offset, BigInt(value), true); break;
|
||||
case BigUint64Array: view.setBigUint64(offset, BigInt(value), true); break;
|
||||
default: throw new Error(`Unsupported type: ${type.name}`);
|
||||
}
|
||||
}
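For orientation, the decode side mirrors this mapping one-for-one. A minimal sketch of what such a counterpart could look like (hypothetical helper shown for illustration only; the real reader lives in ./decode, which is not part of this diff):

function readTypedValue(view, offset, type) {
  // Little-endian reads, matching the writes above
  switch (type) {
    case Int8Array: return view.getInt8(offset);
    case Uint8Array: return view.getUint8(offset);
    case Int16Array: return view.getInt16(offset, true);
    case Uint16Array: return view.getUint16(offset, true);
    case Int32Array: return view.getInt32(offset, true);
    case Uint32Array: return view.getUint32(offset, true);
    case Float32Array: return view.getFloat32(offset, true);
    case Float64Array: return view.getFloat64(offset, true);
    case BigInt64Array: return view.getBigInt64(offset, true);
    case BigUint64Array: return view.getBigUint64(offset, true);
    default: throw new Error(`Unsupported type: ${type.name}`);
  }
}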
|
||||
|
||||
module.exports = { sequential, interleaved };
|
||||
139
lib/modules/@dougal/binary/index.js
Normal file
@@ -0,0 +1,139 @@
|
||||
|
||||
/** Binary encoder
|
||||
*
|
||||
* This module encodes scalar data from a grid-like source
|
||||
* into a packed binary format for bandwidth efficiency and
|
||||
* speed of access.
|
||||
*
|
||||
* Data are indexed by i & j values, with "i" being constant
|
||||
* (e.g., a sequence or line number) and "j" expected to change
|
||||
* by a constant, linear amount (e.g., point numbers). All data
|
||||
* from consecutive "j" values will be encoded as a single array
|
||||
* (or series of arrays if multiple values are encoded).
|
||||
* If there is a jump in the "j" progression, a new "chunk" will
|
||||
* be started with a new array (or series of arrays).
|
||||
*
|
||||
* Multiple values may be encoded per (i, j) pair, using any of
|
||||
* the types supported by JavaScript's TypedArray except for
|
||||
* Float16 and Uint8Clamped. Each variable can be encoded with
|
||||
* a different size.
|
||||
*
|
||||
* Values may be encoded directly or as deltas from an initial
|
||||
* value. The latter is particularly efficient when dealing with
|
||||
* monotonically incrementing data, such as timestamps.
|
||||
*
|
||||
* The conceptual packet format for sequentially encoded data
|
||||
* looks like this:
|
||||
*
|
||||
* <msg-type> <count: x> <i> <j0> <Δj>
|
||||
*
|
||||
* <Δelement_count: y>
|
||||
* <element_count: z>
|
||||
*
|
||||
* <Δelement_1_type_base> … <Δelement_y_type_base>
|
||||
* <Δelement_1_type_incr> … <Δelement_y_type_incr>
|
||||
* <elem_1_type> … <elem_z_type>
|
||||
*
|
||||
* <Δelement_1_first> … <Δelement_y_first>
|
||||
*
|
||||
* <Δelem_1_0> … <Δelem_1_x>
|
||||
* …
|
||||
* <Δelem_y_0> … <Δelem_y_x>
|
||||
* <elem_1_0> … <elem_1_x>
|
||||
* …
|
||||
* <elem_z_0> … <elem_z_x>
|
||||
*
|
||||
*
|
||||
* The conceptual packet format for interleaved encoded data
|
||||
* looks like this:
|
||||
*
|
||||
*
|
||||
* <msg-type> <count: x> <i> <j0> <Δj>
|
||||
*
|
||||
* <Δelement_count: y>
|
||||
* <element_count: z>
|
||||
*
|
||||
* <Δelement_1_type_base> … <Δelement_y_type_base>
|
||||
* <Δelement_1_type_incr> … <Δelement_y_type_incr>
|
||||
* <elem_1_type> … <elem_z_type>
|
||||
*
|
||||
* <Δelement_1_first> … <Δelement_y_first>
|
||||
*
|
||||
* <Δelem_1_0> <Δelem_2_0> … <Δelem_y_0> <elem_1_0> <elem_2_0> … <elem_z_0>
|
||||
* <Δelem_1_1> <Δelem_2_1> … <Δelem_y_1> <elem_1_1> <elem_2_1> … <elem_z_1>
|
||||
* …
|
||||
* <Δelem_1_x> <Δelem_2_x> … <Δelem_y_x> <elem_1_x> <elem_2_x> … <elem_z_x>
|
||||
*
|
||||
*
|
||||
* Usage example:
|
||||
*
|
||||
* json = [
|
||||
* {
|
||||
* sequence: 7,
|
||||
* sailline: 5354,
|
||||
* line: 5356,
|
||||
* point: 1068,
|
||||
* tstamp: 1695448704372,
|
||||
* objrefraw: 3,
|
||||
* objreffinal: 4
|
||||
* },
|
||||
* {
|
||||
* sequence: 7,
|
||||
* sailline: 5354,
|
||||
* line: 5352,
|
||||
* point: 1070,
|
||||
* tstamp: 1695448693612,
|
||||
* objrefraw: 2,
|
||||
* objreffinal: 3
|
||||
* },
|
||||
* {
|
||||
* sequence: 7,
|
||||
* sailline: 5354,
|
||||
* line: 5356,
|
||||
* point: 1072,
|
||||
* tstamp: 1695448684624,
|
||||
* objrefraw: 3,
|
||||
* objreffinal: 4
|
||||
* }
|
||||
* ];
|
||||
*
|
||||
* deltas = [
|
||||
* { key: el => el.tstamp, baseType: BigUint64Array, incrType: Int16Array }
|
||||
* ];
|
||||
*
|
||||
* elems = [
|
||||
* { key: el => el.objrefraw, type: Uint8Array },
|
||||
* { key: el => el.objreffinal, type: Uint8Array }
|
||||
* ];
|
||||
*
|
||||
* i = el => el.sequence;
|
||||
*
|
||||
* j = el => el.point;
|
||||
*
|
||||
* bundle = encode(json, i, j, deltas, elems);
|
||||
*
|
||||
* // bundle:
|
||||
*
|
||||
* Uint8Array(40) [
|
||||
* 36, 0, 0, 28, 17, 0, 3, 0, 7, 0,
|
||||
* 44, 4, 2, 0, 1, 2, 42, 1, 1, 116,
|
||||
* 37, 158, 192, 138, 1, 0, 0, 0, 0, 0,
|
||||
* 248, 213, 228, 220, 3, 2, 3, 4, 3, 4
|
||||
* ]
|
||||
*
|
||||
* decode(bundle);
|
||||
*
|
||||
* {
|
||||
* i: 7,
|
||||
* j: [ 1068, 1070, 1072 ],
|
||||
* 'Δelems': [ [ 1695448704372, 1695448693612, 1695448684624 ] ],
|
||||
* elems: [ [ 3, 2, 3 ], [ 4, 3, 4 ] ]
|
||||
* }
|
||||
*
|
||||
*/
|
||||
|
||||
module.exports = {
|
||||
encode: {...require('./encode')},
|
||||
decode: {...require('./decode')},
|
||||
...require('./classes')
|
||||
};
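Note that the exports spread the encoder and decoder namespaces, so callers pick a packing strategy explicitly rather than calling encode() directly as in the conceptual example above. A minimal usage sketch (the interleaved call signature is assumed from the doc comment, not verified here):

const { encode, decode } = require('@dougal/binary');

// encode.sequential() and encode.interleaved() are the two packing
// strategies exported by ./encode; the decode namespace exposes the
// matching readers (their exact names live in ./decode, not shown here).
const bundle = encode.interleaved(records, i, j, deltas, elems); // assumed signature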
|
||||
12
lib/modules/@dougal/binary/package.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "@dougal/binary",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"description": ""
|
||||
}
|
||||
25
lib/modules/@dougal/concurrency/index.js
Normal file
@@ -0,0 +1,25 @@
|
||||
class ConcurrencyLimiter {
|
||||
|
||||
constructor(maxConcurrent) {
|
||||
this.maxConcurrent = maxConcurrent;
|
||||
this.active = 0;
|
||||
this.queue = [];
|
||||
}
|
||||
|
||||
async enqueue(task) {
|
||||
if (this.active >= this.maxConcurrent) {
|
||||
await new Promise(resolve => this.queue.push(resolve));
|
||||
}
|
||||
this.active++;
|
||||
try {
|
||||
return await task();
|
||||
} finally {
|
||||
this.active--;
|
||||
if (this.queue.length > 0) {
|
||||
this.queue.shift()();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ConcurrencyLimiter;
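A brief usage sketch (URLs are placeholders; assumes a global fetch as in Node 18+):

const ConcurrencyLimiter = require('@dougal/concurrency');

const limiter = new ConcurrencyLimiter(3);

async function fetchAll (urls) {
  // At most three requests are in flight at any one time; the rest
  // wait in the limiter's queue until an earlier task settles.
  return Promise.all(urls.map(url => limiter.enqueue(() => fetch(url))));
}

fetchAll(['https://example.com/a', 'https://example.com/b']).then(console.log);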
|
||||
12
lib/modules/@dougal/concurrency/package.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "@dougal/concurrency",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"description": ""
|
||||
}
|
||||
75
lib/modules/@dougal/organisations/Organisation.js
Normal file
@@ -0,0 +1,75 @@
|
||||
|
||||
class Organisation {
|
||||
|
||||
constructor (data) {
|
||||
|
||||
this.read = !!data?.read;
|
||||
this.write = !!data?.write;
|
||||
this.edit = !!data?.edit;
|
||||
|
||||
this.other = {};
|
||||
|
||||
return new Proxy(this, {
|
||||
get (target, prop) {
|
||||
if (prop in target) {
|
||||
return target[prop]
|
||||
} else {
|
||||
return target.other[prop];
|
||||
}
|
||||
},
|
||||
|
||||
set (target, prop, value) {
|
||||
const oldValue = target[prop] !== undefined ? target[prop] : target.other[prop];
|
||||
const newValue = Boolean(value);
|
||||
|
||||
if (["read", "write", "edit"].includes(prop)) {
|
||||
target[prop] = newValue;
|
||||
} else {
|
||||
target.other[prop] = newValue;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
toJSON () {
|
||||
return {
|
||||
read: this.read,
|
||||
write: this.write,
|
||||
edit: this.edit,
|
||||
...this.other
|
||||
}
|
||||
}
|
||||
|
||||
toString (replacer, space) {
|
||||
return JSON.stringify(this.toJSON(), replacer, space);
|
||||
}
|
||||
|
||||
/** Limit the operations to only those allowed by `other`
|
||||
*/
|
||||
filter (other) {
|
||||
const filteredOrganisation = new Organisation();
|
||||
|
||||
filteredOrganisation.read = this.read && other.read;
|
||||
filteredOrganisation.write = this.write && other.write;
|
||||
filteredOrganisation.edit = this.edit && other.edit;
|
||||
|
||||
return filteredOrganisation;
|
||||
}
|
||||
|
||||
intersect (other) {
|
||||
return this.filter(other);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
if (typeof module !== 'undefined' && module.exports) {
|
||||
module.exports = Organisation; // CJS export
|
||||
}
|
||||
|
||||
// ESM export
|
||||
if (typeof exports !== 'undefined' && !exports.default) {
|
||||
exports.default = Organisation; // ESM export
|
||||
}
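A short illustration of the proxy behaviour (organisation contents are hypothetical):

const Organisation = require('./Organisation');

const full = new Organisation({ read: true, write: true, edit: true });
const readOnly = new Organisation({ read: true });

readOnly.write;              // false – unset operations default to false
full.filter(readOnly).read;  // true  – only operations granted by both survive
full.filter(readOnly).write; // false

// Operations other than read/write/edit are coerced to booleans
// and stored in `other`, so they round-trip through toJSON()
full.approve = 1;
full.approve;                // true
full.toString();             // '{"read":true,"write":true,"edit":true,"approve":true}'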
|
||||
225
lib/modules/@dougal/organisations/Organisations.js
Normal file
@@ -0,0 +1,225 @@
|
||||
const Organisation = require('./Organisation');
|
||||
|
||||
class Organisations {
|
||||
|
||||
#values = {}
|
||||
|
||||
#overlord
|
||||
|
||||
static entries (orgs) {
|
||||
return orgs.names().map(name => [name, orgs.get(name)]);
|
||||
}
|
||||
|
||||
constructor (data, overlord) {
|
||||
if (data instanceof Organisations) {
|
||||
for (const [name, value] of Organisations.entries(data)) {
|
||||
this.set(name, new Organisation(value));
|
||||
}
|
||||
} else if (data instanceof Object) {
|
||||
for (const [name, value] of Object.entries(data)) {
|
||||
this.set(name, new Organisation(value));
|
||||
}
|
||||
} else if (typeof data === "string" || data instanceof String) {
|
||||
this.set(data, new Organisation());
|
||||
} else if (typeof data !== "undefined") {
|
||||
throw new Error("Invalid constructor argument");
|
||||
}
|
||||
|
||||
if (overlord) {
|
||||
this.#overlord = overlord;
|
||||
}
|
||||
}
|
||||
|
||||
get values () {
|
||||
return this.#values;
|
||||
}
|
||||
|
||||
get length () {
|
||||
return this.names().length;
|
||||
}
|
||||
|
||||
get overlord () {
|
||||
return this.#overlord;
|
||||
}
|
||||
|
||||
set overlord (v) {
|
||||
this.#overlord = new Organisations(v);
|
||||
}
|
||||
|
||||
/** Get the operations for `name`
|
||||
*/
|
||||
get (name) {
|
||||
const key = Object.keys(this.values).find( k => k.toLowerCase() == name.toLowerCase() ) ?? name;
|
||||
return this.values[key];
|
||||
}
|
||||
|
||||
/** Set the operations for `name` to `value`
|
||||
*
|
||||
* If we have an overlord, ensure we cannot:
|
||||
*
|
||||
* 1. Add new organisations which the overlord
|
||||
* is not a member of
|
||||
* 2. Access operations that the overlord is not
|
||||
* allowed to access
|
||||
*/
|
||||
set (name, value) {
|
||||
name = String(name).trim();
|
||||
const key = Object.keys(this.values).find( k => k.toLowerCase() == name.toLowerCase() ) ?? name;
|
||||
const org = new Organisation(value);
|
||||
|
||||
if (this.overlord) {
|
||||
const parent = this.overlord.get(key) ?? this.overlord.get("*");
|
||||
if (parent) {
|
||||
this.values[key] = parent.filter(org);
|
||||
}
|
||||
} else {
|
||||
this.values[key] = new Organisation(value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Enable the operation `op` in all organisations
|
||||
*/
|
||||
enableOperation (op) {
|
||||
if (this.overlord) {
|
||||
Object.keys(this.#values)
|
||||
.filter( key => (this.overlord.get(key) ?? this.overlord.get("*"))?.[op] )
|
||||
.forEach( key => this.#values[key][op] = true );
|
||||
} else {
|
||||
Object.values(this.#values).forEach( org => org[op] = true );
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Disable the operation `op` in all organisations
|
||||
*/
|
||||
disableOperation (op) {
|
||||
Object.values(this.#values).forEach( org => org[op] = false );
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Create a new organisation object limited by the caller's rights
|
||||
*
|
||||
* The spawned Organisations instance will have the same organisations
|
||||
* and rights as the caller minus the applied `mask`. With the default
|
||||
* mask, the spawned object will inherit all rights except for `edit`
|
||||
* rights.
|
||||
*
|
||||
* The "*" organisation must be explicitly assigned. It is not inherited.
|
||||
*/
|
||||
spawn (mask = {read: true, write: true, edit: false}) {
|
||||
|
||||
const parent = new Organisations();
|
||||
const wildcard = this.get("*")?.edit; // If true, we can spawn everywhere
|
||||
|
||||
this.entries().forEach( ([k, v]) => {
|
||||
// if (k != "*") { // This organisation is not inherited
|
||||
if (v.edit || wildcard) { // We have the right to spawn in this organisation
|
||||
const o = new Organisation({
|
||||
read: v.read && mask.read,
|
||||
write: v.write && mask.write,
|
||||
edit: v.edit && mask.edit
|
||||
});
|
||||
parent.set(k, o);
|
||||
}
|
||||
// }
|
||||
});
|
||||
|
||||
return new Organisations({}, parent);
|
||||
}
|
||||
|
||||
remove (name) {
|
||||
const key = Object.keys(this.values).find( k => k.toLowerCase() == name.toLowerCase() ) ?? name;
|
||||
delete this.values[key];
|
||||
}
|
||||
|
||||
/** Return the list of organisation names
|
||||
*/
|
||||
names () {
|
||||
return Object.keys(this.values);
|
||||
}
|
||||
|
||||
/** Same as this.get(name)
|
||||
*/
|
||||
value (name) {
|
||||
return this.values[name];
|
||||
}
|
||||
|
||||
/** Same as Object.prototype.entries
|
||||
*/
|
||||
entries () {
|
||||
return this.names().map( name => [ name, this.value(name) ] );
|
||||
}
|
||||
|
||||
/** Return true if the named organisation is present
|
||||
*/
|
||||
has (name) {
|
||||
return Boolean(this.value(name));
|
||||
}
|
||||
|
||||
/** Return only those of our organisations
|
||||
* and operations present in `other`
|
||||
*/
|
||||
filter (other) {
|
||||
const filteredOrganisations = new Organisations();
|
||||
|
||||
const wildcard = other.value("*");
|
||||
|
||||
for (const [name, org] of this.entries()) {
|
||||
const ownOrg = other.value(name) ?? wildcard;
|
||||
if (ownOrg) {
|
||||
filteredOrganisations.set(name, org.filter(ownOrg))
|
||||
}
|
||||
}
|
||||
|
||||
return filteredOrganisations;
|
||||
}
|
||||
|
||||
/** Return only those organisations
|
||||
* that have access to the required
|
||||
* operation
|
||||
*/
|
||||
accessToOperation (op) {
|
||||
const filteredOrganisations = new Organisations();
|
||||
|
||||
for (const [name, org] of this.entries()) {
|
||||
if (org[op]) {
|
||||
filteredOrganisations.set(name, org);
|
||||
}
|
||||
}
|
||||
|
||||
return filteredOrganisations;
|
||||
}
|
||||
|
||||
toJSON () {
|
||||
const obj = {};
|
||||
for (const key in this.values) {
|
||||
obj[key] = this.values[key].toJSON();
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
||||
toString (replacer, space) {
|
||||
return JSON.stringify(this.toJSON(), replacer, space);
|
||||
}
|
||||
|
||||
*[Symbol.iterator] () {
|
||||
for (const [name, operations] of this.entries()) {
|
||||
yield {name, operations};
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
if (typeof module !== 'undefined' && module.exports) {
|
||||
module.exports = Organisations; // CJS export
|
||||
}
|
||||
|
||||
// ESM export
|
||||
if (typeof exports !== 'undefined' && !exports.default) {
|
||||
exports.default = Organisations; // ESM export
|
||||
}
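A short usage sketch (organisation names are hypothetical):

const Organisations = require('./Organisations');

const mine = new Organisations({
  "*":    { read: true, write: false, edit: false },
  acme:   { read: true, write: true,  edit: true },
  globex: { read: true, write: false, edit: false }
});

mine.get("ACME").write;           // true – get() is case-insensitive
mine.has("globex");               // true

// Keep only what another rights set also allows
const theirs = new Organisations({ acme: { read: true } });
mine.filter(theirs).get("acme").write;   // false – clipped to read-only

// Spawned sets drop edit rights by default and stay constrained
// by their parent (overlord) when new organisations are added
const child = mine.spawn();
child.set("acme", { read: true, write: true, edit: true });
child.get("acme").edit;           // false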
|
||||
5
lib/modules/@dougal/organisations/index.js
Normal file
@@ -0,0 +1,5 @@
|
||||
|
||||
module.exports = {
|
||||
Organisation: require('./Organisation'),
|
||||
Organisations: require('./Organisations')
|
||||
}
|
||||
12
lib/modules/@dougal/organisations/package.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "@dougal/organisations",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"description": ""
|
||||
}
|
||||
364
lib/modules/@dougal/user/User.js
Normal file
@@ -0,0 +1,364 @@
|
||||
const EventEmitter = require('events');
|
||||
const { Organisations } = require('@dougal/organisations');
|
||||
|
||||
function randomUUID () {
|
||||
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
|
||||
const r = Math.random() * 16 | 0;
|
||||
const v = c === 'x' ? r : (r & 0x3 | 0x8);
|
||||
return v.toString(16);
|
||||
});
|
||||
}
|
||||
|
||||
class User extends EventEmitter {
|
||||
|
||||
// Valid field names
|
||||
static fields = [ "ip", "host", "name", "email", "description", "colour", "active", "organisations", "meta" ]
|
||||
|
||||
static validUUID (str) {
|
||||
const uuidv4Rx = /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
|
||||
return uuidv4Rx.test(str);
|
||||
}
|
||||
|
||||
static validIPv4 (str) {
|
||||
const ipv4Rx = /^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\/([0-9]|[1-2][0-9]|3[0-2]))?$/;
|
||||
return ipv4Rx.test(str);
|
||||
}
|
||||
|
||||
static validIPv6 (str) {
|
||||
const ipv6Rx = /^(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:){1,7}:|(?:[0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:){1,5}(?::[0-9a-fA-F]{1,4}){1,2}|(?:[0-9a-fA-F]{1,4}:){1,4}(?::[0-9a-fA-F]{1,4}){1,3}|(?:[0-9a-fA-F]{1,4}:){1,3}(?::[0-9a-fA-F]{1,4}){1,4}|(?:[0-9a-fA-F]{1,4}:){1,2}(?::[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:(?::[0-9a-fA-F]{1,4}){1,6}|:((?::[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(?::[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(?:ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?))|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?))))$/;
|
||||
return ipv6Rx.test(str);
|
||||
}
|
||||
|
||||
static validHostname (str) {
|
||||
const hostnameRx = /^(?=.{1,253}$)(?:(?!-)[A-Za-z0-9-]{1,63}(?<!-)\.)+[A-Za-z]{2,}$/;
|
||||
return hostnameRx.test(str);
|
||||
}
|
||||
|
||||
#setString (k, v) {
|
||||
if (typeof v === "undefined") {
|
||||
this.values[k] = v;
|
||||
} else {
|
||||
this.values[k] = String(v).trim();
|
||||
}
|
||||
this.emit("changed", k, v);
|
||||
this.#updateTimestamp();
|
||||
}
|
||||
|
||||
#updateTimestamp (v) {
|
||||
if (typeof v === "undefined") {
|
||||
this.#timestamp = (new Date()).valueOf();
|
||||
} else {
|
||||
this.#timestamp = (new Date(v)).valueOf();
|
||||
}
|
||||
this.emit("last_modified", this.#timestamp);
|
||||
}
|
||||
|
||||
// Create a new instance of `other`, where `other` is
|
||||
// an instance of User or of a derived class
|
||||
#clone (other = this) {
|
||||
const clone = new this.constructor();
|
||||
Object.assign(clone.values, other.values);
|
||||
clone.organisations = new Organisations(other.organisations);
|
||||
return clone;
|
||||
}
|
||||
|
||||
values = {}
|
||||
|
||||
#timestamp
|
||||
|
||||
constructor (data) {
|
||||
super();
|
||||
|
||||
User.fields.forEach( f => this[f] = data?.[f] );
|
||||
this.values.id = data?.id ?? randomUUID();
|
||||
this.values.active = !!this.active;
|
||||
this.values.hash = data?.hash;
|
||||
this.values.password = data?.password;
|
||||
this.values.organisations = new Organisations(data?.organisations);
|
||||
this.#updateTimestamp(data?.last_modified);
|
||||
}
|
||||
|
||||
/*
|
||||
* Getters
|
||||
*/
|
||||
|
||||
get id () { return this.values.id }
|
||||
|
||||
get ip () { return this.values.ip }
|
||||
|
||||
get host () { return this.values.host }
|
||||
|
||||
get name () { return this.values.name }
|
||||
|
||||
get email () { return this.values.email }
|
||||
|
||||
get description () { return this.values.description }
|
||||
|
||||
get colour () { return this.values.colour }
|
||||
|
||||
get active () { return this.values.active }
|
||||
|
||||
get organisations () { return this.values.organisations }
|
||||
|
||||
get password () { return this.values.password }
|
||||
|
||||
get timestamp () { return new Date(this.#timestamp) }
|
||||
|
||||
/*
|
||||
* Setters
|
||||
*/
|
||||
|
||||
set id (v) {
|
||||
if (typeof v === "undefined") {
|
||||
this.values.id = randomUUID();
|
||||
} else if (User.validUUID(v)) {
|
||||
this.values.id = v;
|
||||
} else {
|
||||
throw new Error("Invalid ID format (must be UUIDv4)");
|
||||
}
|
||||
this.emit("changed", "id", this.values.id);
|
||||
this.#updateTimestamp();
|
||||
}
|
||||
|
||||
set ip (v) {
|
||||
if (User.validIPv4(v) || User.validIPv6(v) || typeof v === "undefined") {
|
||||
this.values.ip = v;
|
||||
} else {
|
||||
throw new Error("Invalid IP address or subnet");
|
||||
}
|
||||
this.emit("changed", "ip", this.values.ip);
|
||||
this.#updateTimestamp();
|
||||
}
|
||||
|
||||
set host (v) {
|
||||
if (User.validHostname(v) || typeof v === "undefined") {
|
||||
this.values.host = v;
|
||||
} else {
|
||||
throw new Error("Invalid hostname");
|
||||
}
|
||||
this.emit("changed", "host", this.values.host);
|
||||
this.#updateTimestamp();
|
||||
}
|
||||
|
||||
set name (v) {
|
||||
this.#setString("name", v);
|
||||
}
|
||||
|
||||
set email (v) {
|
||||
// TODO should validate, but hey!
|
||||
this.#setString("email", v);
|
||||
}
|
||||
|
||||
set description (v) {
|
||||
this.#setString("description", v);
|
||||
}
|
||||
|
||||
set colour (v) {
|
||||
this.#setString("colour", v);
|
||||
}
|
||||
|
||||
set active (v) {
|
||||
this.values.active = !!v;
|
||||
this.emit("changed", "active", this.values.active);
|
||||
this.#updateTimestamp();
|
||||
}
|
||||
|
||||
set organisations (v) {
|
||||
this.values.organisations = new Organisations(v);
|
||||
this.emit("changed", "organisations", this.values.organisations);
|
||||
this.#updateTimestamp();
|
||||
}
|
||||
|
||||
set password (v) {
|
||||
this.values.password = v;
|
||||
this.emit("changed", "password", this.values.password);
|
||||
this.#updateTimestamp();
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* Validation methods
|
||||
*/
|
||||
|
||||
get errors () {
|
||||
let err = [];
|
||||
|
||||
if (!this.id) err.push("ERR_NO_ID");
|
||||
if (!this.name) err.push("ERR_NO_NAME");
|
||||
if (!this.organisations.length) err.push("ERR_NO_ORG");
|
||||
|
||||
return err;
|
||||
}
|
||||
|
||||
get isValid () {
|
||||
return this.errors.length == 0;
|
||||
}
|
||||
|
||||
/*
|
||||
* Filtering methods
|
||||
*/
|
||||
|
||||
filter (other) {
|
||||
// const filteredUser = new User(this);
|
||||
const filteredUser = this.#clone();
|
||||
filteredUser.organisations = this.organisations.filter(other.organisations);
|
||||
return filteredUser;
|
||||
}
|
||||
|
||||
/** Return users that are visible to me.
|
||||
*
|
||||
* These are users with whom we share at least one organisation
* to which we have read, write or edit access.
|
||||
*
|
||||
* If we are wildcarded ("*"), we see everyone.
|
||||
*
|
||||
* If a peer is wildcarded, they can be seen by everyone.
|
||||
*/
|
||||
peers (list) {
|
||||
if (this.organisations.value("*")) {
|
||||
return list;
|
||||
} else {
|
||||
return list.filter( user => this.canRead(user) );
|
||||
// return list.filter( user =>
|
||||
// user.organisations.value("*") ||
|
||||
// user.organisations.filter(this.organisations).length > 0
|
||||
// this.organisations.filter(user.organisations).length > 0
|
||||
// );
|
||||
}
|
||||
}
|
||||
|
||||
/** Return users that I can edit
|
||||
*
|
||||
* These users must belong to an organisation
|
||||
* over which I have edit rights.
|
||||
*
|
||||
* If we are edit wildcarded, we can edit everyone.
|
||||
*/
|
||||
editablePeers (list) {
|
||||
const editableOrgs = this.organisations.accessToOperation("edit");
|
||||
if (editableOrgs.value("*")) {
|
||||
return list;
|
||||
} else {
|
||||
return list.filter( user => this.canEdit(user) );
|
||||
// editableOrgs.filter(user.organisations).length > 0
|
||||
// );
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* General methods
|
||||
*/
|
||||
|
||||
/** Return `true` if we are `other`
|
||||
*/
|
||||
is (other) {
|
||||
return this.id == other.id;
|
||||
}
|
||||
|
||||
canDo (operation, other) {
|
||||
if (this.organisations.get('*')?.[operation])
|
||||
return true;
|
||||
|
||||
if (other instanceof User) {
|
||||
return other.organisations.names().some(name => this.organisations.get(name)?.[operation]);
|
||||
} else if (other instanceof Organisations) {
|
||||
return other.accessToOperation(operation).names().some(name => this.organisations.get(name)?.[operation]);
|
||||
} else if (other?.organisations) {
|
||||
return this.canDo(operation, new Organisations(other.organisations));
|
||||
} else if (other instanceof Object) {
|
||||
return this.canDo(operation, new Organisations(other));
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
canRead (other) {
|
||||
return this.canDo("read", other);
|
||||
}
|
||||
|
||||
canWrite (other) {
|
||||
return this.canDo("write", other);
|
||||
}
|
||||
|
||||
canEdit (other) {
|
||||
return this.canDo("edit", other);
|
||||
}
|
||||
|
||||
/** Perform an edit on another user
|
||||
*
|
||||
* Syntax: user.edit(other).to(another);
|
||||
*
|
||||
* Applies to `other` the changes described in `another`
|
||||
* that are permitted to `user`. The argument `another`
|
||||
* must be a plain object (not a `User` instance) with
|
||||
* only the properties that are to be changed.
|
||||
*
|
||||
* NOTE: Organisations are not merged, they are overwritten
|
||||
* and then filtered to ensure that the edited user does not
|
||||
* gain more privileges than those granted to the editing
|
||||
* user.
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* // This causes user test77 to set user x23 to
|
||||
* // inactive
|
||||
* test77.edit(x23).to({active: false})
|
||||
*/
|
||||
edit (other) {
|
||||
if (this.canEdit(other)) {
|
||||
return {
|
||||
to: (another) => {
|
||||
const newUser = Object.assign(this.#clone(other), another);
|
||||
return newUser.filter(this);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Do not fail or throw but return undefined
|
||||
}
|
||||
|
||||
/** Create a new user similar to us except it doesn't have `edit` rights
|
||||
* by default
|
||||
*/
|
||||
spawn (init = {}, mask = {read: true, write: true, edit: false}) {
|
||||
// const user = new User(init);
|
||||
const user = this.#clone(init);
|
||||
user.organisations = this.organisations.accessToOperation("edit").disableOperation("edit");
|
||||
user.organisations.overlord = this.organisations;
|
||||
return user;
|
||||
}
|
||||
|
||||
|
||||
|
||||
/*
|
||||
* Conversion and presentation methods
|
||||
*/
|
||||
|
||||
toJSON () {
|
||||
return {
|
||||
id: this.id,
|
||||
ip: this.ip,
|
||||
host: this.host,
|
||||
name: this.name,
|
||||
email: this.email,
|
||||
description: this.description,
|
||||
colour: this.colour,
|
||||
active: this.active,
|
||||
organisations: this.organisations.toJSON(),
|
||||
password: this.password
|
||||
}
|
||||
}
|
||||
|
||||
toString (replacer, space) {
|
||||
return JSON.stringify(this.toJSON(), replacer, space);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
if (typeof module !== 'undefined' && module.exports) {
|
||||
module.exports = User; // CJS export
|
||||
}
|
||||
|
||||
// ESM export
|
||||
if (typeof exports !== 'undefined' && !exports.default) {
|
||||
exports.default = User; // ESM export
|
||||
}
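A short usage sketch (names, emails and organisations below are made up for illustration):

const User = require('./User');

const admin = new User({
  name: "Alice",
  email: "alice@example.com",
  organisations: { acme: { read: true, write: true, edit: true } }
});

const crew = new User({
  name: "Bob",
  organisations: { acme: { read: true, write: true, edit: false } }
});

admin.canRead(crew);   // true  – they share an organisation admin can read
admin.canEdit(crew);   // true  – admin holds edit rights over "acme"
crew.canEdit(admin);   // false

// Apply a permitted change to another user; returns undefined if not allowed
const updated = admin.edit(crew)?.to({ active: false });

// Spawn a delegate user without edit rights
const delegate = admin.spawn();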
|
||||
4
lib/modules/@dougal/user/index.js
Normal file
@@ -0,0 +1,4 @@
|
||||
|
||||
module.exports = {
|
||||
User: require('./User')
|
||||
}
|
||||
15
lib/modules/@dougal/user/package.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "@dougal/user",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"dependencies": {
|
||||
"@dougal/organisations": "file:../organisations"
|
||||
}
|
||||
}
|
||||
@@ -3,6 +3,7 @@ module.exports = {
|
||||
'@vue/cli-plugin-babel/preset'
|
||||
],
|
||||
plugins: [
|
||||
'@babel/plugin-proposal-logical-assignment-operators'
|
||||
'@babel/plugin-proposal-logical-assignment-operators',
|
||||
'@babel/plugin-transform-private-methods'
|
||||
]
|
||||
}
|
||||
|
||||
19806
lib/www/client/source/package-lock.json
generated
File diff suppressed because it is too large
@@ -7,6 +7,14 @@
|
||||
"build": "vue-cli-service build"
|
||||
},
|
||||
"dependencies": {
|
||||
"@deck.gl/aggregation-layers": "^9.1.13",
|
||||
"@deck.gl/geo-layers": "^9.1.13",
|
||||
"@deck.gl/mesh-layers": "^9.1.14",
|
||||
"@dougal/binary": "file:../../../modules/@dougal/binary",
|
||||
"@dougal/concurrency": "file:../../../modules/@dougal/concurrency",
|
||||
"@dougal/organisations": "file:../../../modules/@dougal/organisations",
|
||||
"@dougal/user": "file:../../../modules/@dougal/user",
|
||||
"@loaders.gl/obj": "^4.3.4",
|
||||
"@mdi/font": "^7.2.96",
|
||||
"buffer": "^6.0.3",
|
||||
"core-js": "^3.6.5",
|
||||
@@ -17,6 +25,7 @@
|
||||
"leaflet-arrowheads": "^1.2.2",
|
||||
"leaflet-realtime": "^2.2.0",
|
||||
"leaflet.markercluster": "^1.4.1",
|
||||
"lodash.debounce": "^4.0.8",
|
||||
"marked": "^9.1.4",
|
||||
"path-browserify": "^1.0.1",
|
||||
"plotly.js-dist": "^2.27.0",
|
||||
@@ -31,6 +40,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/plugin-proposal-logical-assignment-operators": "^7.14.5",
|
||||
"@babel/plugin-transform-private-methods": "^7.27.1",
|
||||
"@vue/cli-plugin-babel": "^5.0.8",
|
||||
"@vue/cli-plugin-router": "^5.0.8",
|
||||
"@vue/cli-plugin-vuex": "^5.0.8",
|
||||
|
||||
406982
lib/www/client/source/public/assets/boat0.obj
Normal file
File diff suppressed because it is too large
@@ -9,7 +9,7 @@
|
||||
:color="snackColour"
|
||||
:timeout="6000"
|
||||
>
|
||||
{{ snackText }}
|
||||
<div v-html="snackText"></div>
|
||||
<template v-slot:action="{ attrs }">
|
||||
<v-btn
|
||||
text
|
||||
@@ -52,9 +52,8 @@ export default {
|
||||
}),
|
||||
|
||||
computed: {
|
||||
snackText () { return this.$store.state.snack.snackText },
|
||||
snackText () { return this.$root.markdownInline(this.$store.state.snack.snackText) },
|
||||
snackColour () { return this.$store.state.snack.snackColour },
|
||||
...mapGetters(["serverEvent"])
|
||||
},
|
||||
|
||||
watch: {
|
||||
@@ -77,24 +76,41 @@ export default {
|
||||
this.$store.commit('setSnackText', "");
|
||||
}
|
||||
},
|
||||
|
||||
async serverEvent (event) {
|
||||
if (event.channel == "project" && event.payload?.schema == "public") {
|
||||
// Projects changed in some way or another
|
||||
await this.refreshProjects();
|
||||
} else if (event.channel == ".jwt" && event.payload?.token) {
|
||||
await this.setCredentials({token: event.payload?.token});
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
handleJWT (context, {payload}) {
|
||||
this.setCredentials({token: payload.token});
|
||||
},
|
||||
|
||||
handleProject (context, {payload}) {
|
||||
if (payload?.table == "public") {
|
||||
this.refreshProjects();
|
||||
}
|
||||
},
|
||||
|
||||
registerNotificationHandlers () {
|
||||
|
||||
this.$store.dispatch('registerHandler', {
|
||||
table: '.jwt',
|
||||
handler: this.handleJWT
|
||||
});
|
||||
|
||||
this.$store.dispatch('registerHandler', {
|
||||
table: 'project',
|
||||
handler: this.handleProject
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
...mapActions(["setCredentials", "refreshProjects"])
|
||||
},
|
||||
|
||||
async mounted () {
|
||||
// Local Storage values are always strings
|
||||
this.$vuetify.theme.dark = localStorage.getItem("darkTheme") == "true";
|
||||
this.registerNotificationHandlers();
|
||||
await this.setCredentials();
|
||||
this.refreshProjects();
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
max-width="600"
|
||||
>
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
<v-btn v-if="adminaccess"
|
||||
<v-btn v-if="adminaccess()"
|
||||
title="Create a new project from scratch. Generally, it's preferable to clone an existing project (right-click → ‘Clone’)"
|
||||
small
|
||||
outlined
|
||||
@@ -31,6 +31,7 @@
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex';
|
||||
import DougalProjectSettingsNameIdGeodetics from '@/components/project-settings/name-id-geodetics'
|
||||
import AccessMixin from '@/mixins/access';
|
||||
|
||||
export default {
|
||||
name: 'DougalAppBarExtensionProjectList',
|
||||
@@ -39,6 +40,10 @@ export default {
|
||||
DougalProjectSettingsNameIdGeodetics
|
||||
},
|
||||
|
||||
mixins: [
|
||||
AccessMixin
|
||||
],
|
||||
|
||||
data() {
|
||||
return {
|
||||
dialogOpen: false,
|
||||
@@ -50,10 +55,6 @@ export default {
|
||||
};
|
||||
},
|
||||
|
||||
computed: {
|
||||
...mapGetters(["adminaccess"])
|
||||
},
|
||||
|
||||
methods: {
|
||||
async save (data) {
|
||||
this.dialogOpen = false;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<template>
|
||||
<v-tabs :value="tab" show-arrows v-if="page != 'configuration'">
|
||||
<v-tab v-for="tab, index in tabs" :key="index" link :to="tabLink(tab.href)" v-text="tab.text"></v-tab>
|
||||
<template v-if="adminaccess">
|
||||
<template v-if="adminaccess()">
|
||||
<v-spacer></v-spacer>
|
||||
<v-tab :to="tabLink('configuration')" class="orange--text darken-3" title="Edit project settings"><v-icon small left color="orange darken-3">mdi-cog-outline</v-icon> Settings</v-tab>
|
||||
</template>
|
||||
@@ -15,9 +15,15 @@
|
||||
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex';
|
||||
import AccessMixin from '@/mixins/access';
|
||||
|
||||
export default {
|
||||
name: 'DougalAppBarExtensionProject',
|
||||
|
||||
mixins: [
|
||||
AccessMixin
|
||||
],
|
||||
|
||||
data() {
|
||||
return {
|
||||
tabs: [
|
||||
@@ -44,7 +50,6 @@ export default {
|
||||
return this.tabs.findIndex(t => t.href == this.page);
|
||||
},
|
||||
|
||||
...mapGetters(["adminaccess"])
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
@@ -9,8 +9,17 @@
|
||||
|
||||
<v-spacer></v-spacer>
|
||||
|
||||
<v-icon v-if="serverConnected" class="mr-6" small title="Connected to server">mdi-lan-connect</v-icon>
|
||||
<v-icon v-else class="mr-6" small color="red" title="Server connection lost (we'll reconnect automatically when the server comes back)">mdi-lan-disconnect</v-icon>
|
||||
<template v-if="isFrontendRemote">
|
||||
<template v-if="serverConnected">
|
||||
<v-icon v-if="isGatewayReliable" class="mr-6" title="Connected to server via gateway">mdi-cloud-outline</v-icon>
|
||||
<v-icon v-else class="mr-6" color="orange" title="Gateway connection is unreliable. Expect outages.">mdi-cloud-off</v-icon>
|
||||
</template>
|
||||
<v-icon v-else class="mr-6" color="red" :title="`Server connection lost: the gateway cannot reach the remote server.\nWe will reconnect automatically when the link with the remote server is restored.`">mdi-cloud-off</v-icon>
|
||||
</template>
|
||||
<template v-else>
|
||||
<v-icon v-if="serverConnected" class="mr-6" small title="Connected to server">mdi-lan-connect</v-icon>
|
||||
<v-icon v-else class="mr-6" small color="red" :title="`Server connection lost.\nWe will reconnect automatically when the server comes back.`">mdi-lan-disconnect</v-icon>
|
||||
</template>
|
||||
|
||||
<dougal-notifications-control class="mr-6"></dougal-notifications-control>
|
||||
|
||||
@@ -51,13 +60,39 @@ export default {
|
||||
DougalNotificationsControl
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
lastGatewayErrorTimestamp: 0,
|
||||
gatewayErrorSilencePeriod: 60000,
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
year () {
|
||||
const date = new Date();
|
||||
return date.getUTCFullYear();
|
||||
},
|
||||
|
||||
...mapState({serverConnected: state => state.notify.serverConnected})
|
||||
...mapState({
|
||||
serverConnected: state => state.notify.serverConnected,
|
||||
isFrontendRemote: state => state.api.serverInfo?.["remote-frontend"] ?? false,
|
||||
isGatewayReliable: state => state.api.isGatewayReliable
|
||||
})
|
||||
},
|
||||
|
||||
watch: {
|
||||
|
||||
isGatewayReliable (val) {
|
||||
if (val === false) {
|
||||
const elapsed = Date.now() - this.lastGatewayErrorTimestamp;
|
||||
this.lastGatewayErrorTimestamp = Date.now(); // remember when we last warned
|
||||
if (elapsed > this.gatewayErrorSilencePeriod) {
|
||||
this.$root.showSnack("Gateway error", "warning");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
};
|
||||
</script>
|
||||
|
||||
@@ -3,8 +3,10 @@
|
||||
<v-card-title class="headline">
|
||||
Array inline / crossline error
|
||||
<v-spacer></v-spacer>
|
||||
<!--
|
||||
<v-switch v-model="scatterplot" label="Scatterplot"></v-switch>
|
||||
<v-switch class="ml-4" v-model="histogram" label="Histogram"></v-switch>
|
||||
-->
|
||||
</v-card-title>
|
||||
|
||||
<v-container fluid fill-height>
|
||||
@@ -57,8 +59,8 @@ export default {
|
||||
graph: [],
|
||||
busy: false,
|
||||
resizeObserver: null,
|
||||
scatterplot: false,
|
||||
histogram: false
|
||||
scatterplot: true,
|
||||
histogram: true
|
||||
};
|
||||
},
|
||||
|
||||
|
||||
@@ -3,8 +3,10 @@
|
||||
<v-card-title class="headline">
|
||||
Gun depth
|
||||
<v-spacer></v-spacer>
|
||||
<!--
|
||||
<v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
|
||||
<v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
|
||||
-->
|
||||
</v-card-title>
|
||||
|
||||
<v-container fluid fill-height>
|
||||
@@ -59,7 +61,7 @@ export default {
|
||||
busy: false,
|
||||
resizeObserver: null,
|
||||
shotpoint: true,
|
||||
violinplot: false
|
||||
violinplot: true
|
||||
};
|
||||
},
|
||||
|
||||
|
||||
@@ -3,8 +3,10 @@
|
||||
<v-card-title class="headline">
|
||||
Gun pressures
|
||||
<v-spacer></v-spacer>
|
||||
<!--
|
||||
<v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
|
||||
<v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
|
||||
-->
|
||||
</v-card-title>
|
||||
|
||||
<v-container fluid fill-height>
|
||||
@@ -59,7 +61,7 @@ export default {
|
||||
busy: false,
|
||||
resizeObserver: null,
|
||||
shotpoint: true,
|
||||
violinplot: false
|
||||
violinplot: true
|
||||
};
|
||||
},
|
||||
|
||||
|
||||
@@ -3,8 +3,10 @@
|
||||
<v-card-title class="headline">
|
||||
Gun timing
|
||||
<v-spacer></v-spacer>
|
||||
<!--
|
||||
<v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
|
||||
<v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
|
||||
-->
|
||||
</v-card-title>
|
||||
|
||||
<v-container fluid fill-height>
|
||||
@@ -59,7 +61,7 @@ export default {
|
||||
busy: false,
|
||||
resizeObserver: null,
|
||||
shotpoint: true,
|
||||
violinplot: false
|
||||
violinplot: true
|
||||
};
|
||||
},
|
||||
|
||||
|
||||
@@ -127,7 +127,7 @@ export default {
|
||||
},
|
||||
|
||||
computed: {
|
||||
...mapGetters(['user', 'writeaccess', 'loading', 'serverEvent'])
|
||||
...mapGetters(['user', 'loading', 'serverEvent'])
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
@@ -39,7 +39,8 @@ export default {
|
||||
default:
|
||||
return {
|
||||
editable: false,
|
||||
displaylogo: false
|
||||
displaylogo: false,
|
||||
responsive: true
|
||||
};
|
||||
}
|
||||
},
|
||||
@@ -48,7 +49,8 @@ export default {
|
||||
const base = {
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
}
|
||||
},
|
||||
autosize: true
|
||||
};
|
||||
|
||||
switch (this.facet) {
|
||||
@@ -274,18 +276,25 @@ export default {
|
||||
replot () {
|
||||
if (this.plotted) {
|
||||
const ref = this.$refs.graph;
|
||||
Plotly.relayout(ref, {
|
||||
width: ref.clientWidth,
|
||||
height: ref.clientHeight
|
||||
});
|
||||
if (ref && ref.clientWidth > 0 && ref.clientHeight > 0) {
|
||||
Plotly.relayout(ref, {
|
||||
width: ref.clientWidth,
|
||||
height: ref.clientHeight
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.resizeObserver = new ResizeObserver(this.replot)
|
||||
this.resizeObserver.observe(this.$refs.graph);
|
||||
this.$nextTick( () => {
|
||||
if (this.items?.length) {
|
||||
this.plot();
|
||||
}
|
||||
this.resizeObserver = new ResizeObserver(this.replot)
|
||||
this.resizeObserver.observe(this.$refs.graph);
|
||||
});
|
||||
},
|
||||
|
||||
beforeDestroy () {
|
||||
|
||||
@@ -36,7 +36,8 @@ export default {
|
||||
config () {
|
||||
return {
|
||||
editable: false,
|
||||
displaylogo: false
|
||||
displaylogo: false,
|
||||
responsive: true
|
||||
};
|
||||
},
|
||||
|
||||
@@ -53,7 +54,8 @@ export default {
|
||||
title: "Time (s)"
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)"
|
||||
paper_bgcolor:"rgba(0,0,0,0)",
|
||||
autosize: true
|
||||
};
|
||||
},
|
||||
|
||||
@@ -154,10 +156,12 @@ export default {
|
||||
replot () {
|
||||
if (this.plotted) {
|
||||
const ref = this.$refs.graph;
|
||||
Plotly.relayout(ref, {
|
||||
width: ref.clientWidth,
|
||||
height: ref.clientHeight
|
||||
});
|
||||
if (ref && ref.clientWidth > 0 && ref.clientHeight > 0) {
|
||||
Plotly.relayout(ref, {
|
||||
width: ref.clientWidth,
|
||||
height: ref.clientHeight
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -190,8 +194,13 @@ export default {
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.resizeObserver = new ResizeObserver(this.replot)
|
||||
this.resizeObserver.observe(this.$refs.graph);
|
||||
this.$nextTick( () => {
|
||||
if (this.items?.length) {
|
||||
this.plot();
|
||||
}
|
||||
this.resizeObserver = new ResizeObserver(this.replot)
|
||||
this.resizeObserver.observe(this.$refs.graph);
|
||||
});
|
||||
},
|
||||
|
||||
beforeDestroy () {
|
||||
|
||||
@@ -0,0 +1,187 @@
|
||||
<template>
|
||||
<v-card v-if="comparison" class="ma-1">
|
||||
<v-card-title>Comparison Summary: Baseline {{ baseline.pid }} vs Monitor {{ monitor.pid }}</v-card-title>
|
||||
<v-card-text>
|
||||
<v-row>
|
||||
<v-col cols="12" md="6">
|
||||
<h3>Deviation Statistics</h3>
|
||||
<v-simple-table dense>
|
||||
<template v-slot:default>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Metric</th>
|
||||
<th>I (m)</th>
|
||||
<th>J (m)</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>Mean (μ)</td>
|
||||
<td>{{ comparison['μ'][0].toFixed(3) }}</td>
|
||||
<td>{{ comparison['μ'][1].toFixed(3) }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Std Dev (σ)</td>
|
||||
<td>{{ comparison['σ'][0].toFixed(3) }}</td>
|
||||
<td>{{ comparison['σ'][1].toFixed(3) }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>RMS</td>
|
||||
<td>{{ comparison.rms[0].toFixed(3) }}</td>
|
||||
<td>{{ comparison.rms[1].toFixed(3) }}</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</template>
|
||||
</v-simple-table>
|
||||
|
||||
<h3 class="mt-4">Error distribution</h3>
|
||||
<ul>
|
||||
<li title="Relative to I-axis positive direction">Primary Direction: {{ (comparison.primaryDirection * 180 / Math.PI).toFixed(2) }}°</li>
|
||||
<li>Anisotropy: {{ comparison.anisotropy.toFixed(2) }}</li>
|
||||
<li title="Length of the semi-major axis of the error ellipse">Semi-Major Axis: {{ semiMajorAxis.toFixed(2) }} m</li>
|
||||
<li title="Length of the semi-minor axis of the error ellipse">Semi-Minor Axis: {{ semiMinorAxis.toFixed(2) }} m</li>
|
||||
<li title="Area of the error ellipse">Error Ellipse Area: {{ ellipseArea.toFixed(2) }} m²</li>
|
||||
</ul>
|
||||
|
||||
<h3 class="mt-4">Counts</h3>
|
||||
<ul>
|
||||
<li title="Unique line / point pairs found in both projects">Common Points: {{ comparison.common }}</li>
|
||||
<li title="Total number of points compared, including reshoots, infills, etc.">Comparison Length: {{ comparison.length }}</li>
|
||||
<li title="Number of points in the baseline project">Baseline Points: {{ comparison.baselineLength }} (Unique: {{ comparison.baselineUniqueLength }})</li>
|
||||
<li title="Number of points in the monitor project">Monitor Points: {{ comparison.monitorLength }} (Unique: {{ comparison.monitorUniqueLength }})</li>
|
||||
</ul>
|
||||
|
||||
<p class="mt-3" title="Date and time when the comparison was last performed">Computation timestamp: {{ new Date(comparison.tstamp).toLocaleString() }}</p>
|
||||
</v-col>
|
||||
|
||||
<v-col cols="12" md="6">
|
||||
<h3>Error Ellipse</h3>
|
||||
<svg width="300" height="300" style="border: 1px solid #ccc;">
|
||||
<g :transform="`translate(150, 150) scale(${ellipseScale})`">
|
||||
<line x1="0" y1="-150" x2="0" y2="150" stroke="lightgray" stroke-dasharray="5,5"/>
|
||||
<line x1="-150" y1="0" x2="150" y2="0" stroke="lightgray" stroke-dasharray="5,5"/>
|
||||
<ellipse
|
||||
:rx="Math.sqrt(comparison.eigenvalues[0])"
|
||||
:ry="Math.sqrt(comparison.eigenvalues[1])"
|
||||
:transform="`rotate(${ellipseAngle})`"
|
||||
fill="none"
|
||||
stroke="blue"
|
||||
stroke-width="2"
|
||||
/>
|
||||
<line
|
||||
:x1="0"
|
||||
:y1="0"
|
||||
:x2="Math.sqrt(comparison.eigenvalues[0]) * Math.cos(ellipseRad)"
|
||||
:y2="Math.sqrt(comparison.eigenvalues[0]) * Math.sin(ellipseRad)"
|
||||
stroke="red"
|
||||
stroke-width="2"
|
||||
arrow-end="classic-wide-long"
|
||||
/>
|
||||
<line
|
||||
:x1="0"
|
||||
:y1="0"
|
||||
:x2="Math.sqrt(comparison.eigenvalues[1]) * Math.cos(ellipseRad + Math.PI / 2)"
|
||||
:y2="Math.sqrt(comparison.eigenvalues[1]) * Math.sin(ellipseRad + Math.PI / 2)"
|
||||
stroke="green"
|
||||
stroke-width="2"
|
||||
arrow-end="classic-wide-long"
|
||||
/>
|
||||
</g>
|
||||
</svg>
|
||||
<p class="text-caption">Ellipse scaled for visibility (factor: {{ ellipseScale.toFixed(1) }}). Axes represent sqrt(eigenvalues).</p>
|
||||
</v-col>
|
||||
</v-row>
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
export default {
|
||||
name: "DougalGroupComparisonSummary",
|
||||
|
||||
props: {
|
||||
baseline: { type: Object, required: true },
|
||||
monitor: { type: Object, required: true },
|
||||
comparison: { type: Object, required: true }
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
};
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
ellipseAngle () {
|
||||
if (!this.comparison) return 0;
|
||||
const ev = this.comparison.eigenvectors[0];
|
||||
return Math.atan2(ev[1], ev[0]) * 180 / Math.PI;
|
||||
},
|
||||
|
||||
ellipseRad () {
|
||||
return this.ellipseAngle * Math.PI / 180;
|
||||
},
|
||||
|
||||
ellipseRx () {
|
||||
if (!this.comparison) return 0;
|
||||
return Math.sqrt(this.comparison.eigenvalues[0]) * this.ellipseScale;
|
||||
},
|
||||
|
||||
ellipseRy () {
|
||||
if (!this.comparison) return 0;
|
||||
return Math.sqrt(this.comparison.eigenvalues[1]) * this.ellipseScale;
|
||||
},
|
||||
|
||||
ellipseScale () {
|
||||
if (!this.comparison) return 1;
|
||||
const maxSigma = Math.max(
|
||||
Math.sqrt(this.comparison.eigenvalues[0]),
|
||||
Math.sqrt(this.comparison.eigenvalues[1])
|
||||
);
|
||||
const maxMu = Math.max(
|
||||
Math.abs(this.comparison['μ'][0]),
|
||||
Math.abs(this.comparison['μ'][1])
|
||||
);
|
||||
//const maxExtent = maxMu + 3 * maxSigma;
|
||||
const maxExtent = 20;
|
||||
return 100 / maxExtent; // Adjust scale to fit within ~200 pixels diameter
|
||||
},
|
||||
|
||||
ellipseArea () {
|
||||
if (!this.comparison) return 0;
|
||||
const a = Math.sqrt(this.comparison.eigenvalues[0]);
|
||||
const b = Math.sqrt(this.comparison.eigenvalues[1]);
|
||||
return Math.PI * a * b;
|
||||
},
|
||||
|
||||
semiMajorAxis () {
|
||||
if (!this.comparison) return 0;
|
||||
return Math.max(
|
||||
Math.sqrt(this.comparison.eigenvalues[0]),
|
||||
Math.sqrt(this.comparison.eigenvalues[1])
|
||||
);
|
||||
},
|
||||
|
||||
semiMinorAxis () {
|
||||
if (!this.comparison) return 0;
|
||||
return Math.min(
|
||||
Math.sqrt(this.comparison.eigenvalues[0]),
|
||||
Math.sqrt(this.comparison.eigenvalues[1])
|
||||
);
|
||||
},
|
||||
|
||||
meanX () {
|
||||
return this.comparison ? this.comparison['μ'][0] : 0;
|
||||
},
|
||||
|
||||
meanY () {
|
||||
return this.comparison ? this.comparison['μ'][1] : 0;
|
||||
},
|
||||
|
||||
ellipseViewBox () {
|
||||
return '-150 -150 300 300';
|
||||
},
|
||||
|
||||
}
|
||||
}
|
||||
</script>
|
||||
1302
lib/www/client/source/src/components/groups/group-map.vue
Normal file
File diff suppressed because it is too large
@@ -0,0 +1,118 @@
|
||||
<template>
|
||||
<v-card class="ma-1">
|
||||
<v-card-title>Group Repeatability Summary</v-card-title>
|
||||
<v-card-text>
|
||||
<p>Error ellipse area for each baseline-monitor pair. Lower values indicate better repeatability. Colors range from green (best) to red (worst).</p>
|
||||
<v-simple-table dense>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Baseline \ Monitor</th>
|
||||
<th v-for="project in projects" :key="project.pid">{{ project.pid }}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr v-for="(baselineProject, rowIndex) in projects" :key="baselineProject.pid">
|
||||
<td>{{ baselineProject.pid }}</td>
|
||||
<td v-for="(monitorProject, colIndex) in projects" :key="monitorProject.pid">
|
||||
<v-tooltip v-if="colIndex > rowIndex" top>
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
<div
|
||||
:style="{ backgroundColor: getEllipseAreaColor(baselineProject.pid, monitorProject.pid), color: 'white', textAlign: 'center', padding: '4px' }"
|
||||
v-bind="attrs"
|
||||
v-on="on"
|
||||
@click="emitInput(baselineProject, monitorProject)"
|
||||
>
|
||||
{{ formatEllipseArea(baselineProject.pid, monitorProject.pid) }}
|
||||
</div>
|
||||
</template>
|
||||
<span v-if="getComp(baselineProject.pid, monitorProject.pid)">
|
||||
<div>σ_i: {{ getComp(baselineProject.pid, monitorProject.pid).meta['σ'][0].toFixed(2) }} m</div>
|
||||
<div>σ_j: {{ getComp(baselineProject.pid, monitorProject.pid).meta['σ'][1].toFixed(2) }} m</div>
|
||||
<div>Anisotropy: {{ getComp(baselineProject.pid, monitorProject.pid).meta.anisotropy.toFixed(0) }}</div>
|
||||
<div>Ellipse Area: {{ getEllipseArea(baselineProject.pid, monitorProject.pid).toFixed(2) }} m²</div>
|
||||
<div>Primary Direction: {{ formatPrimaryDirection(getComp(baselineProject.pid, monitorProject.pid)) }}°</div>
|
||||
</span>
|
||||
</v-tooltip>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</v-simple-table>
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
export default {
|
||||
name: 'DougalGroupRepeatabilitySummary',
|
||||
|
||||
props: {
|
||||
comparisons: {
|
||||
type: Array,
|
||||
required: true
|
||||
},
|
||||
projects: {
|
||||
type: Array,
|
||||
required: true
|
||||
}
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
};
|
||||
},
|
||||
|
||||
computed: {
|
||||
compMap () {
|
||||
return new Map(this.comparisons.map(c => [`${c.baseline_pid}-${c.monitor_pid}`, c]));
|
||||
},
|
||||
minEllipseArea () {
|
||||
if (!this.comparisons.length) return 0;
|
||||
return Math.min(...this.comparisons.map(c => {
|
||||
const a = Math.sqrt(c.meta.eigenvalues[0]);
|
||||
const b = Math.sqrt(c.meta.eigenvalues[1]);
|
||||
return Math.PI * a * b;
|
||||
}));
|
||||
},
|
||||
maxEllipseArea () {
|
||||
if (!this.comparisons.length) return 0;
|
||||
return Math.max(...this.comparisons.map(c => {
|
||||
const a = Math.sqrt(c.meta.eigenvalues[0]);
|
||||
const b = Math.sqrt(c.meta.eigenvalues[1]);
|
||||
return Math.PI * a * b;
|
||||
}));
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
getComp (basePid, monPid) {
|
||||
return this.compMap.get(`${basePid}-${monPid}`);
|
||||
},
|
||||
getEllipseArea (basePid, monPid) {
|
||||
const comp = this.getComp(basePid, monPid);
|
||||
if (!comp) return null;
|
||||
const a = Math.sqrt(comp.meta.eigenvalues[0]);
|
||||
const b = Math.sqrt(comp.meta.eigenvalues[1]);
|
||||
return Math.PI * a * b;
|
||||
},
|
||||
formatEllipseArea (basePid, monPid) {
|
||||
const val = this.getEllipseArea(basePid, monPid);
|
||||
return val !== null ? val.toFixed(1) : '';
|
||||
},
|
||||
getEllipseAreaColor (basePid, monPid) {
|
||||
const val = this.getEllipseArea(basePid, monPid);
|
||||
if (val === null) return '';
|
||||
const ratio = (val - this.minEllipseArea) / (this.maxEllipseArea - this.minEllipseArea);
|
||||
const hue = (1 - ratio) * 120;
|
||||
return `hsl(${hue}, 70%, 70%)`;
|
||||
},
|
||||
formatPrimaryDirection (comp) {
|
||||
if (!comp) return '';
|
||||
return (comp.meta.primaryDirection * 180 / Math.PI).toFixed(1);
|
||||
},
|
||||
emitInput (baselineProject, monitorProject) {
|
||||
if (this.getComp(baselineProject.pid, monitorProject.pid)) {
|
||||
this.$emit('input', baselineProject, monitorProject);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
</script>
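For reference, the colour scale above maps the normalised ellipse area onto an HSL hue from 120° (green, best repeatability) down to 0° (red, worst). A tiny worked example of the mapping used by getEllipseAreaColor:

// ratio = (value - min) / (max - min), which lies in [0, 1] by construction
const hueFor = ratio => (1 - ratio) * 120;
hueFor(0);    // 120 – hsl(120, 70%, 70%), green (smallest ellipse area)
hueFor(0.5);  //  60 – yellow (midpoint of the range)
hueFor(1);    //   0 – hsl(0, 70%, 70%), red (largest ellipse area)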
|
||||
@@ -2,6 +2,7 @@
|
||||
<v-dialog
|
||||
v-model="dialog"
|
||||
max-width="500"
|
||||
scrollable
|
||||
style="z-index:2020;"
|
||||
>
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
@@ -14,15 +15,54 @@
|
||||
</template>
|
||||
|
||||
<v-card>
|
||||
<v-card-title class="headline">
|
||||
Dougal user support
|
||||
</v-card-title>
|
||||
<v-window v-model="page">
|
||||
<v-window-item value="support">
|
||||
<v-card-title class="headline">
|
||||
Dougal user support
|
||||
</v-card-title>
|
||||
|
||||
<v-card-text>
|
||||
<p>You can get help or report a problem by sending an email to <a :href="`mailto:${email}`">{{email}}</a>. Please include as much information as possible about your problem or question—screenshots are often a good idea, and data files may also be attached.</p>
|
||||
<v-card-text>
|
||||
<p>You can get help or report a problem by sending an email to <a :href="`mailto:${email}`">{{email}}</a>. Please include as much information as possible about your problem or question—screenshots are often a good idea, and data files may also be attached.</p>
|
||||
|
||||
<p>When you write to the above address a ticket will be automatically created in the project's issue tracking system.</p>
|
||||
</v-card-text>
|
||||
<p>When you write to the above address a ticket will be automatically created in the project's issue tracking system.</p>
|
||||
|
||||
<v-alert dense type="info" border="left" outlined>
|
||||
<div class="text-body-2">
|
||||
You are using Dougal version:
|
||||
<ul>
|
||||
<li><code>{{clientVersion}}</code> (client)</li>
|
||||
<li><code>{{serverVersion}}</code> (server)</li>
|
||||
</ul>
|
||||
</div>
|
||||
</v-alert>
|
||||
|
||||
</v-card-text>
|
||||
</v-window-item>
|
||||
|
||||
<v-window-item value="changelog">
|
||||
<v-card-title class="headline">
|
||||
Dougal release notes
|
||||
</v-card-title>
|
||||
|
||||
<v-card-text>
|
||||
<v-carousel v-model="releaseShown"
|
||||
:continuous="false"
|
||||
:cycle="false"
|
||||
:show-arrows="true"
|
||||
:hide-delimiters="true"
|
||||
>
|
||||
<v-carousel-item v-for="release in releaseHistory">
|
||||
<pre>{{release}}</pre>
|
||||
</v-carousel-item>
|
||||
</v-carousel>
|
||||
</v-card-text>
|
||||
|
||||
|
||||
</v-window-item>
|
||||
<v-window-item value="serverinfo">
|
||||
<dougal-server-status :status="serverStatus"></dougal-server-status>
|
||||
</v-window-item>
|
||||
</v-window>
|
||||
|
||||
<v-divider></v-divider>
|
||||
|
||||
@@ -33,8 +73,7 @@
|
||||
text
|
||||
:href="`mailto:${email}?Subject=Question`"
|
||||
>
|
||||
<v-icon class="d-lg-none">mdi-help-circle</v-icon>
|
||||
<span class="d-none d-lg-inline">Ask a question</span>
|
||||
<v-icon title="Ask a question">mdi-help-circle</v-icon>
|
||||
</v-btn>
|
||||
|
||||
<v-btn
|
||||
@@ -42,10 +81,10 @@
|
||||
text
|
||||
href="mailto:dougal-support@aaltronav.eu?Subject=Bug report"
|
||||
>
|
||||
<v-icon class="d-lg-none">mdi-bug</v-icon>
|
||||
<span class="d-none d-lg-inline">Report a bug</span>
|
||||
<v-icon title="Report a bug">mdi-bug</v-icon>
|
||||
</v-btn>
|
||||
|
||||
<!---
|
||||
<v-btn
|
||||
color="info"
|
||||
text
|
||||
@@ -54,6 +93,37 @@
|
||||
>
|
||||
<v-icon>mdi-rss</v-icon>
|
||||
</v-btn>
|
||||
--->
|
||||
|
||||
<v-btn
|
||||
color="info"
|
||||
text
|
||||
title="View support info"
|
||||
:input-value="page == 'support'"
|
||||
@click="page = 'support'"
|
||||
>
|
||||
<v-icon>mdi-account-question</v-icon>
|
||||
</v-btn>
|
||||
|
||||
<v-btn v-if="versionHistory"
|
||||
color="info"
|
||||
text
|
||||
title="View release notes"
|
||||
:input-value="page == 'changelog'"
|
||||
@click="page = 'changelog'"
|
||||
>
|
||||
<v-icon>mdi-history</v-icon>
|
||||
</v-btn>
|
||||
|
||||
<v-btn v-if="serverStatus"
|
||||
color="info"
|
||||
text
|
||||
title="View server status"
|
||||
:input-value="page == 'serverinfo'"
|
||||
@click="page = 'serverinfo'"
|
||||
>
|
||||
<v-icon>mdi-server-network</v-icon>
|
||||
</v-btn>
|
||||
|
||||
<v-spacer></v-spacer>
|
||||
|
||||
@@ -75,15 +145,111 @@
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex';
|
||||
import DougalServerStatus from './server-status';
|
||||
|
||||
export default {
|
||||
name: 'DougalHelpDialog',
|
||||
|
||||
components: {
|
||||
DougalServerStatus
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
dialog: false,
|
||||
email: "dougal-support@aaltronav.eu",
|
||||
feed: btoa(encodeURIComponent("https://gitlab.com/wgp/dougal/software.atom?feed_token=XSPpvsYEny8YmH75Nz5W"))
|
||||
feed: btoa(encodeURIComponent("https://gitlab.com/wgp/dougal/software.atom?feed_token=XSPpvsYEny8YmH75Nz5W")),
|
||||
serverStatus: null,
|
||||
clientVersion: process.env.DOUGAL_FRONTEND_VERSION ?? "(unknown)",
|
||||
serverVersion: null,
|
||||
versionHistory: null,
|
||||
releaseHistory: [],
|
||||
releaseShown: null,
|
||||
page: "support",
|
||||
|
||||
lastUpdate: 0,
|
||||
updateInterval: 12000,
|
||||
refreshTimer: null
|
||||
};
|
||||
},
|
||||
|
||||
computed: {
|
||||
sinceUpdate () {
|
||||
return this.lastUpdate
|
||||
? (Date.now() - this.lastUpdate)
|
||||
: +Infinity;
|
||||
}
|
||||
},
|
||||
|
||||
watch: {
|
||||
dialog(newVal) {
|
||||
if (newVal) {
|
||||
this.startAutoRefresh();
|
||||
} else {
|
||||
this.stopAutoRefresh();
|
||||
}
|
||||
},
|
||||
page(newVal) {
|
||||
if (newVal === 'serverinfo' && this.dialog) {
|
||||
this.getServerStatus(); // Immediate update when switching to serverinfo
|
||||
this.startAutoRefresh();
|
||||
} else {
|
||||
this.stopAutoRefresh();
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
methods: {
|
||||
async getServerVersion () {
|
||||
if (!this.serverVersion) {
|
||||
const version = await this.api(['/version', {}, null, {silent:true}]);
|
||||
this.serverVersion = version?.tag ?? "(unknown)";
|
||||
if (version) this.lastUpdate = Date.now();
|
||||
}
|
||||
if (!this.versionHistory) {
|
||||
const history = await this.api(['/version/history?count=6', {}, null, {silent:true}]);
|
||||
this.releaseHistory = history;
|
||||
this.versionHistory = history?.[this.serverVersion.replace(/-.*$/, "")] ?? null;
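// The replace() above strips anything from the first "-" onwards (e.g. a
// pre-release or build suffix) so the lookup uses the bare tag; this assumes
// `history` is an object keyed by bare version tags.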
|
||||
}
|
||||
},
|
||||
|
||||
async getServerStatus () {
|
||||
const status = await this.api(['/diagnostics', {}, null, {silent: true}]);
|
||||
if (status) {
|
||||
this.serverStatus = status;
|
||||
this.lastUpdate = Date.now();
|
||||
}
|
||||
},
|
||||
|
||||
startAutoRefresh() {
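// Poll the server while the dialog is open on the "serverinfo" page: re-fetch
// /diagnostics every `updateInterval` ms (12 s). The `dialog` and `page`
// watchers above start and stop this timer as the state changes.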
|
||||
if (this.refreshTimer) return; // Prevent multiple timers
|
||||
this.refreshTimer = setInterval(() => {
|
||||
if (this.dialog && this.page === 'serverinfo') {
|
||||
this.getServerStatus();
|
||||
// Optionally refresh server version if needed
|
||||
// this.getServerVersion();
|
||||
}
|
||||
}, this.updateInterval);
|
||||
},
|
||||
|
||||
stopAutoRefresh() {
|
||||
if (this.refreshTimer) {
|
||||
clearInterval(this.refreshTimer);
|
||||
this.refreshTimer = null;
|
||||
}
|
||||
},
|
||||
|
||||
...mapActions(["api"])
|
||||
},
|
||||
|
||||
async mounted () {
|
||||
this.getServerVersion();
|
||||
this.getServerStatus();
|
||||
},
|
||||
|
||||
beforeDestroy() {
|
||||
this.stopAutoRefresh(); // Clean up timer on component destruction
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
<template>
|
||||
<div class="line-status" v-if="sequences.length == 0">
|
||||
<slot name="empty"></slot>
|
||||
</div>
|
||||
<div class="line-status" v-else-if="sequenceHref || plannedSequenceHref || pendingReshootHref">
|
||||
<div class="line-status" v-if="sequenceHref || plannedSequenceHref || pendingReshootHref">
|
||||
<router-link v-for="sequence in sequences" :key="sequence.sequence" v-if="sequenceHref"
|
||||
class="sequence"
|
||||
:class="sequence.status"
|
||||
@@ -26,7 +23,7 @@
|
||||
>
|
||||
</router-link>
|
||||
</div>
|
||||
<div class="line-status" v-else>
|
||||
<div class="line-status" v-else-if="sequences.length || plannedSequences.length || Object.keys(pendingReshoots).length">
|
||||
<div v-for="sequence in sequences" :key="sequence.sequence"
|
||||
class="sequence"
|
||||
:class="sequence.status"
|
||||
@@ -47,6 +44,9 @@
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
<div class="line-status" v-else>
|
||||
<slot name="empty"></slot>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style lang="stylus" scoped>
|
||||
|
||||
@@ -32,16 +32,61 @@
|
||||
</template>
|
||||
|
||||
<v-list dense>
|
||||
<v-list-item :href="`/settings/equipment`">
|
||||
<v-list-item-title>Equipment list</v-list-item-title>
|
||||
<v-list-item href="/settings/equipment">
|
||||
<v-list-item-content>
|
||||
<v-list-item-title>Equipment list</v-list-item-title>
|
||||
<v-list-item-subtitle>Manage the list of equipment reported in logs</v-list-item-subtitle>
|
||||
</v-list-item-content>
|
||||
<v-list-item-action><v-icon small>mdi-view-list</v-icon></v-list-item-action>
|
||||
</v-list-item>
|
||||
<template v-if="false">
|
||||
<v-divider></v-divider>
|
||||
<v-list-item href="/settings">
|
||||
<v-list-item-content>
|
||||
<v-list-item-title>Local settings</v-list-item-title>
|
||||
<v-list-item-subtitle>Manage this vessel's configuration</v-list-item-subtitle>
|
||||
</v-list-item-content>
|
||||
<v-list-item-action><v-icon small>mdi-ferry</v-icon></v-list-item-action>
|
||||
</v-list-item>
|
||||
</template>
|
||||
</v-list>
|
||||
|
||||
</v-menu>
|
||||
|
||||
|
||||
<v-breadcrumbs :items="path"></v-breadcrumbs>
|
||||
<v-breadcrumbs :items="path">
|
||||
<template v-slot:item="{ item }">
|
||||
<v-breadcrumbs-item :href="item.href" :disabled="item.disabled" v-if="item.organisations">
|
||||
<v-tooltip bottom>
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
<span v-bind="attrs" v-on="on">{{ item.text }}</span>
|
||||
</template>
|
||||
<div class="text-overline">Project permissions</div>
|
||||
<v-simple-table dense>
|
||||
<template v-slot:default>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Organisation</th><th>Read</th><th>Write</th><th>Edit</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr v-for="(operations, name) in item.organisations">
|
||||
<td v-if="name == '*'"><v-chip small label color="primary">All</v-chip></td>
|
||||
<td v-else><v-chip small label outlined>{{ name }}</v-chip></td>
|
||||
<td>{{ operations.read ? "✔" : " " }}</td>
|
||||
<td>{{ operations.write ? "✔" : " " }}</td>
|
||||
<td>{{ operations.edit ? "✔" : " " }}</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</template>
|
||||
</v-simple-table>
|
||||
</v-tooltip>
|
||||
</v-breadcrumbs-item>
|
||||
<v-breadcrumbs-item :href="item.href" :disabled="item.disabled" v-else>
|
||||
{{ item.text }}
|
||||
</v-breadcrumbs-item>
|
||||
</template>
|
||||
</v-breadcrumbs>
|
||||
|
||||
<template v-if="$route.name != 'Login'">
|
||||
<v-btn text link to="/login" v-if="!user && !loading">Log in</v-btn>
|
||||
@@ -50,10 +95,37 @@
|
||||
<v-menu
|
||||
offset-y
|
||||
>
|
||||
<template v-slot:activator="{on, attrs}">
|
||||
<v-avatar :color="user.colour || 'primary'" :title="`${user.name} (${user.role})`" v-bind="attrs" v-on="on">
|
||||
<span class="white--text">{{user.name.slice(0, 5)}}</span>
|
||||
</v-avatar>
|
||||
<template v-slot:activator="{ on: menu, attrs }">
|
||||
<v-tooltip bottom>
|
||||
<template v-slot:activator="{ on: tooltip }">
|
||||
<v-avatar :color="user.colour || 'primary'" v-bind="attrs" v-on="{...tooltip, ...menu}">
|
||||
<span class="white--text">{{user.name.slice(0, 5)}}</span>
|
||||
</v-avatar>
|
||||
</template>
|
||||
<div class="text-overline">{{ user.name }}</div>
|
||||
<v-card flat class="my-1" v-if="user.description">
|
||||
<v-card-text class="pb-1" v-html="$root.markdown(user.description)">
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
<v-simple-table dense>
|
||||
<template v-slot:default>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Organisation</th><th>Read</th><th>Write</th><th>Edit</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr v-for="org in user.organisations">
|
||||
<td v-if="org.name == '*'"><v-chip small label color="primary">All</v-chip></td>
|
||||
<td v-else><v-chip small label outlined>{{ org.name }}</v-chip></td>
|
||||
<td>{{ org.operations.read ? "✔" : " " }}</td>
|
||||
<td>{{ org.operations.write ? "✔" : " " }}</td>
|
||||
<td>{{ org.operations.edit ? "✔" : " " }}</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</template>
|
||||
</v-simple-table>
|
||||
</v-tooltip>
|
||||
</template>
|
||||
|
||||
<v-list dense>
|
||||
@@ -66,8 +138,29 @@
|
||||
</v-list-item>
|
||||
<v-list-item link to="/logout" v-else>
|
||||
<v-list-item-icon><v-icon small>mdi-logout</v-icon></v-list-item-icon>
|
||||
<v-list-item-title>Log out</v-list-item-title>
|
||||
<v-list-item-content>
|
||||
<v-list-item-title>Log out</v-list-item-title>
|
||||
</v-list-item-content>
|
||||
</v-list-item>
|
||||
<v-divider></v-divider>
|
||||
<template v-if="canManageUsers">
|
||||
<v-list-item link to="/users">
|
||||
<v-list-item-icon><v-icon small>mdi-account-multiple</v-icon></v-list-item-icon>
|
||||
<v-list-item-content>
|
||||
<v-list-item-title>Manage users</v-list-item-title>
|
||||
<v-list-item-subtitle>Add, edit and remove users</v-list-item-subtitle>
|
||||
</v-list-item-content>
|
||||
</v-list-item>
|
||||
</template>
|
||||
<template v-else-if="user && !user.autologin">
|
||||
<v-list-item link :to="`/users/${user.id}`">
|
||||
<v-list-item-icon><v-icon small>mdi-account</v-icon></v-list-item-icon>
|
||||
<v-list-item-content>
|
||||
<v-list-item-title>User profile</v-list-item-title>
|
||||
<v-list-item-subtitle>Edit your user profile</v-list-item-subtitle>
|
||||
</v-list-item-content>
|
||||
</v-list-item>
|
||||
</template>
|
||||
</v-list>
|
||||
|
||||
</v-menu>
|
||||
@@ -102,6 +195,19 @@ export default {
|
||||
.pop()?.component;
|
||||
},
|
||||
|
||||
title () {
|
||||
return this.user.name + "\n" + [...this.user.organisations].map( ({name, operations}) => {
|
||||
if (name == "*") name = "All organisations";
|
||||
let str = name+": ";
|
||||
str += [ "read", "write", "edit" ].map( op => operations[op] ? op : null ).filter( op => op ).join(", ");
|
||||
return str;
|
||||
}).join("\n")
|
||||
},
|
||||
|
||||
canManageUsers () {
|
||||
return this.user.organisations.accessToOperation("edit").length;
|
||||
},
|
||||
|
||||
...mapGetters(['user', 'loading'])
|
||||
},
|
||||
|
||||
|
||||
112
lib/www/client/source/src/components/organisations-item.vue
Normal file
@@ -0,0 +1,112 @@
|
||||
<template>
|
||||
<v-row dense no-gutters>
|
||||
|
||||
<v-col>
|
||||
<slot name="prepend"></slot>
|
||||
</v-col>
|
||||
|
||||
<v-col cols="6">
|
||||
<v-text-field
|
||||
class="mr-5"
|
||||
dense
|
||||
label="Name"
|
||||
:value="name"
|
||||
:readonly="true"
|
||||
></v-text-field>
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<v-checkbox
|
||||
class="mr-3"
|
||||
label="Read"
|
||||
v-model="operations.read"
|
||||
:readonly="readonly"
|
||||
></v-checkbox>
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<v-checkbox
|
||||
class="mr-3"
|
||||
label="Write"
|
||||
v-model="operations.write"
|
||||
:readonly="readonly"
|
||||
></v-checkbox>
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<v-checkbox
|
||||
class="mr-3"
|
||||
label="Edit"
|
||||
v-model="operations.edit"
|
||||
:readonly="readonly"
|
||||
></v-checkbox>
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<!-- Just to fill the twelve-column grid -->
|
||||
<!--
|
||||
NOTE: this column could also be used for
|
||||
a popdown menu with additional operations
|
||||
if needed.
|
||||
-->
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<slot name="append"></slot>
|
||||
</v-col>
|
||||
|
||||
</v-row>
|
||||
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
</style>
|
||||
|
||||
<script>
|
||||
import { Organisations } from '@dougal/organisations';
|
||||
|
||||
export default {
|
||||
name: "DougalOrganisationsItem",
|
||||
|
||||
props: {
|
||||
name: String,
|
||||
value: Object,
|
||||
readonly: Boolean,
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
operations: {...this.value}
|
||||
}
|
||||
},
|
||||
|
||||
watch: {
|
||||
value: {
|
||||
handler (newValue) {
|
||||
this.operations = {...this.value};
|
||||
},
|
||||
deep: true,
|
||||
},
|
||||
|
||||
operations: {
|
||||
handler (newValue) {
|
||||
if (["read", "write", "edit"].some( k => newValue[k] != this.value[k] )) {
|
||||
// Only emit if a value has actually changed
|
||||
this.$emit("input", {...newValue});
|
||||
}
|
||||
},
|
||||
deep: true,
|
||||
},
|
||||
},
|
||||
|
||||
methods: {
|
||||
reset () {
|
||||
}
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.reset();
|
||||
}
|
||||
|
||||
}
|
||||
</script>
|
||||
191
lib/www/client/source/src/components/organisations.vue
Normal file
@@ -0,0 +1,191 @@
|
||||
<template>
|
||||
<v-card>
|
||||
<v-card-title>Organisations</v-card-title>
|
||||
<v-card-subtitle>Organisation access</v-card-subtitle>
|
||||
<v-card-text>
|
||||
<v-form>
|
||||
|
||||
<v-container>
|
||||
|
||||
<dougal-organisations-item v-for="organisation in localOrganisations.names()"
|
||||
:key="organisation"
|
||||
:name="organisation"
|
||||
:value="localOrganisations.get(organisation)"
|
||||
@input="setOrganisation(organisation, $event)"
|
||||
>
|
||||
<template v-slot:append v-if="!readonly">
|
||||
<v-btn
|
||||
class="ml-3"
|
||||
fab
|
||||
text
|
||||
small
|
||||
title="Remove this organisation"
|
||||
>
|
||||
<v-icon
|
||||
color="error"
|
||||
@click="removeOrganisation(organisation)"
|
||||
>mdi-minus</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
</dougal-organisations-item>
|
||||
|
||||
|
||||
<v-row no-gutters class="mb-2" v-if="!readonly">
|
||||
<h4>Add organisation</h4>
|
||||
</v-row>
|
||||
|
||||
<v-row no-gutters class="mb-2" v-if="!readonly">
|
||||
<v-combobox v-if="canCreateOrganisations"
|
||||
label="Organisation"
|
||||
:items="remainingOrganisations"
|
||||
v-model="organisationName"
|
||||
@input.native="organisationName = $event.srcElement.value"
|
||||
@keyup.enter="addOrganisation()"
|
||||
></v-combobox>
|
||||
<v-select v-else
|
||||
label="Organisation"
|
||||
:items="remainingOrganisations"
|
||||
v-model="organisationName"
|
||||
></v-select>
|
||||
<v-btn
|
||||
class="ml-3"
|
||||
fab
|
||||
text
|
||||
small
|
||||
title="Add organisation"
|
||||
:disabled="!(organisationName && organisationName.length)"
|
||||
@click="addOrganisation()"
|
||||
>
|
||||
<v-icon
|
||||
color="primary"
|
||||
>mdi-plus</v-icon>
|
||||
</v-btn>
|
||||
</v-row>
|
||||
|
||||
</v-container>
|
||||
</v-form>
|
||||
</v-card-text>
|
||||
<v-card-actions>
|
||||
<slot name="actions" v-bind="{ self, organisations, readonly, validationErrors, canCreateOrganisations }">
|
||||
</slot>
|
||||
</v-card-actions>
|
||||
</v-card>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { Organisations } from '@dougal/organisations';
|
||||
import DougalOrganisationsItem from './organisations-item';
|
||||
|
||||
|
||||
export default {
|
||||
name: "DougalOrganisations",
|
||||
|
||||
components: {
|
||||
DougalOrganisationsItem
|
||||
},
|
||||
|
||||
props: {
|
||||
self: Object,
|
||||
organisations: Object,
|
||||
readonly: Boolean
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
organisationName: "",
|
||||
localOrganisations: this.setLocalOrganisations(this.organisations)
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
availableOrganisations () {
|
||||
return this.self.organisations.names();
|
||||
},
|
||||
|
||||
// Organisations available to add.
|
||||
// These are the organisations in `availableOrganisations`
|
||||
// minus any that have already been added.
|
||||
// The special value "*" (meaning "every organisation")
|
||||
// is not included.
|
||||
remainingOrganisations () {
|
||||
const orgs = [];
|
||||
|
||||
for (const org of this.availableOrganisations) {
|
||||
if (org != "*" && !this.localOrganisations.has(org)) {
|
||||
orgs.push(org);
|
||||
}
|
||||
}
|
||||
|
||||
return orgs;
|
||||
},
|
||||
|
||||
canCreateOrganisations () {
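// Only a user holding "edit" on the wildcard "*" entry may type in new
// organisation names (v-combobox); everyone else picks from the existing
// list via the v-select fallback in the template.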
|
||||
return this.self.organisations.value("*")?.edit ?? false;
|
||||
},
|
||||
|
||||
validationErrors () {
|
||||
const errors = [];
|
||||
|
||||
// Check if there is at least one organisation
|
||||
if (!this.localOrganisations.length) {
errors.push("ERR_NO_ORGS");
}

// Check if at least one organisation has edit rights

return errors;
},
|
||||
|
||||
},
|
||||
|
||||
watch: {
|
||||
organisations (newValue) {
|
||||
this.localOrganisations = this.setLocalOrganisations(newValue);
|
||||
},
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
setLocalOrganisations (value) {
|
||||
return new Organisations(value);
|
||||
},
|
||||
|
||||
setOrganisation(name, value) {
|
||||
this.localOrganisations.set(name, value);
|
||||
this.$emit("update:organisations", new Organisations(this.localOrganisations));
|
||||
},
|
||||
|
||||
addOrganisation () {
|
||||
const key = this.organisationName;
|
||||
if (!this.localOrganisations.has(key)) {
|
||||
this.localOrganisations.set(key);
|
||||
this.$emit("update:organisations", this.localOrganisations);
|
||||
}
|
||||
this.organisationName = "";
|
||||
},
|
||||
|
||||
removeOrganisation (key) {
|
||||
if (this.localOrganisations.has(key)) {
|
||||
this.localOrganisations.remove(key);
|
||||
}
|
||||
this.$emit("update:organisations", this.localOrganisations);
|
||||
},
|
||||
|
||||
reset () {
|
||||
},
|
||||
|
||||
save () {
|
||||
},
|
||||
|
||||
back () {
|
||||
this.$emit('close');
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.reset();
|
||||
}
|
||||
|
||||
}
|
||||
</script>
|
||||
@@ -4,15 +4,15 @@
|
||||
<v-card-subtitle v-text="subtitle"></v-card-subtitle>
|
||||
<v-card-text>
|
||||
<v-tabs v-model="tab">
|
||||
<v-tab>Paths</v-tab>
|
||||
<v-tab>Globs</v-tab>
|
||||
<v-tab v-if="pattern">Pattern</v-tab>
|
||||
<v-tab v-if="lineNameInfo">Line info</v-tab>
|
||||
<v-tab tab-value="paths">Paths</v-tab>
|
||||
<v-tab tab-value="globs">Globs</v-tab>
|
||||
<v-tab tab-value="pattern" v-if="pattern">Pattern</v-tab>
|
||||
<v-tab tab-value="lineNameInfo" v-if="lineNameInfo">Line info</v-tab>
|
||||
</v-tabs>
|
||||
|
||||
<v-tabs-items v-model="tab">
|
||||
|
||||
<v-tab-item>
|
||||
<v-tab-item value="paths">
|
||||
<v-card flat>
|
||||
<v-card-subtitle>
|
||||
A list of directories which are searched for matching files.
|
||||
@@ -56,7 +56,7 @@
|
||||
</v-card>
|
||||
</v-tab-item>
|
||||
|
||||
<v-tab-item>
|
||||
<v-tab-item value="globs">
|
||||
<v-card flat>
|
||||
<v-card-subtitle>
|
||||
A list of <a href="https://en.wikipedia.org/wiki/Glob_(programming)" target="_blank">glob patterns</a> expanding to match the files of interest. Note that Linux is case-sensitive.
|
||||
@@ -93,7 +93,7 @@
|
||||
</v-card>
|
||||
</v-tab-item>
|
||||
|
||||
<v-tab-item v-if="pattern">
|
||||
<v-tab-item value="pattern" v-if="pattern">
|
||||
<v-card flat>
|
||||
<v-card-subtitle>
|
||||
Regular expression that describes the file format definition. Used to capture information such as line and sequence number, etc.
|
||||
@@ -153,7 +153,7 @@
|
||||
</v-card>
|
||||
</v-tab-item>
|
||||
|
||||
<v-tab-item v-if="lineNameInfo">
|
||||
<v-tab-item value="lineNameInfo">
|
||||
<v-card flat>
|
||||
<v-card-subtitle>
|
||||
Line information that will be extracted from file names
|
||||
@@ -165,14 +165,14 @@
|
||||
label="Example file name"
|
||||
hint="Enter the name of a representative file to make it easier to visualise your configuration"
|
||||
persistent-hint
|
||||
v-model="lineNameInfo.example"
|
||||
v-model="lineNameInfo_.example"
|
||||
></v-text-field>
|
||||
|
||||
<dougal-fixed-string-decoder
|
||||
:multiline="true"
|
||||
:text="lineNameInfo.example"
|
||||
:fixed.sync="lineNameInfo.fixed"
|
||||
:fields.sync="lineNameInfo.fields"
|
||||
:text="lineNameInfo_.example"
|
||||
:fixed.sync="lineNameInfo_.fixed"
|
||||
:fields.sync="lineNameInfo_.fields"
|
||||
></dougal-fixed-string-decoder>
|
||||
|
||||
</v-form>
|
||||
@@ -195,6 +195,23 @@
|
||||
@click="reset"
|
||||
>Reset</v-btn>
|
||||
-->
|
||||
<v-btn
|
||||
v-if="tab=='lineNameInfo'"
|
||||
:disabled="!validLineNameInfo"
|
||||
@click="copyLineNameInfo"
|
||||
title="Copy this definition into the clipboard. It can then be pasted into other sections or configurations."
|
||||
>
|
||||
<v-icon left>mdi-content-copy</v-icon>
|
||||
Copy
|
||||
</v-btn>
|
||||
<v-btn
|
||||
v-if="tab=='lineNameInfo'"
|
||||
@click="pasteLineNameInfo"
|
||||
title="Paste a line info definition copied from elsewhere"
|
||||
>
|
||||
<v-icon left>mdi-content-paste</v-icon>
|
||||
Paste
|
||||
</v-btn>
|
||||
<v-spacer></v-spacer>
|
||||
<v-btn
|
||||
color="secondary"
|
||||
@@ -253,6 +270,9 @@ export default {
|
||||
},
|
||||
|
||||
computed: {
|
||||
validLineNameInfo () {
|
||||
return typeof this.lineNameInfo == 'object';
|
||||
},
|
||||
},
|
||||
|
||||
watch: {
|
||||
@@ -285,6 +305,28 @@ export default {
|
||||
|
||||
methods: {
|
||||
|
||||
async copyLineNameInfo () {
|
||||
await navigator.clipboard.writeText(JSON.stringify(this.lineNameInfo, null, 4));
|
||||
this.showSnack(["Line name information copied to clipboard", "primary"]);
|
||||
},
|
||||
|
||||
async pasteLineNameInfo () {
|
||||
const text = await navigator.clipboard.readText();
|
||||
try {
|
||||
const data = JSON.parse(text);
|
||||
if (["fixed", "fields", "example"].every( key => key in data )) {
|
||||
this.$emit("update:lineNameInfo", data);
|
||||
this.showSnack(["Line name information pasted from clipboard", "primary"]);
|
||||
} else {
|
||||
this.showSnack(["Clipboard contents are not valid line name information", "error"]);
|
||||
}
|
||||
} catch (err) {
|
||||
if (err instanceof SyntaxError) {
|
||||
this.showSnack(["Clipboard contents are not valid line name information", "error"]);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
reset () {
|
||||
this.globs_ = this.globs;
|
||||
this.paths_ = this.paths;
|
||||
@@ -302,6 +344,8 @@ export default {
|
||||
this.$emit('close');
|
||||
},
|
||||
|
||||
...mapActions(["showSnack"])
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
|
||||
@@ -0,0 +1,81 @@
|
||||
<template>
|
||||
<v-card flat>
|
||||
<v-card-text>
|
||||
<dougal-organisations
|
||||
:self="user"
|
||||
:organisations.sync="organisations_"
|
||||
>
|
||||
<template v-slot:actions>
|
||||
<v-spacer></v-spacer>
|
||||
<v-btn
|
||||
color="secondary"
|
||||
@click="back"
|
||||
>Back</v-btn>
|
||||
</template>
|
||||
</dougal-organisations>
|
||||
</v-card-text>
|
||||
<v-card-actions>
|
||||
</v-card-actions>
|
||||
</v-card>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex'
|
||||
import DougalOrganisations from '../organisations'
|
||||
|
||||
|
||||
export default {
|
||||
name: "DougalProjectSettingsOrganisations",
|
||||
|
||||
components: {
|
||||
DougalOrganisations
|
||||
},
|
||||
|
||||
props: {
|
||||
organisations: Object,
|
||||
value: Object
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
organisations_: {
|
||||
get () {
|
||||
return this.organisations;
|
||||
},
|
||||
|
||||
set (v) {
|
||||
this.$emit("input", {
|
||||
...this.value,
|
||||
organisations: v.toJSON()
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
...mapGetters(['user', 'loading', 'serverEvent'])
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
reset () {
|
||||
},
|
||||
|
||||
save () {
|
||||
},
|
||||
|
||||
back () {
|
||||
this.$emit('close');
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.reset();
|
||||
}
|
||||
|
||||
}
|
||||
</script>
|
||||
@@ -23,7 +23,34 @@
|
||||
label="File format"
|
||||
:items="preplotFileTypes"
|
||||
v-model="fileType"
|
||||
></v-select>
|
||||
:append-outer-icon="fileClass == 'saillines' && fileType == 'x-sl+csv' ? 'mdi-help-circle-outline' : ''"
|
||||
>
|
||||
<template v-slot:append-outer="" v-if="fileClass == 'saillines' && fileType == 'x-sl+csv'">
|
||||
<v-menu :close-on-content-click="false" v-model="tooltip">
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
<v-btn icon v-bind="attrs" title="Information on sailline CSV files" @click="tooltip = !tooltip"><v-icon>mdi-help-circle-outline</v-icon></v-btn>
|
||||
</template>
|
||||
|
||||
<v-card>
|
||||
<v-card-title>Saillines CSV format</v-card-title>
|
||||
<v-card-text>
|
||||
<p>
|
||||
The input CSV should have the following comma-separated fields:
|
||||
<dl>
|
||||
<dt><code>sail_line</code></dt> <dd>The vessel line number</dd>
|
||||
<dt><code>incr</code></dt> <dd><em>1</em> if this line is to be shot in the incrementing shot points direction, <em>0</em> or blank otherwise</dd>
|
||||
<dt><code>ntba</code></dt> <dd><em>1</em> if this line is not to be acquired</dd>
|
||||
<dt><code>remarks</code></dt> <dd>Any comments pertinent to the line. Supports <a target="_blank" href="https://commonmark.org/help/">Markdown</a>.</dd>
|
||||
<dt><code>meta.colour</code></dt> <dd>An <a target="_blank" href="https://developer.mozilla.org/en-US/docs/Web/CSS/color_value">HTML colour</a>. Changes the background colour of the line in Dougal's ‘Lines’ tab.</dd>
|
||||
<dt><code>source_line</code></dt> <dd>The source line number. This column should be repeated once per gun array.</dd>
|
||||
</dl>
|
||||
</p>
|
||||
<p>See an <a target="_blank" href="https://gitlab.com/-/snippets/4873650">example file</a> (<a title="Direct download" href="https://gitlab.com/-/snippets/4873650/raw/main/preplots-saillines-example.csv?inline=false"><v-icon dense small>mdi-paperclip</v-icon></a>)</p>
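<!--
Illustrative sample (hypothetical values) matching the field list above, with
source_line repeated for a two-array configuration:

sail_line,incr,ntba,remarks,meta.colour,source_line,source_line
1001,1,,,#80c0ff,2001,2002
1002,0,1,Not to be acquired per client instruction,,2003,2004
-->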
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</v-menu>
|
||||
</template>
|
||||
</v-select>
|
||||
|
||||
<v-text-field v-if="value.class == 'S'"
|
||||
class="mb-3"
|
||||
@@ -218,6 +245,7 @@ export default {
|
||||
{ text: "16 kiB", value: 1024*16 },
|
||||
{ text: "32 kiB", value: 1024*32 },
|
||||
],
|
||||
tooltip: false,
|
||||
};
|
||||
},
|
||||
|
||||
@@ -491,17 +519,18 @@ export default {
|
||||
methods: {
|
||||
|
||||
async getHead () {
|
||||
console.log("getHead", this.value?.path);
|
||||
if (this.value?.path) {
|
||||
const url = `/files/${this.value.path}`;
|
||||
const init = {
|
||||
text: true,
|
||||
headers: {
|
||||
"Range": `bytes=0-${this.sampleSize}`
|
||||
}
|
||||
};
|
||||
const head = await this.api([url, init]);
|
||||
return head?.substring(0, head.lastIndexOf("\n")) || "";
|
||||
const opts = {format: "text"};
|
||||
const head = await this.api([url, init, null, opts]);
|
||||
return typeof head === "string"
|
||||
? head?.substring(0, head.lastIndexOf("\n")) || ""
|
||||
: this.head ?? "";
|
||||
}
|
||||
return "";
|
||||
},
|
||||
|
||||
213
lib/www/client/source/src/components/server-status.vue
Normal file
@@ -0,0 +1,213 @@
|
||||
<template>
|
||||
<v-card max-width="800" max-height="600" class="mx-auto" style="overflow-y: auto;">
|
||||
<v-card-title class="headline">
|
||||
Server status – {{ status.hostname }}
|
||||
</v-card-title>
|
||||
<v-card-text>
|
||||
<v-expansion-panels accordion>
|
||||
<!-- System Info -->
|
||||
<v-expansion-panel>
|
||||
<v-expansion-panel-header>System Info</v-expansion-panel-header>
|
||||
<v-expansion-panel-content>
|
||||
<v-row>
|
||||
<v-col cols="6">
|
||||
<strong>Uptime:</strong> {{ formatUptime(status.uptime) }}
|
||||
</v-col>
|
||||
<v-col cols="6">
|
||||
<strong>Load:</strong> {{ status.loadavg[0].toFixed(2) }} / {{ status.loadavg[1].toFixed(2) }} / {{ status.loadavg[2].toFixed(2) }}
|
||||
<v-progress-linear
|
||||
:value="loadAvgPercent"
|
||||
:color="getLoadAvgColor(status.loadavg[0])"
|
||||
height="6"
|
||||
rounded
|
||||
></v-progress-linear>
|
||||
<div class="text-caption">
|
||||
1-min Load: {{ status.loadavg[0].toFixed(2) }} ({{ loadAvgPercent.toFixed(1) }}% of max)
|
||||
</div>
|
||||
</v-col>
|
||||
</v-row>
|
||||
</v-expansion-panel-content>
|
||||
</v-expansion-panel>
|
||||
|
||||
<!-- Memory -->
|
||||
<v-expansion-panel>
|
||||
<v-expansion-panel-header>Memory</v-expansion-panel-header>
|
||||
<v-expansion-panel-content>
|
||||
<v-progress-linear
|
||||
:value="memoryUsedPercent"
|
||||
:color="getProgressColor(memoryUsedPercent)"
|
||||
height="10"
|
||||
rounded
|
||||
></v-progress-linear>
|
||||
<div class="text-caption mt-2">
|
||||
Used: {{ formatBytes(status.memory.total - status.memory.free) }} / Total: {{ formatBytes(status.memory.total) }} ({{ memoryUsedPercent.toFixed(1) }}%)
|
||||
</div>
|
||||
</v-expansion-panel-content>
|
||||
</v-expansion-panel>
|
||||
|
||||
<!-- CPUs -->
|
||||
<v-expansion-panel>
|
||||
<v-expansion-panel-header>CPUs ({{ status.cpus.length }} cores)</v-expansion-panel-header>
|
||||
<v-expansion-panel-content>
|
||||
<v-row dense>
|
||||
<v-col v-for="(cpu, index) in status.cpus" :key="index" cols="12" sm="6">
|
||||
<v-card outlined class="pa-2">
|
||||
<div class="text-caption">Core {{ index + 1 }}: {{ cpu.model }} @ {{ cpu.speed }} MHz</div>
|
||||
<v-progress-linear
|
||||
:value="cpuUsagePercent(cpu)"
|
||||
:color="getProgressColor(cpuUsagePercent(cpu))"
|
||||
height="8"
|
||||
rounded
|
||||
></v-progress-linear>
|
||||
<div class="text-caption">
|
||||
Usage: {{ cpuUsagePercent(cpu).toFixed(1) }}% (Idle: {{ cpuIdlePercent(cpu).toFixed(1) }}%)
|
||||
</div>
|
||||
</v-card>
|
||||
</v-col>
|
||||
</v-row>
|
||||
</v-expansion-panel-content>
|
||||
</v-expansion-panel>
|
||||
|
||||
<!-- Network Interfaces -->
|
||||
<v-expansion-panel>
|
||||
<v-expansion-panel-header>Network Interfaces</v-expansion-panel-header>
|
||||
<v-expansion-panel-content>
|
||||
<v-list dense>
|
||||
<v-list-item v-for="(iface, name) in status.networkInterfaces" :key="name">
|
||||
<v-list-item-content>
|
||||
<v-list-item-title>{{ name }}</v-list-item-title>
|
||||
<v-list-item-subtitle v-for="(addr, idx) in iface" :key="idx">
|
||||
{{ addr.family }}: {{ addr.address }} (Netmask: {{ addr.netmask }})
|
||||
</v-list-item-subtitle>
|
||||
</v-list-item-content>
|
||||
</v-list-item>
|
||||
</v-list>
|
||||
</v-expansion-panel-content>
|
||||
</v-expansion-panel>
|
||||
|
||||
<!-- Storage -->
|
||||
<v-expansion-panel>
|
||||
<v-expansion-panel-header>Storage</v-expansion-panel-header>
|
||||
<v-expansion-panel-content>
|
||||
<!-- Root -->
|
||||
<div class="mb-4">
|
||||
<strong>Root (/):</strong>
|
||||
<v-progress-linear
|
||||
:value="status.storage.root.usedPercent"
|
||||
:color="getProgressColor(status.storage.root.usedPercent)"
|
||||
height="10"
|
||||
rounded
|
||||
></v-progress-linear>
|
||||
<div class="text-caption">
|
||||
Used: {{ formatBytes(status.storage.root.used) }} / Total: {{ formatBytes(status.storage.root.total) }} ({{ status.storage.root.usedPercent.toFixed(1) }}%)
|
||||
</div>
|
||||
</div>
|
||||
<!-- Data subfolders -->
|
||||
<div>
|
||||
<strong>Data:</strong>
|
||||
<v-expansion-panels flat>
|
||||
<v-expansion-panel v-for="(folder, name) in status.storage.data" :key="name">
|
||||
<v-expansion-panel-header disable-icon-rotate>{{ name }}</v-expansion-panel-header>
|
||||
<v-expansion-panel-content>
|
||||
<v-progress-linear
|
||||
:value="folder.usedPercent"
|
||||
:color="getProgressColor(folder.usedPercent)"
|
||||
height="10"
|
||||
rounded
|
||||
></v-progress-linear>
|
||||
<div class="text-caption">
|
||||
Used: {{ formatBytes(folder.used) }} / Total: {{ formatBytes(folder.total) }} ({{ folder.usedPercent.toFixed(1) }}%)
|
||||
</div>
|
||||
</v-expansion-panel-content>
|
||||
</v-expansion-panel>
|
||||
</v-expansion-panels>
|
||||
</div>
|
||||
</v-expansion-panel-content>
|
||||
</v-expansion-panel>
|
||||
|
||||
<!-- Database -->
|
||||
<v-expansion-panel>
|
||||
<v-expansion-panel-header>Database</v-expansion-panel-header>
|
||||
<v-expansion-panel-content>
|
||||
<div class="mb-2">
|
||||
<strong>Total Size:</strong> {{ formatBytes(status.database.size) }}
|
||||
</div>
|
||||
<v-list dense>
|
||||
<v-list-item v-for="(project, name) in status.database.projects" :key="name">
|
||||
<v-list-item-content>
|
||||
<v-list-item-title>{{ name }}</v-list-item-title>
|
||||
<v-progress-linear
|
||||
:value="project.percent"
|
||||
:color="getProgressColor(project.percent)"
|
||||
height="8"
|
||||
rounded
|
||||
></v-progress-linear>
|
||||
<v-list-item-subtitle>
|
||||
Size: {{ formatBytes(project.size) }} ({{ project.percent.toFixed(2) }}%)
|
||||
</v-list-item-subtitle>
|
||||
</v-list-item-content>
|
||||
</v-list-item>
|
||||
</v-list>
|
||||
</v-expansion-panel-content>
|
||||
</v-expansion-panel>
|
||||
</v-expansion-panels>
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
export default {
|
||||
name: "DougalServerStatus",
|
||||
props: {
|
||||
status: {
|
||||
type: Object,
|
||||
required: true
|
||||
}
|
||||
},
|
||||
computed: {
|
||||
memoryUsedPercent() {
|
||||
return ((this.status.memory.total - this.status.memory.free) / this.status.memory.total) * 100;
|
||||
},
|
||||
loadAvgPercent() {
|
||||
const maxLoad = this.status.cpus.length * 4; // Assume 4x cores as max for scaling
|
||||
return Math.min((this.status.loadavg[0] / maxLoad) * 100, 100); // Cap at 100%
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
getProgressColor(value) {
|
||||
if (value >= 80) return 'error'; // Red for 80–100%
|
||||
if (value >= 60) return 'warning'; // Yellow for 60–80%
|
||||
return 'success'; // Green for 0–60%
|
||||
},
|
||||
getLoadAvgColor(load) {
|
||||
const coreCount = this.status.cpus.length;
|
||||
if (load >= coreCount * 2) return 'error'; // Red for load ≥ 2x cores
|
||||
if (load >= coreCount) return 'warning'; // Yellow for load ≥ 1x cores but < 2x
|
||||
return 'success'; // Green for load < 1x cores
|
||||
},
|
||||
formatBytes(bytes) {
|
||||
if (bytes === 0) return '0 Bytes';
|
||||
const k = 1024;
|
||||
const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
||||
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
|
||||
},
|
||||
formatUptime(seconds) {
|
||||
const days = Math.floor(seconds / 86400);
|
||||
seconds %= 86400;
|
||||
const hours = Math.floor(seconds / 3600);
|
||||
seconds %= 3600;
|
||||
const minutes = Math.floor(seconds / 60);
|
||||
return `${days}d ${hours}h ${minutes}m`;
|
||||
},
|
||||
cpuUsagePercent(cpu) {
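// If `status.cpus` mirrors Node's os.cpus(), these times are cumulative since
// boot, so this and cpuIdlePercent below give an average over the host's
// uptime rather than an instantaneous reading (assumption about /diagnostics).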
|
||||
const total = cpu.times.user + cpu.times.nice + cpu.times.sys + cpu.times.idle + cpu.times.irq;
|
||||
return ((total - cpu.times.idle) / total) * 100;
|
||||
},
|
||||
cpuIdlePercent(cpu) {
|
||||
const total = cpu.times.user + cpu.times.nice + cpu.times.sys + cpu.times.idle + cpu.times.irq;
|
||||
return (cpu.times.idle / total) * 100;
|
||||
}
|
||||
}
|
||||
};
|
||||
</script>
|
||||
256
lib/www/client/source/src/components/user-settings.vue
Normal file
@@ -0,0 +1,256 @@
|
||||
<template>
|
||||
<v-card>
|
||||
<v-card-title>
|
||||
User {{ name }} <v-chip class="mx-3" small>{{id}}</v-chip>
|
||||
<v-chip v-if="self.id == value.id"
|
||||
small
|
||||
color="primary"
|
||||
>It's me!</v-chip>
|
||||
</v-card-title>
|
||||
<v-card-subtitle>User settings</v-card-subtitle>
|
||||
<v-card-text>
|
||||
<v-form>
|
||||
<!--
|
||||
<v-text-field
|
||||
label="User ID"
|
||||
hint="Unique user ID (read-only)"
|
||||
persistent-hint
|
||||
readonly
|
||||
disabled
|
||||
v-model="id"
|
||||
>
|
||||
</v-text-field>
|
||||
-->
|
||||
|
||||
<v-switch
|
||||
dense
|
||||
label="Active"
|
||||
:title="(self.id == value.id) ? 'You cannot make yourself inactive' : active ? 'Make this user inactive' : 'Make this user active'"
|
||||
:disabled="self.id == value.id"
|
||||
v-model="active"
|
||||
></v-switch>
|
||||
|
||||
<label class="mr-3 pt-5">Colour
|
||||
<v-menu v-model="colourMenu"
|
||||
:close-on-content-click="false"
|
||||
offset-y
|
||||
>
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
<v-btn
|
||||
:title="colour"
|
||||
dense
|
||||
small
|
||||
icon
|
||||
v-on="on"
|
||||
><v-icon :color="colour">mdi-palette</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
<v-color-picker
|
||||
dot-size="25"
|
||||
mode="hexa"
|
||||
swatches-max-height="200"
|
||||
v-model="colour"
|
||||
></v-color-picker>
|
||||
</v-menu>
|
||||
</label>
|
||||
|
||||
<v-text-field
|
||||
v-if="showIp || ip"
|
||||
label="IP address"
|
||||
hint="IP address or subnet specification for auto-login"
|
||||
v-model="ip"
|
||||
>
|
||||
</v-text-field>
|
||||
|
||||
<v-text-field
|
||||
v-if="showHost || host"
|
||||
label="Host name"
|
||||
hint="Hostname (for auto-login)"
|
||||
v-model="host"
|
||||
>
|
||||
</v-text-field>
|
||||
|
||||
<v-text-field
|
||||
label="Name"
|
||||
hint="User name"
|
||||
v-model="name"
|
||||
>
|
||||
</v-text-field>
|
||||
|
||||
<v-text-field
|
||||
v-if="showPasswordField"
|
||||
:type="visiblePassword ? 'text' : 'password'"
|
||||
:append-icon="visiblePassword ? 'mdi-eye' : 'mdi-eye-off'"
|
||||
@click:append="visiblePassword = !visiblePassword"
|
||||
label="Password"
|
||||
hint="User password"
|
||||
v-model="password"
|
||||
>
|
||||
</v-text-field>
|
||||
|
||||
<v-text-field
|
||||
label="Email"
|
||||
hint="Email address"
|
||||
v-model="email"
|
||||
>
|
||||
</v-text-field>
|
||||
|
||||
<v-textarea
|
||||
class="mb-5"
|
||||
label="Remarks"
|
||||
hint="User description (visible to the user)"
|
||||
auto-grow
|
||||
v-model="description"
|
||||
></v-textarea>
|
||||
|
||||
<dougal-organisations
|
||||
:self="self"
|
||||
:organisations.sync="organisations"
|
||||
></dougal-organisations>
|
||||
|
||||
</v-form>
|
||||
</v-card-text>
|
||||
<v-card-actions>
|
||||
<slot name="actions" v-bind="{ isValid, hasErrors, errors, dirty }"></slot>
|
||||
</v-card-actions>
|
||||
</v-card>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex';
|
||||
import { User } from '@/lib/user';
|
||||
|
||||
import DougalOrganisations from './organisations'
|
||||
|
||||
export default {
|
||||
name: "DougalUserSettings",
|
||||
|
||||
components: {
|
||||
DougalOrganisations
|
||||
},
|
||||
|
||||
props: {
|
||||
value: Object,
|
||||
self: Object, // User calling the dialogue
|
||||
|
||||
// The next three props determine whether the
|
||||
// ip, host, and password fields are shown even
|
||||
// when null / empty. If non-null, those fields
|
||||
// are always shown
|
||||
showIp: { type: Boolean, default: false },
|
||||
showHost: { type: Boolean, default: false },
|
||||
showPassword: { type: Boolean, default: false },
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
colourMenu: null,
|
||||
visiblePassword: false
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
id () { return this.value.id },
|
||||
|
||||
ip: {
|
||||
get () { return this.value.ip },
|
||||
set (v) { this.input("ip", v) }
|
||||
},
|
||||
|
||||
host: {
|
||||
get () { return this.value.host },
|
||||
set (v) { this.input("host", v) }
|
||||
},
|
||||
|
||||
name: {
|
||||
get () { return this.value.name },
|
||||
set (v) { this.input("name", v) }
|
||||
},
|
||||
|
||||
password: {
|
||||
get () { return this.value.password },
|
||||
set (v) { this.input("password", v) }
|
||||
},
|
||||
|
||||
active: {
|
||||
get () { return this.value.active },
|
||||
set (v) { this.input("active", v) }
|
||||
},
|
||||
|
||||
email: {
|
||||
get () { return this.value.email },
|
||||
set (v) { this.input("email", v) }
|
||||
},
|
||||
|
||||
colour: {
|
||||
get () { return this.value.colour },
|
||||
set (v) { this.input("colour", v) }
|
||||
},
|
||||
|
||||
description: {
|
||||
get () { return this.value.description },
|
||||
set (v) { this.input("description", v) }
|
||||
},
|
||||
|
||||
organisations: {
|
||||
get () { return this.value.organisations },
|
||||
set (v) { this.input("organisations", v) }
|
||||
},
|
||||
|
||||
errors () {
|
||||
return this.value.errors;
|
||||
},
|
||||
|
||||
hasErrors () {
|
||||
return !this.isValid;
|
||||
},
|
||||
|
||||
isValid () {
|
||||
return this.value.isValid;
|
||||
},
|
||||
|
||||
dirty () {
|
||||
return this.value?.dirty ?? false;
|
||||
},
|
||||
|
||||
showPasswordField () {
|
||||
return this.password || (this.showPassword &&
|
||||
!(this.showIp || this.ip || this.showHost || this.host));
|
||||
},
|
||||
|
||||
...mapGetters(['user', 'loading', 'serverEvent'])
|
||||
},
|
||||
|
||||
watch: {
|
||||
validationErrors () {
|
||||
this.$emit("update:errors", this.validationErrors);
|
||||
}
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
input (k, v) {
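// Clone the current User, apply the single change and emit the clone; every
// editable field's computed setter above funnels through here so the parent
// component stays the owner of the canonical value.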
|
||||
const user = new User(this.value);
|
||||
user[k] = v;
|
||||
this.$emit("input", user);
|
||||
},
|
||||
|
||||
reset () {
|
||||
},
|
||||
|
||||
save () {
|
||||
},
|
||||
|
||||
back () {
|
||||
this.$emit('close');
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.reset();
|
||||
}
|
||||
|
||||
}
|
||||
</script>
|
||||
1
lib/www/client/source/src/lib/binary
Symbolic link
@@ -0,0 +1 @@
|
||||
../../../../server/lib/binary
|
||||
150
lib/www/client/source/src/lib/deck.gl/DougalBinaryLoader.js
Normal file
@@ -0,0 +1,150 @@
|
||||
// src/lib/deck.gl/DougalBinaryLoader.js
|
||||
import { LoaderObject } from '@loaders.gl/core';
|
||||
import { DougalBinaryBundle } from '@dougal/binary';
|
||||
|
||||
async function cachedFetch(url, init, opts = {}) {
|
||||
let res; // The response
|
||||
let cache; // Potentially, a Cache API Cache instance
|
||||
let isCached;
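// Normalise opts.cache below: accept `true`, a cache name string or an options
// object, ending up with either a falsy value (no caching) or `{ name }`, with
// the name defaulting to "dougal".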
|
||||
|
||||
if (opts?.cache === true) {
|
||||
opts.cache = { name: "dougal" };
|
||||
} else if (typeof opts?.cache === "string") {
|
||||
opts.cache = { name: opts.cache };
|
||||
} else if (opts?.cache) {
|
||||
if (!(opts.cache instanceof Object)) {
|
||||
opts.cache = { name: "dougal" }
|
||||
} else if (!(opts.cache.name)) {
|
||||
opts.cache.name = "dougal";
|
||||
}
|
||||
}
|
||||
|
||||
if (opts?.cache && window.caches) {
|
||||
cache = await caches.open(opts.cache.name);
|
||||
res = await cache.match(url);
|
||||
isCached = !!res;
|
||||
}
|
||||
|
||||
if (!res) {
|
||||
res = await fetch(url, init);
|
||||
}
|
||||
|
||||
if (cache && !isCached) {
|
||||
cache.put(url, res.clone());
|
||||
}
|
||||
|
||||
return res;
|
||||
}
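// Illustrative usage (hypothetical URL): the first call goes to the network and
// populates the cache; later calls for the same URL are answered from it.
//
//   const res = await cachedFetch("/projects/demo/points.dbb", {}, { cache: true });
//   const buf = await res.arrayBuffer();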
|
||||
|
||||
const DougalBinaryLoader = {
|
||||
name: 'DougalBinaryBundle Loader',
|
||||
extensions: ['dbb'],
|
||||
mimeTypes: ['application/vnd.aaltronav.dougal+octet-stream'],
|
||||
parse: async (input, options) => {
|
||||
let arrayBuffer;
|
||||
if (typeof input === 'string') {
|
||||
// Input is URL, fetch with caching
|
||||
const response = await cachedFetch(input, options?.fetch, options);
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch: ${response.statusText}`);
|
||||
}
|
||||
arrayBuffer = await response.arrayBuffer();
|
||||
} else if (input instanceof ArrayBuffer) {
|
||||
arrayBuffer = input;
|
||||
} else {
|
||||
throw new Error('Invalid input: Expected URL string or ArrayBuffer');
|
||||
}
|
||||
|
||||
const bundle = DougalBinaryBundle.clone(arrayBuffer);
|
||||
|
||||
// Calculate total points
|
||||
const totalCount = bundle.chunks().reduce((acc, chunk) => acc + chunk.jCount, 0);
|
||||
|
||||
// Prepare positions (Float32Array: [lon1, lat1, lon2, lat2, ...])
|
||||
const positions = new Float32Array(totalCount * 2);
|
||||
|
||||
// Extract udv (assume constant across chunks)
|
||||
const udv = bundle.chunks()[0].udv;
|
||||
|
||||
// Prepare values as an array of TypedArrays
|
||||
const ΔelemCount = bundle.chunks()[0].ΔelemCount;
|
||||
const elemCount = bundle.chunks()[0].elemCount;
|
||||
const values = new Array(ΔelemCount + elemCount + 2);
|
||||
|
||||
// Initialize values arrays with correct types
|
||||
if (udv == 0) {
|
||||
for (let k = 0; k < values.length; k++) {
|
||||
values[k] = new (k === 0 ? Uint16Array : k === 1 ? Uint32Array : Uint8Array)(totalCount);
|
||||
}
|
||||
} else if (udv == 1) {
|
||||
for (let k = 0; k < values.length; k++) {
|
||||
values[k] = new (k === 0 ? Uint16Array : k === 1 ? Uint32Array : k === 2 ? Uint8Array : Uint16Array)(totalCount);
|
||||
}
|
||||
} else if (udv == 2) {
|
||||
for (let k = 0; k < values.length; k++) {
|
||||
values[k] = new (k === 0 ? Uint16Array : k === 1 ? Uint32Array : k === 2 ? BigUint64Array : Float32Array)(totalCount);
|
||||
}
|
||||
} else if (udv == 4) {
|
||||
for (let k = 0; k < values.length; k++) {
|
||||
values[k] = new (k === 0 ? Uint16Array : k === 1 ? Uint32Array : k === 2 ? Uint16Array : Float32Array)(totalCount);
|
||||
}
|
||||
} else {
|
||||
throw new Error(`Invalid udv: Expected 0, 1, 2, or 4; found ${udv}`);
|
||||
}
|
||||
|
||||
let offset = 0;
|
||||
for (const chunk of bundle.chunks()) {
|
||||
const λarray = chunk.elem(0);
|
||||
const φarray = chunk.elem(1);
|
||||
for (let i = 0; i < λarray.length; i++) {
|
||||
positions[offset * 2 + i * 2] = λarray[i];
|
||||
positions[offset * 2 + i * 2 + 1] = φarray[i];
|
||||
}
|
||||
|
||||
values[0].set(new Uint16Array(chunk.jCount).fill(chunk.i), offset);
|
||||
values[1].set(Uint32Array.from({ length: chunk.jCount }, (_, i) => chunk.j0 + i * chunk.Δj), offset);
|
||||
|
||||
for (let j = 0; j < ΔelemCount; j++) {
|
||||
values[2 + j].set(chunk.Δelem(j), offset);
|
||||
}
|
||||
for (let j = 2; j < elemCount; j++) {
|
||||
values[2 + ΔelemCount + j - 2].set(chunk.elem(j), offset);
|
||||
}
|
||||
|
||||
offset += chunk.jCount;
|
||||
}
|
||||
|
||||
console.log(`Parsed ${totalCount} points, ${values.length} value arrays, udv = ${udv}`);
|
||||
|
||||
const attributes = {
|
||||
getPosition: {
|
||||
value: positions,
|
||||
type: 'float32',
|
||||
size: 2
|
||||
},
|
||||
udv
|
||||
};
|
||||
|
||||
values.forEach((valArray, k) => {
|
||||
let value = valArray;
|
||||
if (valArray instanceof BigUint64Array) {
|
||||
value = Float64Array.from(valArray, v => Number(v));
|
||||
}
|
||||
attributes[`value${k}`] = {
|
||||
value,
|
||||
type: value instanceof Float64Array ? 'float64' :
|
||||
value instanceof Uint16Array ? 'uint16' :
|
||||
value instanceof Uint32Array ? 'uint32' : 'float32',
|
||||
size: 1
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
length: totalCount,
|
||||
attributes
|
||||
};
|
||||
},
|
||||
options: {} // Optional: Add custom options if needed
|
||||
};
|
||||
|
||||
export default DougalBinaryLoader;
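// Minimal usage sketch (illustrative names and URL, not part of this commit).
// deck.gl layers accept a `loaders` array and use this loader when `data` is a
// URL; the parsed result follows deck.gl's binary attribute convention
// ({ length, attributes }).
//
//   import { ScatterplotLayer } from '@deck.gl/layers';
//   import DougalBinaryLoader from './DougalBinaryLoader';
//
//   const layer = new ScatterplotLayer({
//     id: 'points',
//     data: '/api/projects/demo/points.dbb',
//     loaders: [DougalBinaryLoader],
//     getRadius: 3
//   });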
|
||||
144
lib/www/client/source/src/lib/deck.gl/DougalEventsLayer.js
Normal file
@@ -0,0 +1,144 @@
|
||||
// Ref.: https://deck.gl/docs/developer-guide/custom-layers/composite-layers
|
||||
import { CompositeLayer } from '@deck.gl/core';
|
||||
import { GeoJsonLayer, ColumnLayer } from '@deck.gl/layers';
|
||||
|
||||
class DougalEventsLayer extends CompositeLayer {
|
||||
static layerName = "DougalEventsLayer";
|
||||
|
||||
static defaultProps = {
|
||||
columnsZoom: 11, // Threshold zoom level for switching layers
|
||||
jitter: 0, // Add a small amount of jitter so that columns do not overlap.
|
||||
// GeoJsonLayer props
|
||||
getLineColor: [127, 65, 90],
|
||||
getFillColor: [127, 65, 90],
|
||||
getPointRadius: 2,
|
||||
radiusUnits: "pixels",
|
||||
pointRadiusMinPixels: 2,
|
||||
lineWidthMinPixels: 2,
|
||||
// ColumnLayer props
|
||||
getPosition: { type: 'accessor', value: d => d.geometry.coordinates },
|
||||
getElevation: { type: 'accessor', value: d => Math.min(Math.max(d.properties.remarks?.length || 10, 10), 200) },
|
||||
diskResolution: 20,
|
||||
radius: 5,
|
||||
radiusUnits: "pixels",
|
||||
radiusScale: 1,
|
||||
elevationScale: 1,
|
||||
filled: true,
|
||||
stroked: false,
|
||||
extruded: true,
|
||||
wireframe: false,
|
||||
material: true,
|
||||
getFillColor: [255, 0, 0, 200],
|
||||
getLineColor: [255, 0, 0, 200],
|
||||
getLineWidth: 2,
|
||||
pickable: true
|
||||
}
|
||||
|
||||
constructor(props) {
|
||||
super(props);
|
||||
this.uid = "el-" + Math.random().toString().slice(2);
|
||||
// Initialize state with current zoom
|
||||
this.state = {
|
||||
zoom: this.context?.viewport?.zoom || 0
|
||||
};
|
||||
}
|
||||
|
||||
shouldUpdateState({ changeFlags }) {
|
||||
// Always update if viewport changed (including zoom)
|
||||
if (changeFlags.viewportChanged) {
|
||||
return true;
|
||||
}
|
||||
return super.shouldUpdateState({ changeFlags });
|
||||
}
|
||||
|
||||
updateState({ props, oldProps, context, changeFlags }) {
|
||||
// Check if zoom has changed
|
||||
const newZoom = context.viewport?.zoom || 0;
|
||||
if (newZoom !== this.state.zoom) {
|
||||
this.setState({ zoom: newZoom });
|
||||
this.setNeedsRedraw(); // Trigger re-render of sublayers
|
||||
console.log(`Zoom changed to ${newZoom}, triggering redraw`);
|
||||
}
|
||||
}
|
||||
|
||||
getPickingInfo({ info, mode, sourceLayer }) {
|
||||
if (info.index >= 0) {
|
||||
info.object = {
|
||||
...info.object // Merge default picking info (GeoJSON feature or ColumnLayer object)
|
||||
};
|
||||
if (sourceLayer) {
|
||||
info.object.type = sourceLayer.constructor.layerName;
|
||||
}
|
||||
//console.log(`Picked ${info.object.type}, index ${info.index}`);
|
||||
}
|
||||
return info;
|
||||
}
|
||||
|
||||
renderLayers() {
|
||||
const { zoom } = this.state;
|
||||
const sublayers = [];
|
||||
|
||||
if (zoom >= this.props.columnsZoom) {
|
||||
// Render ColumnLayer at high zoom
|
||||
const data = Array.isArray(this.props.data) ? this.props.data : this.props.data.features || [];
|
||||
|
||||
const positionFn = this.props.jitter
|
||||
? (d, info) => {
|
||||
let pos;
|
||||
if (typeof this.props.getPosition == 'function') {
|
||||
pos = this.props.getPosition(d, info);
|
||||
} else {
|
||||
pos = this.props.getPosition;
|
||||
}
|
||||
return pos.map( i => i + (Math.random() - 0.5) * this.props.jitter )
|
||||
}
|
||||
: this.props.getPosition;
|
||||
|
||||
sublayers.push(
|
||||
new ColumnLayer(this.getSubLayerProps({
|
||||
id: `${this.uid}-column`,
|
||||
data,
|
||||
visible: this.props.visible,
|
||||
getPosition: positionFn,
|
||||
getElevation: this.props.getElevation,
|
||||
diskResolution: this.props.diskResolution,
|
||||
radius: this.props.radius,
|
||||
radiusUnits: this.props.radiusUnits,
|
||||
radiusScale: this.props.radiusScale,
|
||||
elevationScale: this.props.elevationScale,
|
||||
filled: this.props.filled,
|
||||
stroked: this.props.stroked,
|
||||
extruded: this.props.extruded,
|
||||
wireframe: this.props.wireframe,
|
||||
material: this.props.material,
|
||||
getFillColor: this.props.getFillColor,
|
||||
getLineColor: this.props.getLineColor,
|
||||
getLineWidth: this.props.getLineWidth,
|
||||
pickable: this.props.pickable
|
||||
}))
|
||||
);
|
||||
} else {
|
||||
// Render GeoJsonLayer at low zoom
|
||||
sublayers.push(
|
||||
new GeoJsonLayer(this.getSubLayerProps({
|
||||
id: `${this.uid}-geojson`,
|
||||
data: this.props.data,
|
||||
visible: this.props.visible,
|
||||
getLineColor: this.props.getLineColor,
|
||||
getFillColor: this.props.getFillColor,
|
||||
getPointRadius: this.props.getPointRadius,
|
||||
radiusUnits: this.props.radiusUnits,
|
||||
pointRadiusMinPixels: this.props.pointRadiusMinPixels,
|
||||
lineWidthMinPixels: this.props.lineWidthMinPixels,
|
||||
pickable: this.props.pickable
|
||||
}))
|
||||
);
|
||||
}
|
||||
|
||||
console.log(`Rendering ${sublayers.length} sublayer(s) at zoom ${zoom}`);
|
||||
|
||||
return sublayers;
|
||||
}
|
||||
}
|
||||
|
||||
export default DougalEventsLayer;
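// Usage sketch (illustrative props). Below `columnsZoom` the events render as a
// flat GeoJsonLayer; at or above it they render as extruded columns whose
// height scales with the length of each feature's remarks text.
//
//   const events = new DougalEventsLayer({
//     id: 'events',
//     data: eventsFeatureCollection,   // GeoJSON FeatureCollection or array of features
//     columnsZoom: 12,
//     jitter: 0.0001
//   });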
|
||||
108
lib/www/client/source/src/lib/deck.gl/DougalSequenceLayer.js
Normal file
@@ -0,0 +1,108 @@
|
||||
// Ref.: https://deck.gl/docs/developer-guide/custom-layers/layer-lifecycle
|
||||
import { ScatterplotLayer } from '@deck.gl/layers';
|
||||
|
||||
class DougalSequenceLayer extends ScatterplotLayer {
|
||||
static layerName = "DougalSequenceLayer";
|
||||
|
||||
static defaultProps = {
|
||||
...ScatterplotLayer.defaultProps,
|
||||
valueIndex: 0,
|
||||
radiusUnits: "pixels",
|
||||
radiusScale: 1,
|
||||
lineWidthUnits: "pixels",
|
||||
lineWidthScale: 1,
|
||||
stroked: false,
|
||||
filled: true,
|
||||
radiusMinPixels: 1,
|
||||
radiusMaxPixels: 50,
|
||||
lineWidthMinPixels: 1,
|
||||
lineWidthMaxPixels: 50,
|
||||
getPosition: { type: 'accessor', value: d => d.positions },
|
||||
getRadius: 5,
|
||||
getFillColor: [255, 0, 0, 200],
|
||||
getLineColor: [255, 0, 0, 200],
|
||||
getLineWidth: 2,
|
||||
pickable: true
|
||||
}
|
||||
|
||||
constructor(props) {
|
||||
super(props);
|
||||
}
|
||||
|
||||
initializeState(context) {
|
||||
super.initializeState(context);
|
||||
}
|
||||
|
||||
getPickingInfo({ info, mode }) {
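// Decode the picked point's packed attribute arrays back into named fields.
// The layout depends on `udv` (the bundle's data-version field):
//   0: preplot point (i, j, ntba flags)
//   1: preplot point plus sail line number
//   2: acquired/QC point (timestamp, position errors, gun/depth statistics,
//      mostly stored as scaled integers)
//   3: acquired point with position errors and crossline/inline offsets only
// (Summary inferred from the branches below, not an authoritative format spec.)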
|
||||
const index = info.index;
|
||||
if (index >= 0) {
|
||||
const d = this.props.data.attributes;
|
||||
if (d) {
|
||||
if (d.udv == 0) {
|
||||
info.object = {
|
||||
udv: d.udv,
|
||||
i: d.value0.value[index],
|
||||
j: d.value1.value[index],
|
||||
ntba: d.value2.value[index] & 0x01,
|
||||
sailline_ntba: d.value2.value[index] & 0x02
|
||||
};
|
||||
} else if (d.udv == 1) {
|
||||
info.object = {
|
||||
udv: d.udv,
|
||||
i: d.value0.value[index],
|
||||
j: d.value1.value[index],
|
||||
sailline: d.value3.value[index],
|
||||
ntba: d.value2.value[index] & 0x01 ? true : false,
|
||||
sailline_ntba: d.value2.value[index] & 0x02 ? true : false
|
||||
};
|
||||
} else if (d.udv == 2) {
|
||||
info.object = {
|
||||
udv: d.udv,
|
||||
i: d.value0.value[index],
|
||||
j: d.value1.value[index],
|
||||
ts: Number(d.value2.value[index]),
|
||||
εi: d.value3.value[index] / 100,
|
||||
εj: d.value4.value[index] / 100,
|
||||
delta_μ: d.value5.value[index] / 10,
|
||||
delta_σ: d.value6.value[index] / 10,
|
||||
delta_R: d.value7.value[index] / 10,
|
||||
press_μ: d.value8.value[index],
|
||||
press_σ: d.value9.value[index],
|
||||
press_R: d.value10.value[index],
|
||||
depth_μ: d.value11.value[index] / 10,
|
||||
depth_σ: d.value12.value[index] / 10,
|
||||
depth_R: d.value13.value[index] / 10,
|
||||
fill_μ: d.value14.value[index],
|
||||
fill_σ: d.value15.value[index],
|
||||
fill_R: d.value16.value[index],
|
||||
delay_μ: d.value17.value[index] / 10,
|
||||
delay_σ: d.value18.value[index] / 10,
|
||||
delay_R: d.value19.value[index] / 10,
|
||||
nofire: d.value20.value[index] >> 4,
|
||||
autofire: d.value20.value[index] & 0xf
|
||||
};
|
||||
} else if (d.udv == 3) {
|
||||
info.object = {
|
||||
udv: d.udv,
|
||||
i: d.value0.value[index],
|
||||
j: d.value1.value[index],
|
||||
ts: Number(d.value2.value[index]),
|
||||
εi: d.value3.value[index] / 100,
|
||||
εj: d.value4.value[index] / 100,
|
||||
co_i: d.value5.value[index] / 100,
|
||||
co_j: d.value6.value[index] / 100,
|
||||
}
|
||||
} else {
|
||||
console.warn(`Unknown udv value ${d.udv}. No picking info`);
|
||||
info.object = {};
|
||||
}
|
||||
console.log(`Picked sequence ${info.object.i}, point ${info.object.j}, udv ${info.object.udv}`);
|
||||
} else {
|
||||
console.log(`No data found index = ${index}`);
|
||||
}
|
||||
}
|
||||
return info;
|
||||
}
|
||||
}
|
||||
|
||||
export default DougalSequenceLayer;
|
||||
8
lib/www/client/source/src/lib/deck.gl/index.js
Normal file
@@ -0,0 +1,8 @@
|
||||
|
||||
import DougalSequenceLayer from './DougalSequenceLayer'
|
||||
import DougalEventsLayer from './DougalEventsLayer'
|
||||
|
||||
export {
|
||||
DougalSequenceLayer,
|
||||
DougalEventsLayer
|
||||
};
|
||||
47
lib/www/client/source/src/lib/durations.js
Normal file
@@ -0,0 +1,47 @@
|
||||
|
||||
function duration_to_ms(v) {
|
||||
if (v instanceof Object) {
|
||||
return (
|
||||
(v.days || 0) * 86400000 +
|
||||
(v.hours || 0) * 3600000 +
|
||||
(v.minutes || 0) * 60000 +
|
||||
(v.seconds || 0) * 1000 +
|
||||
(v.milliseconds || 0)
|
||||
);
|
||||
} else {
|
||||
return {
|
||||
days: 0,
|
||||
hours: 0,
|
||||
minutes: 0,
|
||||
seconds: 0,
|
||||
milliseconds: 0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function ms_to_duration(v) {
|
||||
const days = Math.floor(v / 86400000);
|
||||
v %= 86400000;
|
||||
const hours = Math.floor(v / 3600000);
|
||||
v %= 3600000;
|
||||
const minutes = Math.floor(v / 60000);
|
||||
v %= 60000;
|
||||
const seconds = Math.floor(v / 1000);
|
||||
const milliseconds = v % 1000;
|
||||
return { days, hours, minutes, seconds, milliseconds };
|
||||
}
|
||||
|
||||
function normalise_duration (v) {
|
||||
return ms_to_duration(duration_to_ms(v));
|
||||
}
|
||||
|
||||
function add_durations(a, b) {
|
||||
return ms_to_duration(duration_to_ms(a) + duration_to_ms(b));
|
||||
}
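// Illustrative example:
//   add_durations({ hours: 1, minutes: 30 }, { minutes: 45 })
//   // => { days: 0, hours: 2, minutes: 15, seconds: 0, milliseconds: 0 }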
|
||||
|
||||
export {
|
||||
duration_to_ms,
|
||||
ms_to_duration,
|
||||
normalise_duration,
|
||||
add_durations
|
||||
}
|
||||
97
lib/www/client/source/src/lib/user/User.js
Normal file
@@ -0,0 +1,97 @@
import { User as BaseUser } from '@dougal/user';

class User extends BaseUser {

  api   // Instance of Vuex api method
  dirty // Whether the values have changed since last saved

  constructor (data, client) {
    super (data);

    if (client) {
      this.api = client;
    } else if (data instanceof User) {
      this.api = data.api;
    }

    this.dirty = false;
    this.on("changed", () => this.dirty = true);
  }

  static async fromAPI (api, id) {
    if (id) {
      const url = `/user/${id}`;
      const res = await api([url]);
      return new User(res, api);
    } else {
      const url = `/user`;
      const res = await api([url]);
      return res?.map( row => new User(row, api) );
    }
  }

  /** Save this user to the server
   *
   * If this is a new user, the `api` parameter must be
   * supplied and this will result in a `POST` request.
   * For an existing user coming from the database,
   * `this.api` will be used for a `PUT` request.
   */
  async save (api) {
    if (this.api) {
      const url = `/user/${this.id}`;
      const init = {
        headers: {
          "Content-Type": "application/json"
        },
        method: "PUT",
        body: this.toJSON()
      };
      const res = await this.api([url, init]);
      if (res) {
        this.dirty = false;
        return new User(res, this.api);
      } else {
        // Something has gone wrong
        console.log("Something has gone wrong (PUT)");
      }
    } else if (api) {
      const url = `/user`;
      const init = {
        headers: {
          "Content-Type": "application/json"
        },
        method: "POST",
        body: this.toJSON()
      }
      const res = await api([url, init]);
      if (res) {
        return new User(res, api);
      } else {
        // Something has gone wrong
        console.log("Something has gone wrong (POST)");
      }
    } else {
      throw new Error("Don't know how to save this user");
    }
  }

  /** Delete this user from the server
   */
  async remove () {
    const url = `/user/${this.id}`;
    const init = {
      headers: {
        "Content-Type": "application/json"
      },
      method: "PUT",
      body: this.toJSON()
    };
    const res = await this.api([url, init]);
    console.log("remove RES", res);
  }

}

export default User;
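A rough sketch of how the class above is driven from a component that has the Vuex api action mapped, following the fromAPI/save signatures; the id and data values are placeholders.

const users = await User.fromAPI(this.api);      // GET /user, returns an array of User instances
const user  = await User.fromAPI(this.api, 42);  // GET /user/42 (42 is an illustrative id)
await user.save();                               // PUT /user/42 through the stored api method
await new User(data).save(this.api);             // POST /user for a user not yet on the server (data is a placeholder)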
5
lib/www/client/source/src/lib/user/index.js
Normal file
@@ -0,0 +1,5 @@
import User from './User'

export {
  User
}
@@ -4,7 +4,7 @@ import router from './router'
|
||||
import store from './store'
|
||||
import vuetify from './plugins/vuetify'
|
||||
import vueDebounce from 'vue-debounce'
|
||||
import { mapMutations } from 'vuex';
|
||||
import { mapMutations, mapActions } from 'vuex';
|
||||
import { markdown, markdownInline } from './lib/markdown';
|
||||
import { geometryAsString } from './lib/utils';
|
||||
import { mapGetters } from 'vuex';
|
||||
@@ -46,20 +46,33 @@ new Vue({
|
||||
|
||||
methods: {
|
||||
|
||||
async sleep (ms = 0) {
|
||||
return await new Promise( (resolve) => {
|
||||
setTimeout( resolve, ms );
|
||||
});
|
||||
},
|
||||
|
||||
markdown (value) {
|
||||
return typeof value == "string"
|
||||
? marked(value)
|
||||
: value;
|
||||
return markdown(value);
|
||||
},
|
||||
|
||||
markdownInline (value) {
|
||||
return markdownInline(value);
|
||||
},
|
||||
|
||||
showSnack(text, colour = "primary") {
|
||||
console.log("showSnack", text, colour);
|
||||
this.snackColour = colour;
|
||||
this.snackText = text;
|
||||
this.snack = true;
|
||||
this.$store.dispatch("showSnack", [text, colour]);
|
||||
},
|
||||
|
||||
sendJwt () {
|
||||
if (this.jwt) {
|
||||
this.ws.send(JSON.stringify({ jwt: this.jwt }));
|
||||
}
|
||||
},
|
||||
|
||||
initWs () {
|
||||
|
||||
if (this.ws) {
|
||||
console.log("WebSocket initWs already called");
|
||||
return;
|
||||
@@ -69,11 +82,12 @@ new Vue({
|
||||
|
||||
this.ws.addEventListener("message", (ev) => {
|
||||
const msg = JSON.parse(ev.data);
|
||||
this.setServerEvent(msg);
|
||||
this.processServerEvent(msg);
|
||||
});
|
||||
|
||||
this.ws.addEventListener("open", (ev) => {
|
||||
console.log("WebSocket connection open", ev);
|
||||
this.sendJwt()
|
||||
this.setServerConnectionState(true);
|
||||
});
|
||||
|
||||
@@ -99,14 +113,13 @@ new Vue({
|
||||
}
|
||||
|
||||
this.wsCredentialsCheckTimer = setInterval( () => {
|
||||
this.ws.send(JSON.stringify({
|
||||
jwt: this.jwt
|
||||
}));
|
||||
this.sendJwt();
|
||||
}, this.wsCredentialsCheckInterval);
|
||||
|
||||
},
|
||||
|
||||
...mapMutations(['setServerEvent', 'setServerConnectionState'])
|
||||
...mapMutations(['setServerConnectionState']),
|
||||
...mapActions(['processServerEvent'])
|
||||
|
||||
},
|
||||
|
||||
|
||||
35
lib/www/client/source/src/mixins/access.js
Normal file
@@ -0,0 +1,35 @@
import { mapGetters } from 'vuex';
import { Organisations } from '@dougal/organisations';

export default {
  name: "AccessMixin",

  computed: {
    ...mapGetters(['user', 'projectConfiguration'])
  },

  methods: {

    access (operation, organisations) {
      if (this.user) {
        if (!organisations) organisations = this.projectConfiguration?.organisations;
        if (!(organisations instanceof Organisations)) {
          organisations = new Organisations(organisations);
        }
        return this.user.canDo(operation, organisations);
      }
    },

    readaccess (item) {
      return this.access('read', item);
    },

    writeaccess (item) {
      return this.access('write', item);
    },

    adminaccess (item) {
      return this.access('edit', item);
    }
  }
}
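A minimal sketch of a component using this mixin; the templates changed below call these helpers as methods (e.g. v-if="writeaccess()"), and the component name here is hypothetical.

import AccessMixin from '@/mixins/access';

export default {
  name: "SomeProtectedView", // hypothetical
  mixins: [ AccessMixin ],
  computed: {
    canEdit () {
      // With no argument, access() falls back to the current project's organisations
      return this.writeaccess();
    }
  }
};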
@@ -17,8 +17,12 @@ import QC from '../views/QC.vue'
|
||||
import Graphs from '../views/Graphs.vue'
|
||||
import Map from '../views/Map.vue'
|
||||
import ProjectSettings from '../views/ProjectSettings.vue'
|
||||
import Users from '../views/Users.vue'
|
||||
import DougalAppBarExtensionProject from '../components/app-bar-extension-project'
|
||||
import DougalAppBarExtensionProjectList from '../components/app-bar-extension-project-list'
|
||||
import GroupList from '../views/GroupList.vue'
|
||||
import Group from '../views/Group.vue'
|
||||
|
||||
|
||||
Vue.use(VueRouter)
|
||||
|
||||
@@ -49,6 +53,19 @@ Vue.use(VueRouter)
|
||||
name: "equipment",
|
||||
component: () => import(/* webpackChunkName: "about" */ '../views/Equipment.vue')
|
||||
},
|
||||
{
|
||||
pathToRegexpOptions: { strict: true },
|
||||
path: "/users",
|
||||
redirect: "/users/"
|
||||
},
|
||||
{
|
||||
pathToRegexpOptions: { strict: true },
|
||||
name: "Users",
|
||||
path: "/users/",
|
||||
component: Users,
|
||||
meta: {
|
||||
}
|
||||
},
|
||||
{
|
||||
pathToRegexpOptions: { strict: true },
|
||||
path: "/login",
|
||||
@@ -103,7 +120,9 @@ Vue.use(VueRouter)
|
||||
{ text: "Projects", href: "/projects" },
|
||||
{
|
||||
text: (ctx) => ctx.$store.state.project.projectName || "…",
|
||||
href: (ctx) => `/projects/${ctx.$store.state.project.projectId || ctx.$route.params.project || ""}/`
|
||||
href: (ctx) => `/projects/${ctx.$store.state.project.projectId || ctx.$route.params.project || ""}/`,
|
||||
title: (ctx) => Object.entries(ctx.$store.getters.projectConfiguration?.organisations ?? {}).map( ([org, ops]) => `* ${org}: ${Object.entries(ops).filter( ([k, v]) => v ).map( ([k, v]) => k ).join(", ")}`).join("\n"),
|
||||
organisations: (ctx) => ctx.$store.getters.projectConfiguration?.organisations ?? {}
|
||||
}
|
||||
],
|
||||
appBarExtension: {
|
||||
@@ -139,6 +158,7 @@ Vue.use(VueRouter)
|
||||
component: SequenceList
|
||||
},
|
||||
{
|
||||
name: "shotlog",
|
||||
path: "sequences/:sequence",
|
||||
component: SequenceSummary
|
||||
},
|
||||
@@ -180,7 +200,57 @@ Vue.use(VueRouter)
|
||||
component: ProjectSettings
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
pathToRegexpOptions: { strict: true },
|
||||
path: "/groups",
|
||||
redirect: "/groups/"
|
||||
},
|
||||
{
|
||||
pathToRegexpOptions: { strict: true },
|
||||
path: "/groups/",
|
||||
component: GroupList,
|
||||
meta: {
|
||||
breadcrumbs: [
|
||||
{ text: "Comparisons", href: "/groups", disabled: true }
|
||||
],
|
||||
appBarExtension: {
|
||||
// component: DougalAppBarExtensionProjectList
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
pathToRegexpOptions: { strict: true },
|
||||
path: "/groups/:group",
|
||||
redirect: "/groups/:group/"
|
||||
},
|
||||
{
|
||||
pathToRegexpOptions: { strict: true },
|
||||
path: "/groups/:group/",
|
||||
name: "Group",
|
||||
component: Group,
|
||||
meta: {
|
||||
breadcrumbs: [
|
||||
{ text: "Comparisons", href: "/groups" },
|
||||
{ text: (ctx) => ctx.$route.params.group }
|
||||
/*
|
||||
{
|
||||
text: (ctx) => ctx.$store.state.project.projectName || "…",
|
||||
href: (ctx) => `/projects/${ctx.$store.state.project.projectId || ctx.$route.params.project || ""}/`,
|
||||
title: (ctx) => Object.entries(ctx.$store.getters.projectConfiguration?.organisations ?? {}).map( ([org, ops]) => `* ${org}: ${Object.entries(ops).filter( ([k, v]) => v ).map( ([k, v]) => k ).join(", ")}`).join("\n"),
|
||||
organisations: (ctx) => ctx.$store.getters.projectConfiguration?.organisations ?? {}
|
||||
}
|
||||
*/
|
||||
],
|
||||
/*
|
||||
appBarExtension: {
|
||||
component: DougalAppBarExtensionGroup
|
||||
}
|
||||
*/
|
||||
},
|
||||
children: [
|
||||
]
|
||||
},
|
||||
]
|
||||
|
||||
const router = new VueRouter({
|
||||
|
||||
@@ -1,5 +1,27 @@
|
||||
const ConcurrencyLimiter = require('@dougal/concurrency');
|
||||
|
||||
/** Make an API request
|
||||
*
|
||||
* @a resource {String} is the target URL
|
||||
* @a init {Object} are the Fetch options
|
||||
* @a cb {Function} is a callback function: (res, err) => {}
|
||||
* @a opts {Object} are other optional parameters:
|
||||
* opts.silent {Boolean} controls whether snack messages are shown on failure
|
||||
* opts.cache {Object} controls whether Cache API is used
|
||||
* opts.cache.name {String} is the name of the cache to use. Defaults to "dougal"
|
||||
*
|
||||
* If Cache API is used, this function looks for a matching request in the cache
|
||||
* first, and returns it if found. If not found, it makes the request over the API
|
||||
* and then stores it in the cache.
|
||||
*
|
||||
* `opts.cache` may also be `true` (defaults to using the "dougal" cache),
|
||||
* a cache name (equivalent to {name: "…"}) or even an empty object (equivalent
|
||||
* to `true`).
|
||||
*/
|
||||
async function api ({state, getters, commit, dispatch}, [resource, init = {}, cb, opts = {}]) {
|
||||
|
||||
const limiter = api.limiter || (api.limiter = new ConcurrencyLimiter(state.maxConcurrent));
|
||||
|
||||
async function api ({state, getters, commit, dispatch}, [resource, init = {}, cb]) {
|
||||
try {
|
||||
commit("queueRequest");
|
||||
if (init && init.hasOwnProperty("body")) {
|
||||
@@ -15,22 +37,89 @@ async function api ({state, getters, commit, dispatch}, [resource, init = {}, cb
|
||||
}
|
||||
// We also send Authorization: Bearer …
|
||||
if (getters.jwt) {
|
||||
init.credentials = "include";
|
||||
init.headers["Authorization"] = "Bearer "+getters.jwt;
|
||||
}
|
||||
if (typeof init.body != "string") {
|
||||
init.body = JSON.stringify(init.body);
|
||||
}
|
||||
const url = /^https?:\/\//i.test(resource) ? resource : (state.apiUrl + resource);
|
||||
const res = await fetch(url, init);
|
||||
if (typeof cb === 'function') {
|
||||
await cb(null, res);
|
||||
|
||||
let res; // The response
|
||||
let cache; // Potentially, a Cache API cache name
|
||||
let isCached;
|
||||
|
||||
if (opts?.cache === true) {
|
||||
opts.cache = { name: "dougal" };
|
||||
} else if (typeof opts?.cache === "string") {
|
||||
opts.cache = { name: opts.cache };
|
||||
} else if (opts?.cache) {
|
||||
if (!(opts.cache instanceof Object)) {
|
||||
opts.cache = { name: "dougal" }
|
||||
} else if (!(opts.cache.name)) {
|
||||
opts.cache.name = "dougal";
|
||||
}
|
||||
}
|
||||
|
||||
if (opts?.cache && window.caches) {
|
||||
cache = await caches.open(opts.cache.name);
|
||||
res = await cache.match(url);
|
||||
isCached = !!res;
|
||||
}
|
||||
|
||||
if (!res) {
|
||||
res = await limiter.enqueue(async () => await fetch(url, init));
|
||||
}
|
||||
|
||||
if (cache && !isCached && res.ok) { // Only cache successful responses
|
||||
cache.put(url, res.clone());
|
||||
}
|
||||
|
||||
if (typeof cb === 'function') {
|
||||
await cb(null, res.clone());
|
||||
}
|
||||
|
||||
if (res.headers.has("x-dougal-server")) {
|
||||
const header = res.headers.get("x-dougal-server")
|
||||
const entries = header
|
||||
.split(";")
|
||||
.map(part => part.trim())
|
||||
.filter(part => part.length > 0)
|
||||
.map(part => {
|
||||
const idx = part.indexOf('=');
|
||||
if (idx === -1) {
|
||||
return [part, true];
|
||||
}
|
||||
const key = part.slice(0, idx).trim();
|
||||
const value = part.slice(idx + 1).trim();
|
||||
return [key, value];
|
||||
});
|
||||
state.serverInfo = entries.length ? Object.fromEntries(entries) : {};
|
||||
|
||||
if (state.serverInfo["remote-frontend"]) {
|
||||
state.isGatewayReliable = ![ 502, 503, 504 ].includes(res.status);
|
||||
} else {
|
||||
state.isGatewayReliable = null;
|
||||
}
|
||||
}
|
||||
|
||||
if (res.ok) {
|
||||
|
||||
await dispatch('setCredentials');
|
||||
if (!isCached) {
|
||||
if (res.headers.has("x-jwt")) {
|
||||
await dispatch('setCredentials', { response: res });
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
return init.text ? (await res.text()) : (await res.json());
|
||||
if (!res.bodyUsed) { // It may have been consumed by a callback
|
||||
const validFormats = [ "arrayBuffer", "blob", "formData", "json", "text" ];
|
||||
if (opts.format && validFormats.includes(opts.format)) {
|
||||
return await res[opts.format]();
|
||||
} else {
|
||||
return await res.json();
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
if (err instanceof SyntaxError) {
|
||||
if (Number(res.headers.get("Content-Length")) === 0) {
|
||||
@@ -49,7 +138,9 @@ async function api ({state, getters, commit, dispatch}, [resource, init = {}, cb
|
||||
message = body.message;
|
||||
}
|
||||
}
|
||||
await dispatch('showSnack', [message, "warning"]);
|
||||
if (!opts?.silent) {
|
||||
await dispatch('showSnack', [message, "warning"]);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
if (err && err.name == "AbortError") return;
|
||||
|
||||
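A sketch of how a caller might dispatch this action with the options documented above; the URL is illustrative, and the silent and cache options follow the doc comment.

const summary = await this.$store.dispatch('api', [
  `/project/${pid}/summary`,         // resource (illustrative)
  {},                                // fetch init
  null,                              // no callback
  { silent: true, cache: "dougal" }  // no snack on failure; use the "dougal" Cache API cache
]);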
@@ -1,6 +1,9 @@
|
||||
const state = () => ({
|
||||
apiUrl: "/api",
|
||||
requestsCount: 0
|
||||
requestsCount: 0,
|
||||
maxConcurrent: 15,
|
||||
serverInfo: {}, // Contents of the last received X-Dougal-Server HTTP header
|
||||
isGatewayReliable: null, // Null unless behind a remote frontend; set to false once we see HTTP 502–504 responses
|
||||
});
|
||||
|
||||
export default state;
|
||||
|
||||
@@ -17,6 +17,7 @@ async function refreshEvents ({commit, dispatch, state, rootState}, [modifiedAft
|
||||
? `/project/${pid}/event/changes/${(new Date(modifiedAfter)).toISOString()}?unique=t`
|
||||
: `/project/${pid}/event`;
|
||||
const init = {
|
||||
cache: "reload",
|
||||
signal: state.loading.signal
|
||||
};
|
||||
const res = await dispatch('api', [url, init]);
|
||||
@@ -35,7 +36,7 @@ async function refreshEvents ({commit, dispatch, state, rootState}, [modifiedAft
|
||||
|
||||
/** Return a subset of events from state.events
|
||||
*/
|
||||
async function getEvents ({commit, dispatch, state}, [projectId, {sequence, date0, date1, sortBy, sortDesc, itemsPerPage, page, text, label}]) {
|
||||
async function getEvents ({commit, dispatch, state}, [projectId, {sequence, date0, date1, sortBy, sortDesc, itemsPerPage, page, text, label, excludeLabels}]) {
|
||||
let filteredEvents = [...state.events];
|
||||
|
||||
if (sortBy) {
|
||||
@@ -113,6 +114,10 @@ async function getEvents ({commit, dispatch, state}, [projectId, {sequence, date
|
||||
filteredEvents = filteredEvents.filter( event => event.labels?.includes(label) );
|
||||
}
|
||||
|
||||
if (excludeLabels) {
|
||||
filteredEvents = filteredEvents.filter( event => !excludeLabels?.some( label => event.labels?.includes(label) ) );
|
||||
}
|
||||
|
||||
const count = filteredEvents.length;
|
||||
|
||||
if (itemsPerPage && itemsPerPage > 0) {
|
||||
|
||||
@@ -11,6 +11,7 @@ async function refreshLabels ({commit, dispatch, state, rootState}) {
|
||||
const pid = rootState.project.projectId;
|
||||
const url = `/project/${pid}/label`;
|
||||
const init = {
|
||||
cache: "reload",
|
||||
signal: state.loading.signal
|
||||
};
|
||||
const res = await dispatch('api', [url, init]);
|
||||
|
||||
@@ -11,6 +11,7 @@ async function refreshLines ({commit, dispatch, state, rootState}) {
|
||||
const pid = rootState.project.projectId;
|
||||
const url = `/project/${pid}/line`;
|
||||
const init = {
|
||||
cache: "reload",
|
||||
signal: state.loading.signal
|
||||
};
|
||||
const res = await dispatch('api', [url, init]);
|
||||
|
||||
@@ -0,0 +1,83 @@
|
||||
import debounce from 'lodash/debounce';
|
||||
|
||||
function registerHandler({ commit }, { table, handler }) {
|
||||
commit('REGISTER_HANDLER', { table, handler });
|
||||
}
|
||||
|
||||
function unregisterHandler({ commit }, { table, handler }) {
|
||||
commit('UNREGISTER_HANDLER', { table, handler });
|
||||
}
|
||||
|
||||
function processServerEvent({ commit, dispatch, state, rootState }, message) {
|
||||
//console.log("processServerEvent", message);
|
||||
// Error handling for invalid messages
|
||||
if (!message) {
|
||||
console.error("processServerEvent called without arguments");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!message.channel) {
|
||||
console.error("processServerEvent message missing channel");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!message.payload) {
|
||||
console.error("processServerEvent message missing payload");
|
||||
return;
|
||||
}
|
||||
|
||||
if (message.payload.operation == "INSERT") {
|
||||
if (message.payload.new == null) {
|
||||
console.error("Expected payload.new to be non-null");
|
||||
return;
|
||||
}
|
||||
} else if (message.payload.operation == "UPDATE") {
|
||||
if (message.payload.old == null || message.payload.new == null) {
|
||||
console.error("Expected payload.old and paylaod.new to be non-null");
|
||||
return;
|
||||
}
|
||||
} else if (message.payload.operation == "DELETE") {
|
||||
if (message.payload.old == null) {
|
||||
console.error("Expected payload.old to be non-null");
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
console.warn(`Unrecognised operation: ${message.payload.operation}`);
|
||||
}
|
||||
|
||||
const table = message.channel; // or message.payload?.table;
|
||||
//console.log("table=", table);
|
||||
if (!table || !state.handlers[table] || state.handlers[table].length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Create a debounced runner per table if not exists
|
||||
if (!state.debouncedRunners) {
|
||||
state.debouncedRunners = {}; // Plain object; reactivity probably not needed here (otherwise use Vue.set)
|
||||
}
|
||||
if (!state.debouncedRunners[table]) {
|
||||
const config = {
|
||||
wait: 300, // min silence in ms
|
||||
maxWait: 1000, // max wait before force run, adjustable
|
||||
trailing: true,
|
||||
leading: false
|
||||
};
|
||||
state.debouncedRunners[table] = debounce((lastMessage) => {
|
||||
const context = { commit, dispatch, state: rootState, rootState }; // Approximate action context
|
||||
state.handlers[table].forEach(handler => {
|
||||
try {
|
||||
//console.log("Trying handler:", handler);
|
||||
handler(context, lastMessage);
|
||||
} catch (e) {
|
||||
console.error(`Error in handler for table ${table}:`, e);
|
||||
}
|
||||
});
|
||||
}, config.wait, { maxWait: config.maxWait });
|
||||
}
|
||||
|
||||
// Call the debounced function with the current message
|
||||
// Debounce will use the last call's argument if multiple
|
||||
state.debouncedRunners[table](message);
|
||||
}
|
||||
|
||||
export default { registerHandler, unregisterHandler, processServerEvent };
|
||||
|
||||
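For context, this is the registration pattern the views further down (e.g. Group.vue) use with the actions above; the table name here is illustrative.

const handler = (context, message) => {
  // Called at most once per debounce window with the last message for this table
  if (message.payload?.operation === "UPDATE") {
    // refresh whatever depends on this table
  }
};
this.$store.dispatch('registerHandler', { table: 'events', handler });
// ...and on teardown:
this.$store.dispatch('unregisterHandler', { table: 'events', handler });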
@@ -11,4 +11,29 @@ function setServerConnectionState (state, isConnected) {
|
||||
state.serverConnected = !!isConnected;
|
||||
}
|
||||
|
||||
export default { setServerEvent, clearServerEvent, setServerConnectionState };
|
||||
function REGISTER_HANDLER(state, { table, handler }) {
|
||||
if (!state.handlers[table]) {
|
||||
state.handlers[table] = [];
|
||||
}
|
||||
if (!state.handlers[table].includes(handler)) {
|
||||
state.handlers[table].push(handler);
|
||||
}
|
||||
}
|
||||
|
||||
function UNREGISTER_HANDLER(state, { table, handler }) {
|
||||
if (state.handlers[table]) {
|
||||
const handlerIndex = state.handlers[table].findIndex(el => el === handler);
|
||||
if (handlerIndex != -1) {
|
||||
state.handlers[table].splice(handlerIndex, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export default {
|
||||
setServerEvent,
|
||||
clearServerEvent,
|
||||
setServerConnectionState,
|
||||
REGISTER_HANDLER,
|
||||
UNREGISTER_HANDLER
|
||||
};
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
const state = () => ({
|
||||
serverEvent: null,
|
||||
serverConnected: false
|
||||
serverConnected: false,
|
||||
handlers: {}, // table: array of functions (each fn receives { commit, dispatch, state, rootState, message })
|
||||
});
|
||||
|
||||
export default state;
|
||||
|
||||
@@ -11,6 +11,7 @@ async function refreshPlan ({commit, dispatch, state, rootState}) {
|
||||
const pid = rootState.project.projectId;
|
||||
const url = `/project/${pid}/plan`;
|
||||
const init = {
|
||||
cache: "reload",
|
||||
signal: state.loading.signal
|
||||
};
|
||||
const res = await dispatch('api', [url, init]);
|
||||
|
||||
@@ -1,18 +1,47 @@
|
||||
|
||||
|
||||
function transform (item) {
|
||||
item.ts0 = new Date(item.ts0);
|
||||
item.ts1 = new Date(item.ts1);
|
||||
return item;
|
||||
const newItem = {...item}
|
||||
newItem.ts0 = new Date(newItem.ts0);
|
||||
newItem.ts1 = new Date(newItem.ts1);
|
||||
return newItem;
|
||||
}
|
||||
|
||||
// ATTENTION: This relies on the new planner endpoint
|
||||
// as per issue #281.
|
||||
|
||||
function setRemarks (state, remarks) {
|
||||
state.remarks = remarks;
|
||||
}
|
||||
|
||||
function setSequence (state, sequence) {
|
||||
state.sequences.push(Object.freeze(transform(sequence)));
|
||||
}
|
||||
|
||||
function deleteSequence (state, sequence) {
|
||||
const seq = transform(sequence)
|
||||
const idx = state.sequences?.findIndex( s => Object.keys(seq).every( k => JSON.stringify(s[k]) == JSON.stringify(seq[k]) ));
|
||||
if (idx != -1) {
|
||||
state.sequences.splice(idx, 1)
|
||||
}
|
||||
}
|
||||
|
||||
function replaceSequence (state, [oldSequence, newSequence]) {
|
||||
console.log("replaceSequence", oldSequence, newSequence);
|
||||
const seq = transform(oldSequence)
|
||||
const idx = state.sequences?.findIndex( s => Object.keys(seq).every( k => JSON.stringify(s[k]) == JSON.stringify(seq[k]) ));
|
||||
console.log("idx", idx);
|
||||
if (idx != -1) {
|
||||
state.sequences.splice(idx, 1, transform(newSequence))
|
||||
console.log("spliced in");
|
||||
}
|
||||
}
|
||||
|
||||
function setPlan (state, plan) {
|
||||
// We don't need or want the planned sequences array to be reactive
|
||||
state.sequences = Object.freeze(plan.sequences.map(transform));
|
||||
state.remarks = plan.remarks;
|
||||
state.sequences = [];
|
||||
plan.sequences.forEach( sequence => setSequence(state, sequence) );
|
||||
setRemarks(state, plan.remarks);
|
||||
}
|
||||
|
||||
function setPlanLoading (state, abortController = new AbortController()) {
|
||||
@@ -51,6 +80,10 @@ function abortPlanLoading (state) {
|
||||
}
|
||||
|
||||
export default {
|
||||
setRemarks,
|
||||
setSequence,
|
||||
deleteSequence,
|
||||
replaceSequence,
|
||||
setPlan,
|
||||
setPlanLoading,
|
||||
clearPlanLoading,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
const state = () => ({
|
||||
sequences: Object.freeze([]),
|
||||
sequences: [],
|
||||
remarks: null,
|
||||
loading: null,
|
||||
timestamp: null,
|
||||
|
||||
@@ -1,7 +1,13 @@
|
||||
|
||||
async function getProject ({commit, dispatch}, projectId) {
|
||||
if (projectId == null) {
|
||||
console.log(`Skipping call to getProject(${projectId})`);
|
||||
return;
|
||||
}
|
||||
|
||||
const init = {
|
||||
headers: {
|
||||
cache: "reload",
|
||||
"If-None-Match": "" // Ensure we get a fresh response
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1,25 +1,55 @@
|
||||
import * as d3a from 'd3-array';
|
||||
import { duration_to_ms, ms_to_duration, normalise_duration, add_durations } from '@/lib/durations';
|
||||
|
||||
/** Fetch projects from server
|
||||
*/
|
||||
async function refreshProjects ({commit, dispatch, state, rootState}) {
|
||||
|
||||
async function getSummary (project) {
|
||||
const url = `/project/${project.pid}/summary`;
|
||||
const init = {};
|
||||
const summary = await dispatch('api', [url, init, null, {silent:true}]);
|
||||
if (summary) {
|
||||
return {...project, ...summary};
|
||||
} else {
|
||||
return project;
|
||||
}
|
||||
}
|
||||
|
||||
if (state.loading) {
|
||||
commit('abortProjectsLoading');
|
||||
}
|
||||
|
||||
commit('setProjectsLoading');
|
||||
const tstamp = new Date();
|
||||
const pid = rootState.project.projectId;
|
||||
const url = `/project`;
|
||||
const init = {
|
||||
cache: "reload",
|
||||
signal: state.loading.signal
|
||||
};
|
||||
const res = await dispatch('api', [url, init]);
|
||||
const res = await dispatch('api', [url, init, null, {silent:true}]);
|
||||
|
||||
if (res) {
|
||||
commit('setProjects', res);
|
||||
commit('setProjectsTimestamp');
|
||||
|
||||
let projects;
|
||||
|
||||
if (res.some( project => project.pid == null )) {
|
||||
console.warn("At least one project found with no PID!");
|
||||
projects = res.filter( project => project.pid != null );
|
||||
} else {
|
||||
projects = res;
|
||||
}
|
||||
|
||||
commit('setProjects', projects); // First without summaries
|
||||
commit('setProjectsTimestamp', tstamp);
|
||||
|
||||
projects = await Promise.all(projects.map( getSummary ));
|
||||
|
||||
commit('setProjects', projects); // Then with summaries
|
||||
}
|
||||
commit('clearProjectsLoading');
|
||||
dispatch('prepareGroups');
|
||||
}
|
||||
|
||||
/** Return a subset of projects from state.projects
|
||||
@@ -117,4 +147,83 @@ async function getProjects ({commit, dispatch, state}, [{pid, name, schema, grou
|
||||
return {projects: filteredProjects, count};
|
||||
}
|
||||
|
||||
export default { refreshProjects, getProjects };
|
||||
|
||||
|
||||
async function prepareGroups ({commit, dispatch, state, rootState}) {
|
||||
const groups = {};
|
||||
|
||||
for (const project of state.projects) {
|
||||
|
||||
if (!project.prod_distance) {
|
||||
// This project has no production data (either not started yet
|
||||
// or production data has not been imported) so we skip it.
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!project.prod_duration.days) {
|
||||
project.prod_duration = normalise_duration(project.prod_duration);
|
||||
}
|
||||
|
||||
for (const name of project.groups) {
|
||||
if (!(name in groups)) {
|
||||
groups[name] = {
|
||||
group: name,
|
||||
num_projects: 0,
|
||||
lines: 0,
|
||||
points: 0,
|
||||
sequences: 0,
|
||||
// Shots:
|
||||
prime: 0,
|
||||
other: 0,
|
||||
ntba: 0,
|
||||
prod_duration: {
|
||||
days: 0,
|
||||
hours: 0,
|
||||
minutes: 0,
|
||||
seconds: 0,
|
||||
milliseconds: 0
|
||||
},
|
||||
prod_distance: 0,
|
||||
shooting_rate: [],
|
||||
projects: []
|
||||
};
|
||||
}
|
||||
const group = groups[name];
|
||||
|
||||
group.num_projects++;
|
||||
group.lines = Math.max(group.lines, project.lines); // In case preplots changed
|
||||
group.points = Math.max(group.points, project.total); // Idem
|
||||
group.sequences += project.seq_final;
|
||||
group.prime += project.prime;
|
||||
group.other += project.other;
|
||||
//group.ntba += project.ntba;
|
||||
group.prod_duration = add_durations(group.prod_duration, project.prod_duration);
|
||||
group.prod_distance += project.prod_distance;
|
||||
group.shooting_rate.push(project.shooting_rate);
|
||||
group.projects.push(project);
|
||||
}
|
||||
}
|
||||
|
||||
const grouplist = [];
|
||||
for (const group of Object.values(groups)) {
|
||||
group.shooting_rate_mean = d3a.mean(group.shooting_rate);
|
||||
group.shooting_rate_sd = d3a.deviation(group.shooting_rate);
|
||||
delete group.shooting_rate;
|
||||
|
||||
grouplist.push(group);
|
||||
}
|
||||
|
||||
commit('setGroups', grouplist);
|
||||
|
||||
}
|
||||
|
||||
async function getGroups({commit, dispatch, state, rootState}) {
|
||||
if (!state.groups.length) {
|
||||
await dispatch('refreshProjects');
|
||||
}
|
||||
|
||||
return state.groups;
|
||||
}
|
||||
|
||||
|
||||
export default { refreshProjects, getProjects, prepareGroups, getGroups };
|
||||
|
||||
@@ -3,7 +3,7 @@ function projects (state) {
|
||||
return state.projects;
|
||||
}
|
||||
|
||||
function projectGroups (state) {
|
||||
function projectGroupNames (state) {
|
||||
return [...new Set(state.projects.map(i => i.groups).flat())].sort();
|
||||
}
|
||||
|
||||
@@ -15,4 +15,8 @@ function projectsLoading (state) {
|
||||
return !!state.loading;
|
||||
}
|
||||
|
||||
export default { projects, projectGroups, projectCount, projectsLoading };
|
||||
function groups (state) {
|
||||
return state.groups;
|
||||
}
|
||||
|
||||
export default { projects, projectGroupNames, projectCount, projectsLoading, groups };
|
||||
|
||||
@@ -39,10 +39,15 @@ function abortProjectsLoading (state) {
|
||||
state.loading = null;
|
||||
}
|
||||
|
||||
function setGroups (state, groups) {
|
||||
state.groups = Object.freeze(groups);
|
||||
}
|
||||
|
||||
export default {
|
||||
setProjects,
|
||||
setProjectsLoading,
|
||||
clearProjectsLoading,
|
||||
setProjectsTimestamp,
|
||||
setProjectsETag
|
||||
setProjectsETag,
|
||||
setGroups
|
||||
};
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const state = () => ({
|
||||
projects: Object.freeze([]),
|
||||
groups: Object.freeze([]),
|
||||
loading: null,
|
||||
timestamp: null,
|
||||
etag: null,
|
||||
|
||||
@@ -11,6 +11,7 @@ async function refreshSequences ({commit, dispatch, state, rootState}) {
|
||||
const pid = rootState.project.projectId;
|
||||
const url = `/project/${pid}/sequence?files=true`;
|
||||
const init = {
|
||||
cache: "reload",
|
||||
signal: state.loading.signal
|
||||
};
|
||||
const res = await dispatch('api', [url, init]);
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import jwt_decode from 'jwt-decode';
|
||||
import { User } from '@/lib/user';
|
||||
|
||||
async function login ({commit, dispatch}, loginRequest) {
|
||||
async function login ({ commit, dispatch }, loginRequest) {
|
||||
const url = "/login";
|
||||
const init = {
|
||||
method: "POST",
|
||||
@@ -8,93 +9,86 @@ async function login ({commit, dispatch}, loginRequest) {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: loginRequest
|
||||
};
|
||||
|
||||
const callback = async (err, res) => {
|
||||
if (!err && res) {
|
||||
const { token } = (await res.json());
|
||||
await dispatch('setCredentials', {token});
|
||||
}
|
||||
}
|
||||
const res = await dispatch('api', [url, init]);
|
||||
if (res && res.ok) {
|
||||
await dispatch('setCredentials', {force: true});
|
||||
await dispatch('loadUserPreferences');
|
||||
}
|
||||
|
||||
await dispatch('api', [url, init, callback]);
|
||||
await dispatch('loadUserPreferences');
|
||||
}
|
||||
|
||||
async function logout ({commit, dispatch}) {
|
||||
commit('setCookie', null);
|
||||
async function logout ({ commit, dispatch }) {
|
||||
commit('setToken', null);
|
||||
commit('setUser', null);
|
||||
// Should delete JWT cookie
|
||||
await dispatch('api', ["/logout"]);
|
||||
|
||||
// Clear preferences
|
||||
commit('setPreferences', {});
|
||||
}
|
||||
|
||||
function browserCookie (state) {
|
||||
return document.cookie.split(/; */).find(i => /^JWT=.+/.test(i));
|
||||
}
|
||||
function setCredentials({ state, commit, getters, dispatch, rootState }, { force, token, response } = {}) {
|
||||
try {
|
||||
let tokenValue = token;
|
||||
|
||||
function cookieChanged (cookie) {
|
||||
return browserCookie != cookie;
|
||||
}
|
||||
if (!tokenValue && response?.headers?.get('x-jwt')) {
|
||||
tokenValue = response.headers.get('x-jwt');
|
||||
}
|
||||
|
||||
function setCredentials ({state, commit, getters, dispatch}, {force, token} = {}) {
|
||||
if (token || force || cookieChanged(state.cookie)) {
|
||||
try {
|
||||
const cookie = browserCookie();
|
||||
const decoded = (token ?? cookie) ? jwt_decode(token ?? cookie.split("=")[1]) : null;
|
||||
commit('setCookie', (cookie ?? (token && ("JWT="+token))) || undefined);
|
||||
commit('setUser', decoded);
|
||||
} catch (err) {
|
||||
if (err.name == "InvalidTokenError") {
|
||||
console.warn("Failed to decode", browserCookie());
|
||||
} else {
|
||||
console.error("setCredentials", err);
|
||||
}
|
||||
if (!tokenValue) {
|
||||
console.log('No JWT found in token or response');
|
||||
return;
|
||||
}
|
||||
|
||||
if (force || tokenValue !== getters.jwt) {
|
||||
const decoded = jwt_decode(tokenValue);
|
||||
commit('setToken', tokenValue);
|
||||
commit('setUser', decoded ? new User(decoded, rootState.api.api) : null);
|
||||
commit('setCookie', {name: "JWT", value: tokenValue, expires: (decoded.exp??0)*1000});
|
||||
|
||||
console.log('Credentials refreshed at', new Date().toISOString());
|
||||
} else {
|
||||
console.log('JWT unchanged, skipping update');
|
||||
}
|
||||
} catch (err) {
|
||||
console.error('setCredentials error:', err.message, 'token:', token, 'response:', response?.headers?.get('x-jwt'));
|
||||
if (err.name === 'InvalidTokenError') {
|
||||
commit('setToken', null);
|
||||
commit('setUser', null);
|
||||
commit('clearCookie', "JWT")
|
||||
}
|
||||
}
|
||||
dispatch('loadUserPreferences');
|
||||
}
|
||||
|
||||
/**
|
||||
* Save user preferences to localStorage and store.
|
||||
*
|
||||
* User preferences are identified by a key that gets
|
||||
* prefixed with the user name and role. The value can
|
||||
* be anything that JSON.stringify can parse.
|
||||
*/
|
||||
function saveUserPreference ({state, commit}, [key, value]) {
|
||||
const k = `${state.user?.name}.${state.user?.role}.${key}`;
|
||||
|
||||
function saveUserPreference({ state, commit }, [key, value]) {
|
||||
const k = `${state.user?.id}.${key}`;
|
||||
if (value !== undefined) {
|
||||
localStorage.setItem(k, JSON.stringify(value));
|
||||
|
||||
const preferences = state.preferences;
|
||||
preferences[key] = value;
|
||||
const preferences = { ...state.preferences, [key]: value };
|
||||
commit('setPreferences', preferences);
|
||||
} else {
|
||||
localStorage.removeItem(k);
|
||||
|
||||
const preferences = state.preferences;
|
||||
const preferences = { ...state.preferences };
|
||||
delete preferences[key];
|
||||
commit('setPreferences', preferences);
|
||||
}
|
||||
}
|
||||
|
||||
async function loadUserPreferences ({state, commit}) {
|
||||
// Get all keys which are of interest to us
|
||||
const prefix = `${state.user?.name}.${state.user?.role}`;
|
||||
const keys = Object.keys(localStorage).filter( k => k.startsWith(prefix) );
|
||||
|
||||
// Build the preferences object
|
||||
async function loadUserPreferences({ state, commit }) {
|
||||
const prefix = `${state.user?.id}`;
|
||||
const keys = Object.keys(localStorage).filter(k => k.startsWith(prefix));
|
||||
const preferences = {};
|
||||
keys.map(str => {
|
||||
keys.forEach(str => {
|
||||
const value = JSON.parse(localStorage.getItem(str));
|
||||
const key = str.split(".").slice(2).join(".");
|
||||
preferences[key] = value;
|
||||
});
|
||||
|
||||
// Commit it
|
||||
commit('setPreferences', preferences);
|
||||
}
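A short sketch of the preference round trip through the actions above; the key and value are made up, and keys are namespaced as `${user.id}.${key}` in localStorage.

await this.$store.dispatch('saveUserPreference', ['itemsPerPage', 25]); // persist and update the store
this.$store.getters.preferences.itemsPerPage;                           // => 25
await this.$store.dispatch('saveUserPreference', ['itemsPerPage']);     // omitting the value removes it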
|
||||
|
||||
|
||||
export default {
|
||||
login,
|
||||
logout,
|
||||
|
||||
@@ -4,21 +4,11 @@ function user (state) {
|
||||
}
|
||||
|
||||
function jwt (state) {
|
||||
if (state.cookie?.startsWith("JWT=")) {
|
||||
return state.cookie.substring(4);
|
||||
}
|
||||
}
|
||||
|
||||
function writeaccess (state) {
|
||||
return state.user && ["user", "admin"].includes(state.user.role);
|
||||
}
|
||||
|
||||
function adminaccess (state) {
|
||||
return state.user && state.user.role == "admin";
|
||||
return state.token;
|
||||
}
|
||||
|
||||
function preferences (state) {
|
||||
return state.preferences;
|
||||
}
|
||||
|
||||
export default { user, jwt, writeaccess, adminaccess, preferences };
|
||||
export default { user, jwt, preferences };
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
|
||||
function setCookie (state, cookie) {
|
||||
state.cookie = cookie;
|
||||
function setToken (state, token) {
|
||||
state.token = token;
|
||||
if (token) {
|
||||
localStorage?.setItem("jwt", token);
|
||||
} else {
|
||||
localStorage?.removeItem("jwt");
|
||||
}
|
||||
}
|
||||
|
||||
function setUser (state, user) {
|
||||
@@ -11,4 +16,18 @@ function setPreferences (state, preferences) {
|
||||
state.preferences = preferences;
|
||||
}
|
||||
|
||||
export default { setCookie, setUser, setPreferences };
|
||||
function setCookie (state, opts = {}) {
|
||||
const name = opts.name ?? "JWT";
|
||||
const value = opts.value ?? "";
|
||||
const expires = opts.expires ? (new Date(opts.expires)) : (new Date(0));
|
||||
const path = opts.path ?? "/";
|
||||
const sameSite = opts.sameSite ?? "Lax";
|
||||
|
||||
document.cookie = `${name}=${value};path=${path};SameSite=${sameSite};expires=${expires.toUTCString()}`;
|
||||
}
|
||||
|
||||
function clearCookie (state, name) {
|
||||
setCookie(state, {name});
|
||||
}
|
||||
|
||||
export default { setToken, setUser, setPreferences, setCookie, clearCookie };
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
const state = () => ({
|
||||
cookie: null,
|
||||
token: localStorage?.getItem("jwt") ?? null,
|
||||
user: null,
|
||||
preferences: {}
|
||||
});
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
@input="closeDialog"
|
||||
>
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
<v-btn v-if="writeaccess"
|
||||
<v-btn v-if="writeaccess()"
|
||||
small
|
||||
color="primary"
|
||||
v-bind="attrs"
|
||||
@@ -182,7 +182,7 @@
|
||||
</v-container>
|
||||
</v-card-text>
|
||||
<v-card-actions>
|
||||
<v-btn v-if="writeaccess"
|
||||
<v-btn v-if="writeaccess()"
|
||||
small
|
||||
text
|
||||
color="primary"
|
||||
@@ -205,7 +205,7 @@
|
||||
</v-btn>
|
||||
</v-btn-toggle>
|
||||
<v-spacer></v-spacer>
|
||||
<v-btn v-if="writeaccess"
|
||||
<v-btn v-if="writeaccess()"
|
||||
small
|
||||
dark
|
||||
color="red"
|
||||
@@ -247,7 +247,7 @@
|
||||
</v-card-text>
|
||||
<v-card-actions>
|
||||
<v-spacer></v-spacer>
|
||||
<v-btn v-if="writeaccess"
|
||||
<v-btn v-if="writeaccess()"
|
||||
small
|
||||
dark
|
||||
color="red"
|
||||
@@ -303,10 +303,15 @@
|
||||
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex';
|
||||
import AccessMixin from '@/mixins/access';
|
||||
|
||||
export default {
|
||||
name: "Equipment",
|
||||
|
||||
mixins: [
|
||||
AccessMixin
|
||||
],
|
||||
|
||||
data () {
|
||||
return {
|
||||
latest: [],
|
||||
@@ -395,7 +400,7 @@ export default {
|
||||
return null;
|
||||
},
|
||||
|
||||
...mapGetters(['user', 'writeaccess', 'loading', 'serverEvent'])
|
||||
...mapGetters(['user', 'loading', 'serverEvent'])
|
||||
|
||||
},
|
||||
|
||||
|
||||
@@ -91,7 +91,7 @@ export default {
|
||||
},
|
||||
|
||||
async refresh () {
|
||||
const text = await this.api([`/rss/?remote=${atob(this.$route.params.source)}`, {text:true}]);
|
||||
const text = await this.api([`/rss/?remote=${atob(this.$route.params.source)}`, {format:"text"}]);
|
||||
try {
|
||||
this.feed = this.parse(text);
|
||||
} catch (err) {
|
||||
|
||||
@@ -240,7 +240,7 @@ export default {
|
||||
return this.sequences[0]?.sequence;
|
||||
},
|
||||
|
||||
...mapGetters(['user', 'preferences', 'writeaccess', 'loading', 'serverEvent'])
|
||||
...mapGetters(['user', 'preferences', 'loading', 'serverEvent'])
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
339
lib/www/client/source/src/views/Group.vue
Normal file
@@ -0,0 +1,339 @@
|
||||
<template>
|
||||
<dougal-group-map v-if="mapView"
|
||||
:baseline="baseline"
|
||||
:monitor="monitor"
|
||||
:monitors="monitors"
|
||||
@input="mapView=$event"
|
||||
></dougal-group-map>
|
||||
<v-container fluid fill-height class="ma-0 pa-0" v-else>
|
||||
|
||||
<v-overlay :value="loading && !comparisons.length" absolute>
|
||||
<v-progress-circular
|
||||
indeterminate
|
||||
size="64"
|
||||
></v-progress-circular>
|
||||
</v-overlay>
|
||||
|
||||
<v-overlay :value="!loading && !groupFound" absolute opacity="0.8">
|
||||
<v-row justify="center">
|
||||
<v-alert
|
||||
type="error"
|
||||
>
|
||||
Group not found
|
||||
</v-alert>
|
||||
</v-row>
|
||||
<v-row justify="center">
|
||||
<v-btn color="primary" @click="refreshProjects">Retry</v-btn>
|
||||
</v-row>
|
||||
</v-overlay>
|
||||
|
||||
<v-row no-gutters align="stretch" class="fill-height">
|
||||
<v-col cols="12" v-if="groupFound">
|
||||
|
||||
<v-data-table class="ma-1"
|
||||
:headers="projectHeaders"
|
||||
:items="projects"
|
||||
dense
|
||||
>
|
||||
|
||||
<template v-slot:item.baseline="{item, value, index}">
|
||||
<v-simple-checkbox v-if="index+1 < projects.length"
|
||||
color="primary"
|
||||
:value="baseline === item"
|
||||
@input="setBaseline(item)"
|
||||
></v-simple-checkbox>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.monitor="{item, value, index}">
|
||||
<v-simple-checkbox v-if="index > 0 && !(index <= baselineIndex)"
|
||||
color="primary"
|
||||
:value="monitor === item"
|
||||
@input="setMonitor(item)"
|
||||
></v-simple-checkbox>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.pid="{item, value}">
|
||||
<v-chip
|
||||
label
|
||||
small
|
||||
outlined
|
||||
:href="`/projects/${item.pid}`"
|
||||
:color="!item.archived ? 'primary' : ''"
|
||||
>{{ value }}</v-chip>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.fsp="{item, value}">
|
||||
<span title="First production shot">{{value.tstamp.substr(0, 10)}}</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.lsp="{item, value}">
|
||||
<span title="Last production shot">{{value.tstamp.substr(0, 10)}}</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.prod_duration="{item, value}">
|
||||
<span v-if="value.days > 2" :title="`${value.days} d ${value.hours} h ${value.minutes} m ${(value.seconds + value.milliseconds/1000).toFixed(3)} s`">
|
||||
{{ value.days }} d
|
||||
</span>
|
||||
<span v-else>
|
||||
{{ value.days }} d {{ value.hours }} h {{ value.minutes }} m {{ (value.seconds + value.milliseconds/1000).toFixed(1) }} s
|
||||
</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.prod_distance="{item, value}">
|
||||
{{ (value/1000).toFixed(1) }} km
|
||||
</template>
|
||||
|
||||
<template v-slot:footer.prepend>
|
||||
|
||||
<v-btn v-if="baseline && !mapView"
|
||||
text
|
||||
color="primary"
|
||||
title="Switch to map view"
|
||||
@click="mapView = true"
|
||||
>View map</v-btn>
|
||||
|
||||
<v-btn v-if="comparison"
|
||||
text
|
||||
color="primary"
|
||||
title="Back to summary"
|
||||
@click="clearComparison"
|
||||
>Back</v-btn>
|
||||
</template>
|
||||
|
||||
</v-data-table>
|
||||
|
||||
<!-- BEGIN TEST -->
|
||||
|
||||
<dougal-group-comparison-summary v-if="comparison"
|
||||
:baseline="baseline"
|
||||
:monitor="monitor"
|
||||
:comparison="comparison"
|
||||
></dougal-group-comparison-summary>
|
||||
|
||||
<dougal-group-repeatability-summary v-else-if="comparisons.length"
|
||||
:comparisons="comparisons"
|
||||
:projects="projects"
|
||||
@input="setComparison"
|
||||
></dougal-group-repeatability-summary>
|
||||
|
||||
<!-- END TEST -->
|
||||
|
||||
|
||||
</v-col>
|
||||
<v-col cols="12" v-else>
|
||||
<v-card>
|
||||
<v-card-text>
|
||||
Group does not exist.
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</v-col>
|
||||
</v-row>
|
||||
</v-container>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex'
|
||||
import AccessMixin from '@/mixins/access';
|
||||
import DougalGroupRepeatabilitySummary from '@/components/groups/group-repeatability-summary.vue';
|
||||
import DougalGroupComparisonSummary from '@/components/groups/group-comparison-summary';
|
||||
import DougalGroupMap from '@/components/groups/group-map';
|
||||
|
||||
export default {
|
||||
name: 'Group',
|
||||
|
||||
mixins: [
|
||||
AccessMixin
|
||||
],
|
||||
|
||||
components: {
|
||||
DougalGroupRepeatabilitySummary,
|
||||
DougalGroupComparisonSummary,
|
||||
DougalGroupMap
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
projectHeaders: [
|
||||
{
|
||||
value: "baseline",
|
||||
text: "Baseline"
|
||||
},
|
||||
{
|
||||
value: "monitor",
|
||||
text: "Monitor"
|
||||
},
|
||||
{
|
||||
value: "pid",
|
||||
text: "ID"
|
||||
},
|
||||
{
|
||||
value: "name",
|
||||
text: "Name"
|
||||
},
|
||||
{
|
||||
value: "fsp",
|
||||
text: "Start"
|
||||
},
|
||||
{
|
||||
value: "lsp",
|
||||
text: "Finish"
|
||||
},
|
||||
{
|
||||
value: "lines",
|
||||
text: "Preplot lines"
|
||||
},
|
||||
{
|
||||
value: "seq_final",
|
||||
text: "Num. of sequences"
|
||||
},
|
||||
{
|
||||
value: "prod_duration",
|
||||
text: "Duration"
|
||||
},
|
||||
{
|
||||
value: "prod_distance",
|
||||
text: "Distance"
|
||||
},
|
||||
],
|
||||
|
||||
mapView: false,
|
||||
|
||||
baseline: null,
|
||||
monitor: null,
|
||||
comparisons: []
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
groupName () {
|
||||
return this.$route.params.group;
|
||||
},
|
||||
|
||||
group () {
|
||||
return this.groups.find( i => i.group === this.groupName );
|
||||
},
|
||||
|
||||
groupFound () {
|
||||
return !!(this.loading || this.group);
|
||||
},
|
||||
|
||||
projects () {
|
||||
return this.group?.projects.toSorted((a, b) => a.pid.localeCompare(b.pid));
|
||||
},
|
||||
|
||||
baselineIndex () {
|
||||
return this.projects.indexOf(this.baseline);
|
||||
},
|
||||
|
||||
monitors () {
|
||||
if (this.baseline && this.comparisons) {
|
||||
return this.comparisons
|
||||
.filter( i => i.baseline_pid == this.baseline.pid )
|
||||
.map( i => this.projects.find( p => p.pid == i.monitor_pid ));
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
|
||||
comparison () {
|
||||
return this.comparisons.find( row =>
|
||||
row.baseline_pid == this.baseline?.pid && row.monitor_pid == this.monitor?.pid
|
||||
)?.meta;
|
||||
},
|
||||
|
||||
...mapGetters(["loading", "groups"])
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
setBaseline (project) {
|
||||
if (project === this.baseline) {
|
||||
this.baseline = null;
|
||||
} else {
|
||||
this.baseline = project;
|
||||
if (this.monitor) {
|
||||
if (this.projects.indexOf(this.monitor) <= this.projects.indexOf(this.baseline)) {
|
||||
this.monitor = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
setMonitor (project) {
|
||||
if (project === this.monitor) {
|
||||
this.monitor = null;
|
||||
} else {
|
||||
this.monitor = project;
|
||||
}
|
||||
},
|
||||
|
||||
clearComparison () {
|
||||
this.baseline = null;
|
||||
this.monitor = null;
|
||||
},
|
||||
|
||||
setComparison (baseline, monitor) {
|
||||
this.clearComparison();
|
||||
this.setBaseline(baseline);
|
||||
this.setMonitor(monitor);
|
||||
},
|
||||
|
||||
async getComparisons () {
|
||||
const url = `/comparison/group/${this.$route.params.group}`;
|
||||
this.comparisons = await this.api([url]);
|
||||
},
|
||||
|
||||
// TODO Should this go in a Vuex action rather?
|
||||
async refreshComparisons () {
|
||||
await this.getGroups();
|
||||
if (this.groupFound) {
|
||||
await this.getComparisons();
|
||||
}
|
||||
},
|
||||
|
||||
/*
|
||||
async getComparison () {
|
||||
if (this.baseline && this.monitor) {
|
||||
const url = `/comparison/group/${this.$route.params.group}/baseline/${this.baseline.pid}/monitor/${this.monitor.pid}`;
|
||||
const comparison = await this.api([url]);
|
||||
if (comparison) {
|
||||
this.comparison = comparison;
|
||||
}
|
||||
}
|
||||
},
|
||||
*/
|
||||
|
||||
handleComparisons (context, {payload}) {
|
||||
this.refreshComparisons();
|
||||
},
|
||||
|
||||
registerNotificationHandlers (action = "registerHandler") {
|
||||
|
||||
this.$store.dispatch(action, {
|
||||
table: 'comparisons',
|
||||
handler: this.handleComparisons
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
unregisterNotificationHandlers () {
|
||||
return this.registerNotificationHandlers("unregisterHandler");
|
||||
},
|
||||
|
||||
|
||||
...mapActions(["api", "getGroups", "refreshProjects"])
|
||||
},
|
||||
|
||||
async mounted () {
|
||||
this.registerNotificationHandlers();
|
||||
this.refreshComparisons()
|
||||
},
|
||||
|
||||
beforeDestroy () {
|
||||
this.unregisterNotificationHandlers();
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
</script>
|
||||
396
lib/www/client/source/src/views/GroupList.vue
Normal file
@@ -0,0 +1,396 @@
|
||||
<template>
|
||||
<v-container fluid>
|
||||
|
||||
<v-data-table
|
||||
:headers="headers"
|
||||
:items="displayItems"
|
||||
item-key="group"
|
||||
:options.sync="options"
|
||||
:expanded.sync="expanded"
|
||||
show-expand
|
||||
:loading="loading"
|
||||
>
|
||||
|
||||
<template v-slot:item.group="{item, value}">
|
||||
<v-chip
|
||||
label
|
||||
small
|
||||
:href="`./${value}`"
|
||||
>{{ value }}</v-chip>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.shots_total="{item, value}">
|
||||
<div>{{ item.prime + item.other }}</div>
|
||||
<v-progress-linear
|
||||
background-color="secondary"
|
||||
color="primary"
|
||||
:value="item.prime/(item.prime+item.other)*100"
|
||||
></v-progress-linear>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.prime="{item, value}">
|
||||
{{ value }}
|
||||
({{ (value / (item.prime + item.other) * 100).toFixed(1) }}%)
|
||||
</template>
|
||||
|
||||
<template v-slot:item.other="{item, value}">
|
||||
{{ value }}
|
||||
({{ (value / (item.prime + item.other) * 100).toFixed(1) }}%)
|
||||
</template>
|
||||
|
||||
<template v-slot:item.prod_duration="{item, value}">
|
||||
<span v-if="value.days > 2" :title="`${value.days} d ${value.hours} h ${value.minutes} m ${(value.seconds + value.milliseconds/1000).toFixed(3)} s`">
|
||||
{{ value.days }} d
|
||||
</span>
|
||||
<span v-else>
|
||||
{{ value.days }} d {{ value.hours }} h {{ value.minutes }} m {{ (value.seconds + value.milliseconds/1000).toFixed(1) }} s
|
||||
</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.prod_distance="{item, value}">
|
||||
{{ (value/1000).toFixed(1) }} km
|
||||
</template>
|
||||
|
||||
<template v-slot:item.shooting_rate_mean="{item, value}">
|
||||
{{ (value).toFixed(2) }} s ±{{ (item.shooting_rate_sd).toFixed(3) }} s
|
||||
</template>
|
||||
|
||||
<template v-slot:item.shots_per_point="{item, value}">
|
||||
<div>
|
||||
{{ ((item.prime + item.other)/item.points).toFixed(1) }}
|
||||
({{ ((((item.prime + item.other)/item.points) / item.num_projects)*100).toFixed(2) }}%)
|
||||
</div>
|
||||
<v-progress-linear
|
||||
:value="((((item.prime + item.other)/item.points) / item.num_projects)*100)"
|
||||
></v-progress-linear>
|
||||
</template>
|
||||
|
||||
<template v-slot:expanded-item="{ headers, item }">
|
||||
<td :colspan="headers.length">
|
||||
<v-data-table class="ma-1"
|
||||
:headers="projectHeaders"
|
||||
:items="item.projects"
|
||||
dense
|
||||
hide-default-footer
|
||||
>
|
||||
|
||||
<template v-slot:item.pid="{item, value}">
|
||||
<a :href="`/projects/${value}`" title="Go to project">{{ value }}</a>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.fsp="{item, value}">
|
||||
<span title="First production shot">{{value.tstamp.substr(0, 10)}}</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.lsp="{item, value}">
|
||||
<span title="Last production shot">{{value.tstamp.substr(0, 10)}}</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.prod_duration="{item, value}">
|
||||
<span v-if="value.days > 2" :title="`${value.days} d ${value.hours} h ${value.minutes} m ${(value.seconds + value.milliseconds/1000).toFixed(3)} s`">
|
||||
{{ value.days }} d
|
||||
</span>
|
||||
<span v-else>
|
||||
{{ value.days }} d {{ value.hours }} h {{ value.minutes }} m {{ (value.seconds + value.milliseconds/1000).toFixed(1) }} s
|
||||
</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.prod_distance="{item, value}">
|
||||
{{ (value/1000).toFixed(1) }} km
|
||||
</template>
|
||||
|
||||
</v-data-table>
|
||||
</td>
|
||||
</template>
|
||||
|
||||
</v-data-table>
|
||||
|
||||
</v-container>
|
||||
</template>
|
||||
|
||||
<style>
|
||||
td p:last-of-type {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
</style>
|
||||
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex';
|
||||
import AccessMixin from '@/mixins/access';
|
||||
|
||||
|
||||
// FIXME send to lib/utils or so
|
||||
/*
|
||||
function duration_to_ms(v) {
|
||||
if (v instanceof Object) {
|
||||
return (
|
||||
(v.days || 0) * 86400000 +
|
||||
(v.hours || 0) * 3600000 +
|
||||
(v.minutes || 0) * 60000 +
|
||||
(v.seconds || 0) * 1000 +
|
||||
(v.milliseconds || 0)
|
||||
);
|
||||
} else {
|
||||
return {
|
||||
days: 0,
|
||||
hours: 0,
|
||||
minutes: 0,
|
||||
seconds: 0,
|
||||
milliseconds: 0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function ms_to_duration(v) {
|
||||
const days = Math.floor(v / 86400000);
|
||||
v %= 86400000;
|
||||
const hours = Math.floor(v / 3600000);
|
||||
v %= 3600000;
|
||||
const minutes = Math.floor(v / 60000);
|
||||
v %= 60000;
|
||||
const seconds = Math.floor(v / 1000);
|
||||
const milliseconds = v % 1000;
|
||||
return { days, hours, minutes, seconds, milliseconds };
|
||||
}
|
||||
|
||||
function normalise_duration (v) {
|
||||
return ms_to_duration(duration_to_ms(v));
|
||||
}
|
||||
|
||||
function add_durations(a, b) {
|
||||
return ms_to_duration(duration_to_ms(a) + duration_to_ms(b));
|
||||
}
|
||||
*/
|
||||
|
||||
export default {
|
||||
name: "GroupList",
|
||||
|
||||
components: {
|
||||
},
|
||||
|
||||
mixins: [
|
||||
AccessMixin
|
||||
],
|
||||
|
||||
data () {
|
||||
return {
|
||||
headers: [
|
||||
{
|
||||
value: "group",
|
||||
text: "Group name"
|
||||
},
|
||||
{
|
||||
value: "num_projects",
|
||||
text: "Number of campaigns"
|
||||
},
|
||||
{
|
||||
value: "lines",
|
||||
text: "Preplot lines"
|
||||
},
|
||||
{
|
||||
value: "points",
|
||||
text: "Preplot points"
|
||||
},
|
||||
{
|
||||
value: "sequences",
|
||||
text: "Total sequences"
|
||||
},
|
||||
{
|
||||
value: "shots_total",
|
||||
text: "Total shots"
|
||||
},
|
||||
{
|
||||
value: "prime",
|
||||
text: "Total prime"
|
||||
},
|
||||
{
|
||||
value: "other",
|
||||
text: "Total reshoot + infill"
|
||||
},
|
||||
/*
|
||||
{
|
||||
value: "ntba",
|
||||
text: "Total NTBA"
|
||||
},
|
||||
*/
|
||||
{
|
||||
value: "prod_duration",
|
||||
text: "Total duration"
|
||||
},
|
||||
{
|
||||
value: "prod_distance",
|
||||
text: "Total distance"
|
||||
},
|
||||
{
|
||||
value: "shooting_rate_mean",
|
||||
text: "Shooting rate (mean)"
|
||||
},
|
||||
{
|
||||
value: "shots_per_point",
|
||||
text: "Shots per point"
|
||||
},
|
||||
],
|
||||
items: [],
|
||||
expanded: [],
|
||||
options: { sortBy: ["group"], sortDesc: [false] },
|
||||
|
||||
projectHeaders: [
|
||||
{
|
||||
value: "pid",
|
||||
text: "ID"
|
||||
},
|
||||
{
|
||||
value: "name",
|
||||
text: "Name"
|
||||
},
|
||||
{
|
||||
value: "fsp",
|
||||
text: "Start"
|
||||
},
|
||||
{
|
||||
value: "lsp",
|
||||
text: "Finish"
|
||||
},
|
||||
{
|
||||
value: "lines",
|
||||
text: "Preplot lines"
|
||||
},
|
||||
{
|
||||
value: "seq_final",
|
||||
text: "Num. of sequences"
|
||||
},
|
||||
{
|
||||
value: "prod_duration",
|
||||
text: "Duration"
|
||||
},
|
||||
{
|
||||
value: "prod_distance",
|
||||
text: "Distance"
|
||||
},
|
||||
],
|
||||
|
||||
// Context menu stuff
|
||||
contextMenuShow: false,
|
||||
contextMenuX: 0,
|
||||
contextMenuY: 0,
|
||||
contextMenuItem: null,
|
||||
|
||||
/*
|
||||
// FIXME Eventually need to move this into Vuex
|
||||
groups: []
|
||||
*/
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
displayItems () {
|
||||
return this.items.filter(i => i.prod_distance);
|
||||
},
|
||||
|
||||
...mapGetters(['loading', 'groups'])
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
/*
|
||||
async prepareGroups () {
|
||||
//const groups = await this.api(["/prospects"]);
|
||||
//console.log("groups", groups);
|
||||
const groups = {};
|
||||
|
||||
for (const project of this.projects) {
|
||||
|
||||
if (!project.prod_distance) {
|
||||
// This project has no production data (either not started yet
|
||||
// or production data has not been imported) so we skip it.
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!project.prod_duration.days) {
|
||||
project.prod_duration = normalise_duration(project.prod_duration);
|
||||
}
|
||||
|
||||
for (const name of project.groups) {
|
||||
if (!(name in groups)) {
|
||||
groups[name] = {
|
||||
group: name,
|
||||
num_projects: 0,
|
||||
lines: 0,
|
||||
points: 0,
|
||||
sequences: 0,
|
||||
// Shots:
|
||||
prime: 0,
|
||||
other: 0,
|
||||
ntba: 0,
|
||||
prod_duration: {
|
||||
days: 0,
|
||||
hours: 0,
|
||||
minutes: 0,
|
||||
seconds: 0,
|
||||
milliseconds: 0
|
||||
},
|
||||
prod_distance: 0,
|
||||
shooting_rate: [],
|
||||
projects: []
|
||||
};
|
||||
}
|
||||
const group = groups[name];
|
||||
|
||||
group.num_projects++;
|
||||
group.lines = Math.max(group.lines, project.lines); // In case preplots changed
|
||||
group.points = Math.max(group.points, project.total); // Idem
|
||||
group.sequences += project.seq_final;
|
||||
group.prime += project.prime;
|
||||
group.other += project.other;
|
||||
//group.ntba += project.ntba;
|
||||
group.prod_duration = add_durations(group.prod_duration, project.prod_duration);
|
||||
group.prod_distance += project.prod_distance;
|
||||
group.shooting_rate.push(project.shooting_rate);
|
||||
group.projects.push(project);
|
||||
}
|
||||
}
|
||||
|
||||
this.groups = [];
|
||||
for (const group of Object.values(groups)) {
|
||||
group.shooting_rate_mean = d3a.mean(group.shooting_rate);
|
||||
group.shooting_rate_sd = d3a.deviation(group.shooting_rate);
|
||||
delete group.shooting_rate;
|
||||
|
||||
this.groups.push(group);
|
||||
}
|
||||
|
||||
},
|
||||
*/
|
||||
|
||||
async list () {
|
||||
this.items = [...this.groups];
|
||||
},
|
||||
|
||||
async load () {
|
||||
await this.refreshProjects();
|
||||
//await this.prepareGroups();
|
||||
await this.list();
|
||||
},
|
||||
|
||||
registerNotificationHandlers () {
|
||||
this.$store.dispatch('registerHandler', {
|
||||
table: 'project',
|
||||
|
||||
handler: (context, message) => {
|
||||
if (message.payload?.table == "public") {
|
||||
this.load();
|
||||
}
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
...mapActions(["api", "showSnack", "refreshProjects"])
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.registerNotificationHandlers();
|
||||
this.load();
|
||||
}
|
||||
}
|
||||
|
||||
</script>
|
||||
@@ -17,7 +17,7 @@
|
||||
</v-card-title>
|
||||
<v-card-text>
|
||||
|
||||
<v-menu v-if="writeaccess"
|
||||
<v-menu v-if="writeaccess()"
|
||||
v-model="contextMenuShow"
|
||||
:position-x="contextMenuX"
|
||||
:position-y="contextMenuY"
|
||||
@@ -164,7 +164,7 @@
|
||||
</v-text-field>
|
||||
<div v-else>
|
||||
<span v-html="$options.filters.markdownInline(item.remarks)"></span>
|
||||
<v-btn v-if="writeaccess && edit === null"
|
||||
<v-btn v-if="writeaccess() && edit === null"
|
||||
icon
|
||||
small
|
||||
title="Edit"
|
||||
@@ -196,6 +196,7 @@
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex';
|
||||
import DougalLineStatus from '@/components/line-status';
|
||||
import AccessMixin from '@/mixins/access';
|
||||
|
||||
export default {
|
||||
name: "LineList",
|
||||
@@ -204,6 +205,10 @@ export default {
|
||||
DougalLineStatus
|
||||
},
|
||||
|
||||
mixins: [
|
||||
AccessMixin
|
||||
],
|
||||
|
||||
data () {
|
||||
return {
|
||||
headers: [
|
||||
@@ -281,7 +286,7 @@ export default {
|
||||
},
|
||||
|
||||
computed: {
|
||||
...mapGetters(['user', 'writeaccess', 'linesLoading', 'lines', 'sequences', 'plannedSequences'])
|
||||
...mapGetters(['user', 'linesLoading', 'lines', 'sequences', 'plannedSequences'])
|
||||
},
|
||||
|
||||
watch: {
|
||||
|
||||
@@ -5,6 +5,22 @@
|
||||
<v-card-title>
|
||||
<v-toolbar flat>
|
||||
<v-toolbar-title>
|
||||
<template v-if="$route.params.sequence">
|
||||
<v-btn icon small
|
||||
:disabled="sequenceIndex >= (sequences.length - 1)"
|
||||
:to="{name: 'logBySequence', params: { sequence: (sequences[sequences.length-1]||{}).sequence }}"
|
||||
title="Go to the first sequence"
|
||||
>
|
||||
<v-icon dense>mdi-chevron-double-left</v-icon>
|
||||
</v-btn>
|
||||
<v-btn icon small
|
||||
:disabled="sequenceIndex >= (sequences.length - 1)"
|
||||
:to="{name: 'logBySequence', params: { sequence: (sequences[sequenceIndex+1]||{}).sequence }}"
|
||||
title="Go to the previous sequence"
|
||||
>
|
||||
<v-icon dense>mdi-chevron-left</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
<span class="d-none d-lg-inline">
|
||||
{{
|
||||
$route.params.sequence
|
||||
@@ -31,20 +47,40 @@
|
||||
: ""
|
||||
}}
|
||||
</span>
|
||||
|
||||
<template v-if="$route.params.sequence">
|
||||
<v-btn icon small
|
||||
:disabled="sequenceIndex==0"
|
||||
:to="{name: 'logBySequence', params: { sequence: (sequences[sequenceIndex-1]||{}).sequence }}"
|
||||
title="Go to the next sequence"
|
||||
>
|
||||
<v-icon dense>mdi-chevron-right</v-icon>
|
||||
</v-btn>
|
||||
<v-btn icon small class="mr-1"
|
||||
:disabled="sequenceIndex==0"
|
||||
:to="{name: 'logBySequence', params: { sequence: (sequences[0]||{}).sequence }}"
|
||||
title="Go to the last sequence"
|
||||
>
|
||||
<v-icon dense>mdi-chevron-double-right</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
|
||||
<a v-if="$route.params.sequence"
|
||||
class="mr-3"
|
||||
:href="`/projects/${$route.params.project}/sequences/${$route.params.sequence}`"
|
||||
title="View the shotlog for this sequence"
|
||||
>
|
||||
<v-icon
|
||||
right
|
||||
color="teal"
|
||||
>mdi-format-list-numbered</v-icon>
|
||||
</a>
|
||||
|
||||
</v-toolbar-title>
|
||||
|
||||
<a v-if="$route.params.sequence"
|
||||
class="mr-3"
|
||||
:href="`/projects/${$route.params.project}/sequences/${$route.params.sequence}`"
|
||||
title="View the shotlog for this sequence"
|
||||
>
|
||||
<v-icon
|
||||
right
|
||||
color="teal"
|
||||
>mdi-format-list-numbered</v-icon>
|
||||
</a>
|
||||
|
||||
<dougal-event-edit v-if="writeaccess"
|
||||
|
||||
<dougal-event-edit v-if="$parent.writeaccess()"
|
||||
v-model="eventDialog"
|
||||
v-bind="editedEvent"
|
||||
:available-labels="userLabels"
|
||||
@@ -54,7 +90,7 @@
|
||||
>
|
||||
</dougal-event-edit>
|
||||
|
||||
<dougal-event-edit-labels v-if="writeaccess"
|
||||
<dougal-event-edit-labels v-if="$parent.writeaccess()"
|
||||
v-model="eventLabelsDialog"
|
||||
:labels="userLabels"
|
||||
:selected="contextMenuItem ? contextMenuItem.labels||[] : []"
|
||||
@@ -171,7 +207,7 @@
|
||||
<v-card-text>
|
||||
|
||||
<!-- BEGIN Context menu for log entries -->
|
||||
<v-menu v-if="writeaccess"
|
||||
<v-menu v-if="$parent.writeaccess()"
|
||||
v-model="contextMenuShow"
|
||||
:position-x="contextMenuX"
|
||||
:position-y="contextMenuY"
|
||||
@@ -325,7 +361,13 @@
|
||||
@click="labelSearch=label"
|
||||
>{{label}}</v-chip>
|
||||
</span>
|
||||
<dougal-event-edit-history v-if="entry.has_edits && writeaccess"
|
||||
<v-icon v-if="entry.meta.auto || entry.meta.author"
|
||||
x-small
|
||||
left
|
||||
color="primary"
|
||||
:title="entry.meta.author?`Automatic event by ${entry.meta.author}`:'Automatic event'"
|
||||
>mdi-robot</v-icon>
|
||||
<dougal-event-edit-history v-if="entry.has_edits && $parent.writeaccess()"
|
||||
:id="entry.id"
|
||||
:disabled="eventsLoading"
|
||||
:labels="labels"
|
||||
@@ -488,17 +530,6 @@ export default {
|
||||
rows () {
|
||||
const rows = {};
|
||||
this.items
|
||||
.filter(i => {
|
||||
return !this.$route.params.sequence || (this.$route.params.sequence == i.sequence)
|
||||
})
|
||||
.filter(i => {
|
||||
for (const label of this.filterableLabels) {
|
||||
if (!this.shownLabels.includes(label) && i.labels.includes(label)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
})
|
||||
.forEach(i => {
|
||||
const key = (i.sequence && i.point) ? (i.sequence+"@"+i.point) : i.tstamp;
|
||||
if (!rows[key]) {
|
||||
@@ -529,6 +560,10 @@ export default {
|
||||
.sort( (a, b) => b[1]-a[1] );
|
||||
},
|
||||
|
||||
filteredLabels () {
|
||||
return this.filterableLabels.filter( label => !this.shownLabels.includes(label) );
|
||||
},
|
||||
|
||||
presetRemarks () {
|
||||
return this.projectConfiguration?.events?.presetRemarks ?? [];
|
||||
},
|
||||
@@ -541,7 +576,17 @@ export default {
|
||||
}
|
||||
},
|
||||
|
||||
...mapGetters(['user', 'writeaccess', 'eventsLoading', 'online', 'sequence', 'line', 'point', 'position', 'timestamp', 'lineName', 'events', 'labels', 'userLabels', 'projectConfiguration']),
|
||||
sequenceIndex () {
|
||||
if ("sequence" in this.$route.params) {
|
||||
const index = this.sequences.findIndex( i => i.sequence == this.$route.params.sequence );
|
||||
if (index != -1) {
|
||||
return index;
|
||||
}
|
||||
}
|
||||
// return undefined
|
||||
},
|
||||
|
||||
...mapGetters(['user', 'eventsLoading', 'online', 'sequence', 'sequences', 'line', 'point', 'position', 'timestamp', 'lineName', 'events', 'labels', 'userLabels', 'projectConfiguration']),
|
||||
...mapState({projectSchema: state => state.project.projectSchema})
|
||||
|
||||
},
|
||||
@@ -549,6 +594,7 @@ export default {
|
||||
watch: {
|
||||
options: {
|
||||
async handler () {
|
||||
this.savePrefs();
|
||||
await this.fetchEvents();
|
||||
},
|
||||
deep: true
|
||||
@@ -567,12 +613,19 @@ export default {
|
||||
},
|
||||
|
||||
filter (newVal, oldVal) {
|
||||
this.savePrefs();
|
||||
if (newVal?.toLowerCase() != oldVal?.toLowerCase()) {
|
||||
this.fetchEvents();
|
||||
}
|
||||
},
|
||||
|
||||
labelSearch () {
|
||||
this.savePrefs();
|
||||
this.fetchEvents();
|
||||
},
|
||||
|
||||
filteredLabels () {
|
||||
this.savePrefs()
|
||||
this.fetchEvents();
|
||||
},
|
||||
|
||||
@@ -581,7 +634,7 @@ export default {
|
||||
},
|
||||
|
||||
user (newVal, oldVal) {
|
||||
this.itemsPerPage = Number(localStorage.getItem(`dougal/prefs/${this.user?.name}/${this.$route.params.project}/${this.$options.name}/items-per-page`)) || 25;
|
||||
this.loadPrefs();
|
||||
}
|
||||
|
||||
},
|
||||
@@ -632,8 +685,10 @@ export default {
|
||||
|
||||
async fetchEvents (opts = {}) {
|
||||
const options = {
|
||||
sequence: this.$route.params.sequence,
|
||||
text: this.filter,
|
||||
label: this.labelSearch,
|
||||
excludeLabels: this.filteredLabels,
|
||||
...this.options
|
||||
};
|
||||
const res = await this.getEvents([this.$route.params.project, options]);
|
||||
@@ -737,6 +792,13 @@ export default {
|
||||
if (event.id) {
|
||||
const id = event.id;
|
||||
delete event.id;
|
||||
|
||||
// If this is an edit, ensure that it is *either*
|
||||
// a timestamp event or a sequence + point one.
|
||||
if (event.sequence && event.point && event.tstamp) {
|
||||
delete event.tstamp;
|
||||
}
|
||||
|
||||
this.putEvent(id, event, callback); // No await
|
||||
} else {
|
||||
this.postEvent(event, callback); // No await
|
||||
@@ -829,7 +891,7 @@ export default {
|
||||
viewOnMap(item) {
|
||||
if (item?.meta && item.meta?.geometry?.type == "Point") {
|
||||
const [ lon, lat ] = item.meta.geometry.coordinates;
|
||||
return `map#15/${lon.toFixed(6)}/${lat.toFixed(6)}`;
|
||||
return `map#z15x${lon.toFixed(6)}y${lat.toFixed(6)}::${lon.toFixed(6)},${lat.toFixed(6)}`;
|
||||
} else if (item?.items) {
|
||||
return this.viewOnMap(item.items[0]);
|
||||
}
|
||||
@@ -864,10 +926,36 @@ export default {
|
||||
*/
|
||||
},
|
||||
|
||||
getPrefsKey () {
|
||||
return `dougal/prefs/${this.user?.name}/${this.$route.params.project}/Log/v1`;
|
||||
},
|
||||
|
||||
savePrefs () {
|
||||
const prefs = {
|
||||
shownLabels: this.shownLabels,
|
||||
labelSearch: this.labelSearch,
|
||||
filter: this.filter,
|
||||
options: this.options
|
||||
};
|
||||
localStorage.setItem(this.getPrefsKey(), JSON.stringify(prefs));
|
||||
},
|
||||
|
||||
loadPrefs () {
|
||||
const stored = localStorage.getItem(this.getPrefsKey());
|
||||
if (stored) {
|
||||
const prefs = JSON.parse(stored);
|
||||
if (prefs.shownLabels !== undefined) this.shownLabels = prefs.shownLabels;
|
||||
if (prefs.labelSearch !== undefined) this.labelSearch = prefs.labelSearch;
|
||||
if (prefs.filter !== undefined) this.filter = prefs.filter;
|
||||
if (prefs.options !== undefined) this.options = prefs.options;
|
||||
}
|
||||
},
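// Note (not in the original source): the value saved under getPrefsKey() is the
// plain JSON object built in savePrefs(), e.g.
//   {"shownLabels":[...],"labelSearch":"","filter":"","options":{...}}
// so a user's saved view can be reset with localStorage.removeItem(this.getPrefsKey()).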
|
||||
|
||||
...mapActions(["api", "showSnack", "refreshEvents", "getEvents"])
|
||||
},
|
||||
|
||||
async mounted () {
|
||||
this.loadPrefs();
|
||||
this.fetchEvents();
|
||||
|
||||
window.addEventListener('keyup', this.handleKeyboardEvent);
|
||||
|
||||
@@ -81,6 +81,13 @@ export default {
|
||||
await this.logout();
|
||||
await this.login(this.credentials);
|
||||
|
||||
if (this.user) {
|
||||
console.log("Login successful");
|
||||
// Should trigger auto-refresh over ws as well as authenticating the
|
||||
// user over ws.
|
||||
this.$root.sendJwt();
|
||||
}
|
||||
|
||||
if (this.user && !this.user.autologin) {
|
||||
this.$router.replace("/");
|
||||
} else {
|
||||
|
||||
File diff suppressed because it is too large
lib/www/client/source/src/views/MapLayersMixin.vue (new file, 761 lines)
@@ -0,0 +1,761 @@
|
||||
<script>
|
||||
// Important info about performance:
|
||||
// https://deck.gl/docs/developer-guide/performance#supply-attributes-directly
|
||||
|
||||
import { Deck, WebMercatorViewport, FlyToInterpolator, CompositeLayer } from '@deck.gl/core';
|
||||
import { GeoJsonLayer, LineLayer, PathLayer, BitmapLayer, ScatterplotLayer, ColumnLayer, IconLayer } from '@deck.gl/layers';
|
||||
import {HeatmapLayer} from '@deck.gl/aggregation-layers';
|
||||
import { TileLayer, MVTLayer, TripsLayer } from '@deck.gl/geo-layers';
|
||||
import { SimpleMeshLayer } from '@deck.gl/mesh-layers';
|
||||
import { OBJLoader } from '@loaders.gl/obj';
|
||||
|
||||
//import { json } from 'd3-fetch';
|
||||
import * as d3a from 'd3-array';
|
||||
|
||||
import { DougalBinaryBundle, DougalBinaryChunkSequential, DougalBinaryChunkInterleaved } from '@dougal/binary';
|
||||
import { DougalShotLayer } from '@/lib/deck.gl';
|
||||
import { DougalSequenceLayer, DougalEventsLayer } from '@/lib/deck.gl';
|
||||
import DougalBinaryLoader from '@/lib/deck.gl/DougalBinaryLoader';
|
||||
|
||||
import { colors } from 'vuetify/lib'
|
||||
|
||||
function hexToArray (hex, defaultValue = [ 0xc0, 0xc0, 0xc0, 0xff ]) {
|
||||
|
||||
if (typeof hex != "string" || hex.length < 6) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
if (hex[0] == "#") {
|
||||
hex = hex.slice(1); // remove the '#' character
|
||||
}
|
||||
|
||||
return [
|
||||
parseInt(hex.slice(0, 2), 16),
|
||||
parseInt(hex.slice(2, 4), 16),
|
||||
parseInt(hex.slice(4, 6), 16),
|
||||
hex.length > 6 ? parseInt(hex.slice(6, 8), 16) : 255
|
||||
];
|
||||
}
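// Illustrative examples (not in the original source):
//   hexToArray("#ff8800")   → [255, 136, 0, 255]
//   hexToArray("#ff880080") → [255, 136, 0, 128]  (8-digit form keeps its alpha)
//   hexToArray(42)          → [192, 192, 192, 255] (falls back to defaultValue)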
|
||||
|
||||
function namedColourToArray (name) {
|
||||
const parts = name.split(/\s+/).map( (s, i) =>
|
||||
i
|
||||
? s.replace("-", "")
|
||||
: s.replace(/-([a-z])/g, (match, group1) => group1.toUpperCase())
|
||||
);
|
||||
if (parts.length == 1) parts[1] = "base";
|
||||
const hex = parts.reduce((acc, key) => acc[key], colors);
|
||||
return hexToArray(hex);
|
||||
}
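// Illustrative example (assumes Vuetify's colour palette): "deep-purple lighten-2"
// becomes ["deepPurple", "lighten2"], which indexes colors.deepPurple.lighten2
// before being converted with hexToArray(); a bare "teal" resolves to colors.teal.base.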
|
||||
|
||||
export default {
|
||||
name: "MapLayersMixin",
|
||||
|
||||
data () {
|
||||
|
||||
return {
|
||||
|
||||
COLOUR_SCALE_1: [
|
||||
// negative
|
||||
[65, 182, 196],
|
||||
[127, 205, 187],
|
||||
[199, 233, 180],
|
||||
[237, 248, 177],
|
||||
|
||||
// positive
|
||||
[255, 255, 204],
|
||||
[255, 237, 160],
|
||||
[254, 217, 118],
|
||||
[254, 178, 76],
|
||||
[253, 141, 60],
|
||||
[252, 78, 42],
|
||||
[227, 26, 28],
|
||||
[189, 0, 38],
|
||||
[128, 0, 38]
|
||||
]
|
||||
|
||||
};
|
||||
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
normalisedColourScale(v, scale = this.COLOUR_SCALE_1, min = 0, max = 1) {
|
||||
const range = max-min;
|
||||
const i = Math.min(scale.length - 1, Math.max(Math.round((v-min) / range * scale.length), 0)); // clamp to a valid index
|
||||
//console.log(`v=${v}, scale.length=${scale.length}, min=${min}, max=${max}, i=${i}, → ${scale[i]}`);
|
||||
return scale[i];
|
||||
},
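// Illustrative use (not in the original source): map a value onto COLOUR_SCALE_1,
// e.g. this.normalisedColourScale(0.75) for a pre-normalised value, or
// this.normalisedColourScale(depth, this.COLOUR_SCALE_1, 0, 30) to rescale
// a 0‒30 m range onto the same palette.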
|
||||
|
||||
|
||||
makeDataFromBinary ( {positions, values, udv} ) {
|
||||
const totalCount = positions.length / 2;
|
||||
|
||||
const attributes = {
|
||||
getPosition: {
|
||||
value: positions,
|
||||
type: 'float32',
|
||||
size: 2
|
||||
},
|
||||
udv
|
||||
};
|
||||
|
||||
values.forEach((valArray, k) => {
|
||||
let value = valArray;
|
||||
if (valArray instanceof BigUint64Array) {
|
||||
value = Float64Array.from(valArray, v => Number(v));
|
||||
}
|
||||
attributes[`value${k}`] = {
|
||||
value,
|
||||
type: value instanceof Float64Array ? 'float64' :
|
||||
value instanceof Uint16Array ? 'uint16' :
|
||||
value instanceof Uint32Array ? 'uint32' : 'float32',
|
||||
size: 1
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
length: totalCount,
|
||||
attributes
|
||||
};
|
||||
},
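// Illustrative only: the { length, attributes } object built above follows the
// deck.gl binary-attribute convention referenced at the top of this file, so it
// can be handed straight to a layer's `data` prop, e.g.
//
//   new ScatterplotLayer({
//     id: 'binary-example',               // hypothetical layer id
//     data: this.makeDataFromBinary(this.sequenceBinaryData),
//     getRadius: 2
//   });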
|
||||
|
||||
loadOptions (options = {}) {
|
||||
return {
|
||||
loadOptions: {
|
||||
fetch: {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Authorization': `Bearer ${this.$store.getters.jwt}`,
|
||||
}
|
||||
},
|
||||
...options
|
||||
},
|
||||
};
|
||||
|
||||
},
|
||||
|
||||
|
||||
osmLayer (options = {}) {
|
||||
return new TileLayer({
|
||||
id: "osm",
|
||||
// https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames#Tile_servers
|
||||
data: 'https://c.tile.openstreetmap.org/{z}/{x}/{y}.png',
|
||||
|
||||
minZoom: 0,
|
||||
maxZoom: 19,
|
||||
tileSize: 256,
|
||||
|
||||
renderSubLayers: props => {
|
||||
const {
|
||||
bbox: {west, south, east, north}
|
||||
} = props.tile;
|
||||
|
||||
return new BitmapLayer(props, {
|
||||
data: null,
|
||||
image: props.data,
|
||||
bounds: [west, south, east, north]
|
||||
});
|
||||
},
|
||||
...options
|
||||
})
|
||||
},
|
||||
|
||||
|
||||
// OSM tiles layer. Handy to make water transparent
|
||||
// but not super reliable yet
|
||||
|
||||
osmVectorLayer (options = {}) {
|
||||
return new MVTLayer({
|
||||
id: 'osm',
|
||||
data: 'https://vector.openstreetmap.org/shortbread_v1/{z}/{x}/{y}.mvt',
|
||||
minZoom: 0,
|
||||
maxZoom: 14,
|
||||
getFillColor: feature => {
|
||||
const layer = feature.properties.layerName;
|
||||
//console.log("layer =", layer, feature.properties.kind);
|
||||
switch (layer) {
|
||||
case "ocean":
|
||||
return [0, 0, 0, 0];
|
||||
case "land":
|
||||
return [ 0x54, 0x6E, 0x7A, 255 ];
|
||||
default:
|
||||
return [ 240, 240, 240, 255 ];
|
||||
}
|
||||
},
|
||||
getLineColor: feature => {
|
||||
if (feature.properties.layer === 'water') {
|
||||
return [0, 0, 0, 0]; // No outline for water
|
||||
}
|
||||
return [192, 192, 192, 255]; // Default line color for roads, etc.
|
||||
},
|
||||
getLineWidth: feature => {
|
||||
if (feature.properties.highway) {
|
||||
return feature.properties.highway === 'motorway' ? 6 : 3; // Example road widths
|
||||
}
|
||||
return 1;
|
||||
},
|
||||
stroked: true,
|
||||
filled: true,
|
||||
pickable: true
|
||||
});
|
||||
},
|
||||
|
||||
|
||||
openSeaMapLayer (options = {}) {
|
||||
return new TileLayer({
|
||||
id: "sea",
|
||||
data: 'https://tiles.openseamap.org/seamark/{z}/{x}/{y}.png',
|
||||
|
||||
minZoom: 0,
|
||||
maxZoom: 19,
|
||||
tileSize: 256,
|
||||
|
||||
renderSubLayers: props => {
|
||||
const {
|
||||
bbox: {west, south, east, north}
|
||||
} = props.tile;
|
||||
|
||||
return new BitmapLayer(props, {
|
||||
data: null,
|
||||
image: props.data,
|
||||
bounds: [west, south, east, north]
|
||||
});
|
||||
},
|
||||
...options
|
||||
})
|
||||
},
|
||||
|
||||
|
||||
// Norwegian nautical charts
|
||||
// As of 2025, not available for some weird reason
|
||||
nauLayer (options = {}) {
|
||||
return new TileLayer({
|
||||
id: "nau",
|
||||
// https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames#Tile_servers
|
||||
data: 'https://opencache.statkart.no/gatekeeper/gk/gk.open_gmaps?layers=sjokartraster&zoom={z}&x={x}&y={y}',
|
||||
|
||||
minZoom: 0,
|
||||
maxZoom: 19,
|
||||
tileSize: 256,
|
||||
|
||||
renderSubLayers: props => {
|
||||
const {
|
||||
bbox: {west, south, east, north}
|
||||
} = props.tile;
|
||||
|
||||
return new BitmapLayer(props, {
|
||||
data: null,
|
||||
image: props.data,
|
||||
bounds: [west, south, east, north]
|
||||
});
|
||||
},
|
||||
...options
|
||||
})
|
||||
},
|
||||
|
||||
vesselTrackPointsLayer (options = {}) {
|
||||
|
||||
if (!this.vesselPosition) return;
|
||||
|
||||
return new SimpleMeshLayer({
|
||||
id: 'navp',
|
||||
data: [ this.vesselPosition ],
|
||||
//getColor: [ 255, 48, 0 ],
|
||||
getColor: [ 174, 1, 174 ],
|
||||
getOrientation: d => [0, (270 - (d.heading ?? d.cmg ?? d.bearing ?? d.lineBearing ?? 0)) % 360 , 0],
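// Assumed convention: headings are clockwise-from-north compass bearings; the
// (270 - bearing) term rotates them into the yaw frame expected by boat0.obj.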
|
||||
getPosition: d => [ d.x, d.y ],
|
||||
mesh: `/assets/boat0.obj`,
|
||||
sizeScale: 0.1,
|
||||
loaders: [OBJLoader],
|
||||
pickable: true,
|
||||
...options
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
vesselTrackLinesLayer (options = {}) {
|
||||
|
||||
const cfg = this.vesselTrackPeriodSettings[this.vesselTrackPeriod];
|
||||
|
||||
let ts1 = new Date(this.vesselTrackLastRefresh*1000);
|
||||
let ts0 = new Date(ts1.valueOf() - cfg.offset);
|
||||
let di = cfg.decimation;
|
||||
let l = 10000;
|
||||
|
||||
const breakLimit = (di ? di*20 : 5 * 60) * 1000;
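// i.e. split the track whenever consecutive samples are more than ~20 decimation
// intervals apart (assuming di is in seconds), or 5 minutes apart when undecimated.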
|
||||
|
||||
let trailLength = (ts1 - ts0) / 1000;
|
||||
|
||||
return new TripsLayer({
|
||||
id: 'navl',
|
||||
data: `/api/vessel/track/?di=${di}&l=${l}&project=&ts0=${ts0.toISOString()}&ts1=${ts1.toISOString()}`,
|
||||
...this.loadOptions({
|
||||
fetch: {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.$store.getters.jwt}`,
|
||||
}
|
||||
}
|
||||
}),
|
||||
dataTransform: (data) => {
|
||||
if (data.length >= l) {
|
||||
console.warn(`Vessel track data may be truncated! Limit: ${l}`);
|
||||
}
|
||||
|
||||
const paths = [];
|
||||
let prevTstamp;
|
||||
paths.push({path: [], timestamps: [], num: 0, ts0: +Infinity, ts1: -Infinity});
|
||||
for (const el of data) {
|
||||
const tstamp = new Date(el.tstamp).valueOf();
|
||||
const curPath = () => paths[paths.length-1];
|
||||
if (prevTstamp && Math.abs(tstamp - prevTstamp) > breakLimit) {
|
||||
// Start a new path
|
||||
console.log(`Breaking path on interval ${Math.abs(tstamp - prevTstamp)} > ${breakLimit}`);
|
||||
paths.push({path: [], timestamps: [], num: paths.length, ts0: +Infinity, ts1: -Infinity});
|
||||
}
|
||||
|
||||
if (tstamp < curPath().ts0) {
|
||||
curPath().ts0 = tstamp;
|
||||
}
|
||||
if (tstamp > curPath().ts1) {
|
||||
curPath().ts1 = tstamp;
|
||||
}
|
||||
|
||||
curPath().path.push([el.x, el.y]);
|
||||
curPath().timestamps.push(tstamp/1000);
|
||||
prevTstamp = tstamp;
|
||||
}
|
||||
|
||||
paths.forEach (path => {
|
||||
path.nums = paths.length;
|
||||
path.ts0 = new Date(path.ts0);
|
||||
path.ts1 = new Date(path.ts1);
|
||||
});
|
||||
|
||||
return paths;
|
||||
},
|
||||
getPath: d => d.path,
|
||||
getTimestamps: d => d.timestamps,
|
||||
currentTime: ts1.valueOf() / 1000,
|
||||
trailLength,
|
||||
widthUnits: "meters",
|
||||
widthMinPixels: 4,
|
||||
getWidth: 10,
|
||||
getColor: [ 174, 1, 126, 200 ],
|
||||
stroked: true,
|
||||
pickable: true,
|
||||
...options
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
eventsLogLayer (options = {}) {
|
||||
|
||||
const labelColour = (d, i, t, c = [127, 65, 90]) => {
|
||||
const label = d?.properties?.labels?.[0];
|
||||
const colour = this.labels[label]?.view?.colour ?? "#c0c0c0";
|
||||
|
||||
if (colour) {
|
||||
if (colour[0] == "#") {
|
||||
c = hexToArray(colour);
|
||||
} else {
|
||||
c = namedColourToArray(colour);
|
||||
}
|
||||
} else {
|
||||
//return [127, 65, 90];
|
||||
}
|
||||
|
||||
if (t != null) {
|
||||
c[3] = t;
|
||||
}
|
||||
|
||||
return c;
|
||||
};
|
||||
|
||||
return new DougalEventsLayer({
|
||||
id: 'log',
|
||||
data: `/api/project/${this.$route.params.project}/event?mime=application/geo%2Bjson`,
|
||||
...this.loadOptions(),
|
||||
lineWidthMinPixels: 2,
|
||||
getPosition: d => d.geometry.coordinates,
|
||||
jitter: 0.00015,
|
||||
getElevation: d => Math.min(Math.max(d.properties.remarks?.length || 10, 10), 200),
|
||||
getFillColor: (d, i) => labelColour(d, i, 200),
|
||||
getLineColor: (d, i) => labelColour(d, i, 200),
|
||||
radius: 0.001,
|
||||
radiusScale: 1,
|
||||
// This just won't work with radiusUnits = "pixels".
|
||||
// See: https://grok.com/share/c2hhcmQtMw%3D%3D_16578be4-20fd-4000-a765-f082503d0495
|
||||
radiusUnits: "pixels",
|
||||
radiusMinPixels: 1.5,
|
||||
radiusMaxPixels: 2.5,
|
||||
|
||||
pickable: true,
|
||||
...options
|
||||
})
|
||||
|
||||
},
|
||||
|
||||
preplotSaillinesLinesLayer (options = {}) {
|
||||
return new GeoJsonLayer({
|
||||
id: 'psll',
|
||||
data: `/api/project/${this.$route.params.project}/gis/preplot/line?class=V&v=${this.lineTStamp?.valueOf()}`,
|
||||
...this.loadOptions(),
|
||||
lineWidthMinPixels: 1,
|
||||
getLineColor: (d) => d.properties.ntba ? [240, 248, 255, 200] : [85, 170, 255, 200],
|
||||
getLineWidth: 1,
|
||||
getPointRadius: 2,
|
||||
radiusUnits: "pixels",
|
||||
pointRadiusMinPixels: 2,
|
||||
pickable: true,
|
||||
...options
|
||||
})
|
||||
},
|
||||
|
||||
preplotLinesLayer (options = {}) {
|
||||
return new GeoJsonLayer({
|
||||
id: 'ppll',
|
||||
data: `/api/project/${this.$route.params.project}/gis/preplot/line?v=${this.lineTStamp?.valueOf()}`,
|
||||
...this.loadOptions(),
|
||||
lineWidthMinPixels: 1,
|
||||
getLineColor: (d) => d.properties.ntba ? [240, 248, 255, 200] : [85, 170, 255, 200],
|
||||
getLineWidth: 1,
|
||||
getPointRadius: 2,
|
||||
radiusUnits: "pixels",
|
||||
pointRadiusMinPixels: 2,
|
||||
pickable: true,
|
||||
...options
|
||||
})
|
||||
},
|
||||
|
||||
plannedLinesLinesLayer (options = {}) {
|
||||
return new PathLayer({
|
||||
id: 'planl',
|
||||
data: [...this.plannedSequences], // Create new array to trigger Deck.gl update
|
||||
dataTransform: (sequences) => {
|
||||
// Raise the data 10 m above ground so that it's visible over heatmaps, etc.
|
||||
return sequences.map( seq => ({
|
||||
...seq,
|
||||
geometry: {
|
||||
...seq.geometry,
|
||||
coordinates: seq.geometry.coordinates.map( pos => [...pos, 10] )
|
||||
}
|
||||
}))
|
||||
},
|
||||
getPath: d => d.geometry.coordinates,
|
||||
//getSourcePosition: d => d.geometry.coordinates[0],
|
||||
//getTargetPosition: d => d.geometry.coordinates[1],
|
||||
widthUnits: "meters",
|
||||
widthMinPixels: 4,
|
||||
getWidth: 25,
|
||||
//getLineWidth: 10,
|
||||
getColor: (d) => {
|
||||
const k = (d?.azimuth??0)/360*255;
|
||||
return [ k, 128, k, 200 ];
|
||||
},
|
||||
stroked: true,
|
||||
pickable: true,
|
||||
...options
|
||||
});
|
||||
},
|
||||
|
||||
rawSequencesLinesLayer (options = {}) {
|
||||
return new GeoJsonLayer({
|
||||
id: 'seqrl',
|
||||
data: `/api/project/${this.$route.params.project}/gis/raw/line?v=${this.sequenceTStamp?.valueOf()}`,
|
||||
...this.loadOptions(),
|
||||
lineWidthMinPixels: 1,
|
||||
getLineColor: (d) => d.properties.ntbp ? [0xe6, 0x51, 0x00, 200] : [0xff, 0x98, 0x00, 200],
|
||||
getLineWidth: 1,
|
||||
getPointRadius: 2,
|
||||
radiusUnits: "pixels",
|
||||
pointRadiusMinPixels: 2,
|
||||
pickable: true,
|
||||
...options
|
||||
})
|
||||
},
|
||||
|
||||
finalSequencesLinesLayer (options = {}) {
|
||||
return new GeoJsonLayer({
|
||||
id: 'seqfl',
|
||||
data: `/api/project/${this.$route.params.project}/gis/final/line?v=${this.sequenceTStamp?.valueOf()}`,
|
||||
...this.loadOptions(),
|
||||
lineWidthMinPixels: 1,
|
||||
getLineColor: (d) => d.properties.pending ? [0xa7, 0xff, 0xab, 200] : [0x00, 0x96, 0x88, 200],
|
||||
getLineWidth: 1,
|
||||
getPointRadius: 2,
|
||||
radiusUnits: "pixels",
|
||||
pointRadiusMinPixels: 2,
|
||||
pickable: true,
|
||||
...options
|
||||
})
|
||||
},
|
||||
|
||||
preplotSaillinesPointLayer (options = {}) {
|
||||
return new DougalSequenceLayer({
|
||||
id: 'pslp',
|
||||
data: `/api/project/${this.$route.params.project}/line/sail?v=${this.lineTStamp?.valueOf()}`, // API endpoint returning binary data
|
||||
loaders: [DougalBinaryLoader],
|
||||
...this.loadOptions({
|
||||
fetch: {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.$store.getters.jwt}`,
|
||||
Accept: 'application/vnd.aaltronav.dougal+octet-stream'
|
||||
}
|
||||
}
|
||||
}),
|
||||
getRadius: 2,
|
||||
getFillColor: (d, {data, index}) => data.attributes.value2.value[index] ? [240, 248, 255, 200] : [85, 170, 255, 200],
|
||||
//getFillColor: [0, 120, 220, 200],
|
||||
pickable: true,
|
||||
...options
|
||||
});
|
||||
},
|
||||
|
||||
preplotPointsLayer (options = {}) {
|
||||
return new DougalSequenceLayer({
|
||||
id: 'pplp',
|
||||
data: `/api/project/${this.$route.params.project}/line/source?v=${this.lineTStamp?.valueOf()}`, // API endpoint returning binary data
|
||||
loaders: [DougalBinaryLoader],
|
||||
...this.loadOptions({
|
||||
fetch: {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.$store.getters.jwt}`,
|
||||
Accept: 'application/vnd.aaltronav.dougal+octet-stream'
|
||||
}
|
||||
}
|
||||
}),
|
||||
getRadius: 2,
|
||||
getFillColor: (d, {data, index}) => data.attributes.value2.value[index] ? [240, 248, 255, 200] : [85, 170, 255, 200],
|
||||
//getFillColor: [0, 120, 220, 200],
|
||||
pickable: true,
|
||||
...options
|
||||
});
|
||||
},
|
||||
|
||||
plannedLinesPointsLayer (options = {}) {
|
||||
},
|
||||
|
||||
rawSequencesPointsLayer (options = {}) {
|
||||
|
||||
return new DougalSequenceLayer({
|
||||
id: 'seqrp',
|
||||
data: this.makeDataFromBinary(this.sequenceBinaryData),
|
||||
getRadius: 2,
|
||||
getFillColor: [0, 120, 220, 200],
|
||||
pickable: true,
|
||||
...options
|
||||
});
|
||||
},
|
||||
|
||||
finalSequencesPointsLayer (options = {}) {
|
||||
|
||||
return new DougalSequenceLayer({
|
||||
id: 'seqfp',
|
||||
data: this.makeDataFromBinary(this.sequenceBinaryDataFinal),
|
||||
getRadius: 2,
|
||||
getFillColor: [220, 120, 0, 200],
|
||||
pickable: true,
|
||||
...options
|
||||
});
|
||||
},
|
||||
|
||||
heatmapLayer(options = {}) {
|
||||
const { positions, values } = this.heatmapValue?.startsWith("co_")
|
||||
? this.sequenceBinaryDataFinal
|
||||
: this.sequenceBinaryData;
|
||||
|
||||
if (!positions?.length || !values?.length) {
|
||||
console.warn('No valid data for heatmapLayer');
|
||||
|
||||
return new HeatmapLayer({
|
||||
id: 'seqrh',
|
||||
data: [],
|
||||
...options
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
let weights, offset = 0, scaler = 1;
|
||||
let colorDomain = null;
|
||||
let aggregation = "MEAN";
|
||||
let transform = (v) => v;
|
||||
|
||||
switch (this.heatmapValue) {
|
||||
case "total_error":
|
||||
weights = Float32Array.from(values[3], (ei, i) => {
|
||||
const ej = values[4][i];
|
||||
return Math.sqrt(ei * ei + ej * ej) / 100; // Euclidean distance in meters
|
||||
});
|
||||
colorDomain = [2, 20]; // scale: 1 (already divided by 100 above)
|
||||
break;
|
||||
case "delta_i":
|
||||
weights = values[3];
|
||||
scaler = 0.1;
|
||||
colorDomain = [100, 1200]; // scale: 100 (1 ‒ 12 m)
|
||||
break;
|
||||
case "delta_j":
|
||||
weights = values[4];
|
||||
scaler = 0.1;
|
||||
colorDomain = [10, 80]; // scale: 100 (0.1 ‒ 0.8 m)
|
||||
break;
|
||||
|
||||
case "co_total_error":
|
||||
weights = Float32Array.from(values[3], (ei, i) => {
|
||||
const ej = values[4][i];
|
||||
return Math.sqrt(ei * ei + ej * ej) / 100; // Euclidean distance in meters
|
||||
});
|
||||
colorDomain = [10, 150]; // Scale: 100 (0.1 ‒ 1 m)
|
||||
break;
|
||||
case "co_delta_i":
|
||||
weights = values[5];
|
||||
scaler = 0.1;
|
||||
colorDomain = [10, 150];
|
||||
break;
|
||||
case "co_delta_j":
|
||||
weights = values[6];
|
||||
scaler = 0.1;
|
||||
colorDomain = [0.2, 2];
|
||||
break;
|
||||
|
||||
case "delta_μ":
|
||||
weights = values[5];
|
||||
scaler = 0.1;
|
||||
break;
|
||||
case "delta_σ":
|
||||
weights = values[6];
|
||||
scaler = 0.1;
|
||||
colorDomain = [ 0.1, 1.5 ];
|
||||
break;
|
||||
case "delta_R":
|
||||
weights = values[7];
|
||||
scaler = 0.1;
|
||||
colorDomain = [ 0.5, 1.0 ];
|
||||
break;
|
||||
case "press_μ":
|
||||
weights = values[8];
|
||||
offset = -2000;
|
||||
colorDomain = [ 5, 50 ];
|
||||
break;
|
||||
case "press_σ":
|
||||
weights = values[9];
|
||||
colorDomain = [ 1, 19 ];
|
||||
break;
|
||||
case "press_R":
|
||||
weights = values[10];
|
||||
colorDomain = [ 3, 50 ];
|
||||
break;
|
||||
case "depth_μ":
|
||||
weights = values[11];
|
||||
offset = -6;
|
||||
scaler = 0.1;
|
||||
colorDomain = [ 0.1, 1 ];
|
||||
break;
|
||||
case "depth_σ":
|
||||
weights = values[12];
|
||||
scaler = 0.1;
|
||||
break;
|
||||
case "depth_R":
|
||||
weights = values[13];
|
||||
scaler = 0.1;
|
||||
break;
|
||||
case "fill_μ":
|
||||
weights = values[14];
|
||||
colorDomain = [ 300, 1000 ];
|
||||
break;
|
||||
case "fill_σ":
|
||||
weights = values[15];
|
||||
offset = -250;
|
||||
colorDomain = [ 0, 250 ];
|
||||
break;
|
||||
case "fill_R":
|
||||
weights = values[16];
|
||||
offset = -500;
|
||||
colorDomain = [ 0, 500 ];
|
||||
break;
|
||||
case "delay_μ":
|
||||
weights = values[17];
|
||||
offset = -150;
|
||||
colorDomain = [ 1.5, 25 ];
|
||||
//transform = (v) => {console.log("τ(v)", v); return v;};
|
||||
break;
|
||||
case "delay_σ":
|
||||
weights = values[18];
|
||||
break;
|
||||
case "delay_R":
|
||||
weights = values[19];
|
||||
break;
|
||||
case "no_fire":
|
||||
weights = values[20];
|
||||
transform = v => v >> 4;
|
||||
aggregation = "SUM";
|
||||
colorDomain = [ 0.1, 1.5 ];
|
||||
break;
|
||||
case "autofire":
|
||||
weights = values[20];
|
||||
transform = v => v & 0xf;
|
||||
aggregation = "SUM";
|
||||
colorDomain = [ 0.5, 1.5 ];
|
||||
break;
|
||||
case "misfire":
|
||||
weights = values[20];
|
||||
aggregation = "SUM";
|
||||
colorDomain = [ 0.5, 1.5 ];
|
||||
break;
|
||||
}
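// Illustrative worked example (assumes raw depths are stored in decimetres):
// with heatmapValue === "depth_μ" a raw sample of 66 becomes
// |66 * 0.1 + (-6)| = 0.6 m in getWeight() below, inside the [0.1, 1] colorDomain.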
|
||||
|
||||
|
||||
const stats = {
|
||||
min: d3a.min(weights),
|
||||
mode: d3a.mode(weights),
|
||||
mean: d3a.mean(weights),
|
||||
max: d3a.max(weights),
|
||||
sd: d3a.deviation(weights),
|
||||
};
|
||||
const sr0 = [ stats.mean - 2.1*stats.sd, stats.mean + 2.1*stats.sd ];
|
||||
const sr1 = [ stats.mode - 2.1*stats.sd, stats.mode + 2.1*stats.sd ];
|
||||
|
||||
/*
|
||||
console.log('Positions sample:', positions.slice(0, 10));
|
||||
console.log('Weights sample:', weights.slice(0, 10));
|
||||
console.log("Mode:", this.heatmapValue);
|
||||
console.log('Weight stats:', stats);
|
||||
console.log("Suggested ranges");
|
||||
console.log(sr0);
|
||||
console.log(sr1);
|
||||
console.log("Actual ranges");
|
||||
console.log(colorDomain);
|
||||
*/
|
||||
|
||||
return new HeatmapLayer({
|
||||
id: 'seqrh',
|
||||
data: {
|
||||
length: weights.length,
|
||||
positions,
|
||||
weights
|
||||
/*
|
||||
attributes: {
|
||||
getPosition: { value: positions, type: 'float32', size: 2 },
|
||||
getWeight: { value: weights, type: 'float32', size: 1 }
|
||||
}
|
||||
*/
|
||||
},
|
||||
getPosition: (d, {index, data}) => [ data.positions[index*2], data.positions[index*2+1] ],
|
||||
getWeight: (d, {index, data}) => transform(Math.abs(data.weights[index] * scaler + offset)),
|
||||
colorDomain,
|
||||
radiusPixels: 25,
|
||||
aggregation,
|
||||
pickable: false,
|
||||
...options
|
||||
});
|
||||
},
|
||||
|
||||
},
|
||||
|
||||
}
|
||||
|
||||
</script>