Mirror of https://gitlab.com/wgp/dougal/software.git (synced 2025-12-06 06:57:07 +00:00)

Compare commits: 2fab06d340...344-improv (76 commits)
Commits in this comparison (SHA1, in the order listed):
979438d00e, c96ada6b78, 673c60a359, 99e425270c, 63633715e2, 8afac5c150, 11168def68, 0f477b8e65, 03b00a4ea7, c5faa53bee, 46b2512530, db4c9a0235, 1a12ea13ed, 81717c37f1, 6377e8854c, d3446d03bd, a52f7811f2, ef2bd4888e, 8801442c92, 30f65dbeaa, c2f53ac150, 4328fc4d2a, 2c2eb8fceb, 767c2f2cb1, 57a73f7d1c, 9f299056d8, 5d3c59867c, 76b8355ede, 76b55f514d, 4e1d3209df, f21ff7ee38, 2446b42785, 196e772004, 674d818fee, 5527576679, fe7c016dea, b7543aa6c4, b48a060dc0, c0f9a2de5a, 32a9c7a5f2, f1f74080f6, c5eb8e45f1, caab968fd6, 5f28d1be7b, 22c9537889, e95aaa7de7, 4f44f5a10c, 0ba467d34c, 2b5b302e54, 28938e27a9, 97f96fdc1e, 1e3ce35f76, 619a886781, c054e63325, fd94b3b6f4, 7b67b4afc9, 7c52ada922, 9072bbe389, 6639b7110b, be6652b539, bf054d3902, 2734870871, 52f49e6799, 30150a8728, ef8466992c, 8e4e70cbdc, 4dadffbbe7, 24dcebd0d9, 12a762f44f, ebf13abc28, b3552db02f, cd882c0611, 6fc9c020a4, 75284322f1, e849c47f01, 387d20a4f0
@@ -1,5 +1,7 @@

```bash
#!/bin/bash

# Maximum runtime in seconds before killing an overdue instance (e.g., 10 minutes)
MAX_RUNTIME_SECONDS=$((15 * 60))

DOUGAL_ROOT=${DOUGAL_ROOT:-$(dirname "$0")/..}
```

@@ -80,8 +82,9 @@ function run () {

```bash
  # DESCRIPTION=""
  SERVICE="deferred_imports"

  $BINDIR/send_alert.py -t "$TITLE" -s "$SERVICE" -l "critical" \
    -O "$(cat $STDOUTLOG)" -E "$(cat $STDERRLOG)"
  # Disable GitLab alerts. They're just not very practical
  # $BINDIR/send_alert.py -t "$TITLE" -s "$SERVICE" -l "critical" \
  #   -O "$(cat $STDOUTLOG)" -E "$(cat $STDERRLOG)"

  exit 2
}
```

@@ -97,14 +100,37 @@ function cleanup () {

```bash
}

if [[ -f $LOCKFILE ]]; then
  PID=$(cat "$LOCKFILE")
  if pgrep -F "$LOCKFILE"; then
    print_warning $(printf "The previous process is still running (%d)" $PID)
    exit 1
  else
    rm "$LOCKFILE"
    print_warning $(printf "Previous process (%d) not found. Must have died unexpectedly" $PID)
  fi
  PID=$(cat "$LOCKFILE")
  if kill -0 "$PID" 2>/dev/null; then # Check if process is running
    # Get elapsed time in D-HH:MM:SS format and convert to seconds
    ELAPSED_STR=$(ps -p "$PID" -o etime= | tr -d '[:space:]')
    if [ -n "$ELAPSED_STR" ]; then
      # Convert D-HH:MM:SS to seconds
      ELAPSED_SECONDS=$(echo "$ELAPSED_STR" | awk -F'[-:]' '{
        seconds = 0
        if (NF == 4) { seconds += $1 * 86400 } # Days
        if (NF >= 3) { seconds += $NF-2 * 3600 } # Hours
        if (NF >= 2) { seconds += $NF-1 * 60 } # Minutes
        seconds += $NF # Seconds
        print seconds
      }')
      if [ "$ELAPSED_SECONDS" -gt "$MAX_RUNTIME_SECONDS" ]; then
        # Kill the overdue process (SIGTERM; use -9 for SIGKILL if needed)
        kill "$PID" 2>/dev/null
        print_warning $(printf "Killed overdue process (%d) that ran for %s (%d seconds)" "$PID" "$ELAPSED_STR" "$ELAPSED_SECONDS")
        rm "$LOCKFILE"
      else
        print_warning $(printf "Previous process is still running (%d) for %s (%d seconds)" "$PID" "$ELAPSED_STR" "$ELAPSED_SECONDS")
        exit 1
      fi
    else
      print_warning $(printf "Could not retrieve elapsed time for process (%d)" "$PID")
      exit 1
    fi
  else
    rm "$LOCKFILE"
    print_warning $(printf "Previous process (%d) not found. Must have died unexpectedly" "$PID")
  fi
fi

echo "$$" > "$LOCKFILE" || {
```
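The new lock-file handling converts the `ps -o etime=` output (`[[D-]HH:]MM:SS`) into seconds before comparing it with `MAX_RUNTIME_SECONDS`. Note that in awk a field reference binds tighter than subtraction, so `$NF-2 * 3600` evaluates as `$NF - (2 * 3600)` rather than addressing the hours field; `$(NF-2)` would do that. A minimal JavaScript sketch of the intended conversion (function name and inputs are illustrative, not part of the repository):

```js
// Sketch only: convert a ps(1) etime string such as "1-02:03:04", "02:03:04"
// or "03:04" into seconds, mirroring what the awk snippet above is meant to do.
function etimeToSeconds(etime) {
  const [dayPart, clock] = etime.includes('-') ? etime.split('-') : [null, etime];
  const fields = clock.split(':').map(Number);          // [HH, MM, SS] or [MM, SS]
  const seconds = fields.reduce((acc, v) => acc * 60 + v, 0);
  return (dayPart ? Number(dayPart) * 86400 : 0) + seconds;
}

// etimeToSeconds("1-02:03:04") === 93784
```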
bin/update_comparisons.js (new executable file, 89 lines)

@@ -0,0 +1,89 @@

```js
#!/usr/bin/node

const cmp = require('../lib/www/server/lib/comparisons');

async function purgeComparisons () {
  const groups = await cmp.groups();
  const comparisons = await cmp.getGroup();

  const pids = new Set(Object.values(groups).flat().map( p => p.pid ));
  const comparison_pids = new Set(comparisons.map( c => [ c.baseline_pid, c.monitor_pid ] ).flat());

  for (const pid of comparison_pids) {
    if (!pids.has(pid)) {
      console.log(`${pid} no longer par of a group. Deleting comparisons`);

      staleComps = comparisons.filter( c => c.baseline_pid == pid || c.monitor_pid == pid );
      for (c of staleComps) {
        console.log(`Deleting comparison ${c.baseline_pid} → ${c.monitor_pid}`);
        await cmp.remove(c.baseline_pid, c.monitor_pid);
      }
    }
  }
}


async function main () {

  console.log("Looking for unreferenced comparisons to purge");
  await purgeComparisons();

  console.log("Retrieving project groups");
  const groups = await cmp.groups();

  if (!Object.keys(groups??{})?.length) {
    console.log("No groups found");
    return 0;
  }

  console.log(`Found ${Object.keys(groups)?.length} groups: ${Object.keys(groups).join(", ")}`);

  for (const groupName of Object.keys(groups)) {
    const projects = groups[groupName];

    console.log(`Fetching saved comparisons for ${groupName}`);

    const comparisons = await cmp.getGroup(groupName);

    if (!comparisons || !comparisons.length) {
      console.log(`No comparisons found for ${groupName}`);
      continue;
    }

    // Check if there are any projects that have been modified since last comparison
    // or if there are any pairs that are no longer part of the group

    const outdated = comparisons.filter( c => {
      const baseline_tstamp = projects.find( p => p.pid === c.baseline_pid )?.tstamp;
      const monitor_tstamp = projects.find( p => p.pid === c.monitor_pid )?.tstamp;
      return (c.tstamp < baseline_tstamp) || (c.tstamp < monitor_tstamp) ||
        baseline_tstamp == null || monitor_tstamp == null;
    });

    for (const comparison of outdated) {
      console.log(`Removing stale comparison: ${comparison.baseline_pid} → ${comparison.monitor_pid}`);
      await cmp.remove(comparison.baseline_pid, comparison.monitor_pid);
    }

    if (projects?.length < 2) {
      console.log(`Group ${groupName} has less than two projects. No comparisons are possible`);
      continue;
    }

    // Re-run the comparisons that are not in the database. They may
    // be missing either beacause they were not there to start with
    // or because we just removed them due to being stale

    console.log(`Recalculating group ${groupName}`);
    await cmp.saveGroup(groupName);
  }

  console.log("Comparisons update done");
  return 0;
}

if (require.main === module) {
  main();
} else {
  module.exports = main;
}
```
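Because the script exports `main` when it is loaded rather than executed directly, the same update can be driven from other server-side code. A hedged sketch (the relative require path depends on where the caller lives and is an assumption):

```js
// Sketch: trigger the comparisons update from another Node script.
const updateComparisons = require('../bin/update_comparisons'); // path is an assumption

updateComparisons()
  .then(() => console.log('Comparison refresh finished'))
  .catch(err => console.error('Comparison refresh failed:', err));
```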
etc/db/upgrades/upgrade41-v0.6.3-add-comparisons.sql (new file, 109 lines)

@@ -0,0 +1,109 @@

```sql
-- Add procedure to decimate old nav data
--
-- New schema version: 0.6.3
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade creates a new schema called `comparisons`.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update adds a `comparisons` table to a `comparisons` schema.
-- The `comparisons.comparisons` table holds 4D prospect comparison data.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- BEGIN

  CREATE SCHEMA IF NOT EXISTS comparisons
    AUTHORIZATION postgres;

  COMMENT ON SCHEMA comparisons
    IS 'Holds 4D comparison data and logic';

  CREATE TABLE IF NOT EXISTS comparisons.comparisons
  (
    type text COLLATE pg_catalog."default" NOT NULL,
    baseline_pid text COLLATE pg_catalog."default" NOT NULL,
    monitor_pid text COLLATE pg_catalog."default" NOT NULL,
    data bytea,
    meta jsonb NOT NULL DEFAULT '{}'::jsonb,
    CONSTRAINT comparisons_pkey PRIMARY KEY (baseline_pid, monitor_pid, type)
  )

  TABLESPACE pg_default;

  ALTER TABLE IF EXISTS comparisons.comparisons
    OWNER to postgres;

  -- END

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.3' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.2' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
      HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.3"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.3"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
```
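The DDL above defines the columns that server-side code can expect in `comparisons.comparisons`. For reference, a hedged node-postgres sketch of reading stored comparisons (column names come from the DDL; the `pg` Pool configuration is an assumption):

```js
// Sketch: list stored comparisons for one baseline project.
const { Pool } = require('pg');
const pool = new Pool(); // connection settings from PG* environment variables (assumption)

async function listComparisons(baselinePid) {
  const { rows } = await pool.query(
    `SELECT type, baseline_pid, monitor_pid, meta
       FROM comparisons.comparisons
      WHERE baseline_pid = $1`,
    [baselinePid]
  );
  return rows; // meta holds the jsonb summary; the bytea data column is left out here
}
```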
etc/db/upgrades/upgrade42-v0.6.4-notify-exclude-columns.sql (new file, 169 lines)

@@ -0,0 +1,169 @@

```sql
-- Add procedure to decimate old nav data
--
-- New schema version: 0.6.4
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update modifies notify() to accept, as optional arguments, the
-- names of columns that are to be *excluded* from the notification.
-- It is intended for tables with large columns which are however of
-- no particular interest in a notification.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- BEGIN

  CREATE OR REPLACE FUNCTION public.notify()
    RETURNS trigger
    LANGUAGE 'plpgsql'
    COST 100
    VOLATILE NOT LEAKPROOF
  AS $BODY$
  DECLARE
    channel text := TG_ARGV[0];
    pid text;
    payload text;
    notification text;
    payload_id integer;
    old_json jsonb;
    new_json jsonb;
    excluded_col text;
    i integer;
  BEGIN

    -- Fetch pid
    SELECT projects.pid INTO pid FROM projects WHERE schema = TG_TABLE_SCHEMA;

    -- Build old and new as jsonb, excluding specified columns if provided
    IF OLD IS NOT NULL THEN
      old_json := row_to_json(OLD)::jsonb;
      FOR i IN 1 .. TG_NARGS - 1 LOOP
        excluded_col := TG_ARGV[i];
        old_json := old_json - excluded_col;
      END LOOP;
    ELSE
      old_json := NULL;
    END IF;

    IF NEW IS NOT NULL THEN
      new_json := row_to_json(NEW)::jsonb;
      FOR i IN 1 .. TG_NARGS - 1 LOOP
        excluded_col := TG_ARGV[i];
        new_json := new_json - excluded_col;
      END LOOP;
    ELSE
      new_json := NULL;
    END IF;

    -- Build payload
    payload := json_build_object(
      'tstamp', CURRENT_TIMESTAMP,
      'operation', TG_OP,
      'schema', TG_TABLE_SCHEMA,
      'table', TG_TABLE_NAME,
      'old', old_json,
      'new', new_json,
      'pid', pid
    )::text;

    -- Handle large payloads
    IF octet_length(payload) < 1000 THEN
      PERFORM pg_notify(channel, payload);
    ELSE
      -- Store large payload and notify with ID (as before)
      INSERT INTO notify_payloads (payload) VALUES (payload) RETURNING id INTO payload_id;

      notification := json_build_object(
        'tstamp', CURRENT_TIMESTAMP,
        'operation', TG_OP,
        'schema', TG_TABLE_SCHEMA,
        'table', TG_TABLE_NAME,
        'pid', pid,
        'payload_id', payload_id
      )::text;

      PERFORM pg_notify(channel, notification);
      RAISE INFO 'Payload over limit';
    END IF;
    RETURN NULL;
  END;
  $BODY$;

  ALTER FUNCTION public.notify()
    OWNER TO postgres;

  -- END

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.4' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.3' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
      HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.4"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.4"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
```
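As the function above shows, small notifications carry the full JSON on the channel, while anything at or above 1000 bytes is parked in `notify_payloads` and only a `payload_id` is published. A hedged node-postgres listener sketch (the channel name, and how `notify_payloads.payload` is typed, are assumptions based on the SQL above):

```js
// Sketch: consume notifications produced by public.notify().
const { Client } = require('pg');

async function listen(channel) {
  const client = new Client();
  await client.connect();
  await client.query(`LISTEN ${channel}`);

  client.on('notification', async (msg) => {
    let payload = JSON.parse(msg.payload);
    if (payload.payload_id != null) {
      // Large payload: the full row data was stored in notify_payloads instead.
      const { rows } = await client.query(
        'SELECT payload FROM notify_payloads WHERE id = $1',
        [payload.payload_id]
      );
      payload = typeof rows[0].payload === 'string'
        ? JSON.parse(rows[0].payload)
        : rows[0].payload;
    }
    console.log(payload.operation, payload.schema, payload.table, payload.pid);
  });
}
```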
@@ -0,0 +1,96 @@

```sql
-- Add procedure to decimate old nav data
--
-- New schema version: 0.6.5
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update modifies notify() to accept, as optional arguments, the
-- names of columns that are to be *excluded* from the notification.
-- It is intended for tables with large columns which are however of
-- no particular interest in a notification.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- BEGIN

  CREATE OR REPLACE TRIGGER comparisons_tg
    AFTER INSERT OR DELETE OR UPDATE
    ON comparisons.comparisons
    FOR EACH ROW
    EXECUTE FUNCTION public.notify('comparisons', 'data');

  -- END

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.5' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.4' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
      HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.5"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.5"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
```
@@ -0,0 +1,157 @@

```sql
-- Add procedure to decimate old nav data
--
-- New schema version: 0.6.6
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This adds a last_project_update(pid) function. It takes a project ID
-- and returns the last known timestamp from that project. Timestamps
-- are derived from multiple sources:
--
-- - raw_shots table
-- - final_shots table
-- - events_log_full table
-- - info table where key = 'qc'
-- - files table, from the hashes (which contain the file's mtime)
-- - project configuration, looking for an _updatedOn property
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- BEGIN

  CREATE OR REPLACE FUNCTION public.last_project_update(p_pid text)
    RETURNS timestamp with time zone
    LANGUAGE plpgsql
  AS $function$
  DECLARE
    v_last_ts timestamptz := NULL;
    v_current_ts timestamptz;
    v_current_str text;
    v_current_unix numeric;
    v_sid_rec record;
  BEGIN
    -- From raw_shots, final_shots, info, and files
    FOR v_sid_rec IN SELECT schema FROM public.projects WHERE pid = p_pid
    LOOP
      -- From raw_shots
      EXECUTE 'SELECT max(tstamp) FROM ' || v_sid_rec.schema || '.raw_shots' INTO v_current_ts;
      IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
        v_last_ts := v_current_ts;
      END IF;

      -- From final_shots
      EXECUTE 'SELECT max(tstamp) FROM ' || v_sid_rec.schema || '.final_shots' INTO v_current_ts;
      IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
        v_last_ts := v_current_ts;
      END IF;

      -- From info where key = 'qc'
      EXECUTE 'SELECT value->>''updatedOn'' FROM ' || v_sid_rec.schema || '.info WHERE key = ''qc''' INTO v_current_str;
      IF v_current_str IS NOT NULL THEN
        v_current_ts := v_current_str::timestamptz;
        IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
          v_last_ts := v_current_ts;
        END IF;
      END IF;

      -- From files hash second part, only for valid colon-separated hashes
      EXECUTE 'SELECT max( split_part(hash, '':'', 2)::numeric ) FROM ' || v_sid_rec.schema || '.files WHERE hash ~ ''^[0-9]+:[0-9]+\\.[0-9]+:[0-9]+\\.[0-9]+:[0-9a-f]+$''' INTO v_current_unix;
      IF v_current_unix IS NOT NULL THEN
        v_current_ts := to_timestamp(v_current_unix);
        IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
          v_last_ts := v_current_ts;
        END IF;
      END IF;

      -- From event_log_full
      EXECUTE 'SELECT max(tstamp) FROM ' || v_sid_rec.schema || '.event_log_full' INTO v_current_ts;
      IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
        v_last_ts := v_current_ts;
      END IF;
    END LOOP;

    -- From projects.meta->_updatedOn
    SELECT (meta->>'_updatedOn')::timestamptz FROM public.projects WHERE pid = p_pid INTO v_current_ts;
    IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
      v_last_ts := v_current_ts;
    END IF;

    RETURN v_last_ts;
  END;
  $function$;

  -- END

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.6.6' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.5' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
      HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.6"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.6"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
```
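The header comment above lists the sources that feed `last_project_update(pid)`; from application code it is a single scalar query. A hedged sketch (connection handling is an assumption):

```js
// Sketch: fetch the most recent activity timestamp for one project.
const { Pool } = require('pg');
const pool = new Pool();

async function lastProjectUpdate(pid) {
  const { rows } = await pool.query(
    'SELECT public.last_project_update($1) AS tstamp',
    [pid]
  );
  return rows[0].tstamp; // timestamptz, or null if no source had a timestamp
}
```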
@@ -503,6 +503,37 @@ class DougalBinaryBundle extends ArrayBuffer {

```js
    return ab;
  }

  get records () {
    const data = [];
    for (const record of this) {
      data.push(record.slice(1));
    }
    return data;
  }

  [Symbol.iterator]() {
    const chunks = this.chunks();
    let chunkIndex = 0;
    let chunkIterator = chunks.length > 0 ? chunks[0][Symbol.iterator]() : null;

    return {
      next() {
        if (!chunkIterator) {
          return { done: true };
        }

        let result = chunkIterator.next();
        while (result.done && chunkIndex < chunks.length - 1) {
          chunkIndex++;
          chunkIterator = chunks[chunkIndex][Symbol.iterator]();
          result = chunkIterator.next();
        }

        return result;
      }
    };
  }

}
```
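The bundle iterator added above walks each chunk's own iterator in turn, so a whole bundle can be consumed with an ordinary `for...of` loop, and `records` is just that iteration with the leading `udv` element stripped off. A hedged usage sketch (the `bundle` variable is illustrative):

```js
// Sketch: iterate a parsed bundle record by record.
// Each record is [udv, i, j, ...values], as built by getRecord() below.
for (const record of bundle) {
  const [udv, i, j] = record;
  // ...process one point...
}

// Equivalent bulk form, without the udv element:
const rows = bundle.records; // array of [i, j, ...values]
```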
@@ -693,7 +724,7 @@ class DougalBinaryChunkSequential extends ArrayBuffer {

```js
  getRecord (index) {
    if (index < 0 || index >= this.jCount) throw new Error(`Invalid record index: ${index}`);

    const arr = [thid.udv, this.i, this.j0 + index * this.Δj];
    const arr = [this.udv, this.i, this.j0 + index * this.Δj];

    for (let m = 0; m < this.ΔelemCount; m++) {
      const values = this.Δelem(m);
```
@@ -707,6 +738,21 @@ class DougalBinaryChunkSequential extends ArrayBuffer {

```js
    return arr;
  }

  [Symbol.iterator]() {
    let index = 0;
    const chunk = this;
    return {
      next() {
        if (index < chunk.jCount) {
          return { value: chunk.getRecord(index++), done: false };
        } else {
          return { done: true };
        }
      }
    };
  }

}
```
@@ -901,6 +947,21 @@ class DougalBinaryChunkInterleaved extends ArrayBuffer {

```js
    return arr;
  }

  [Symbol.iterator]() {
    let index = 0;
    const chunk = this;
    return {
      next() {
        if (index < chunk.jCount) {
          return { value: chunk.getRecord(index++), done: false };
        } else {
          return { done: true };
        }
      }
    };
  }

}
```
@@ -85,7 +85,9 @@ export default {

```js
    },

    handleProject (context, {payload}) {
      this.refreshProjects();
      if (payload?.table == "public") {
        this.refreshProjects();
      }
    },

    registerNotificationHandlers () {
```
@@ -39,7 +39,8 @@ export default {

```js
        default:
          return {
            editable: false,
            displaylogo: false
            displaylogo: false,
            responsive: true
          };
      }
    },
```
@@ -48,7 +49,8 @@ export default {

```js
      const base = {
        font: {
          color: this.$vuetify.theme.isDark ? "#fff" : undefined
        }
        },
        autosize: true
      };

      switch (this.facet) {
```
@@ -274,18 +276,25 @@ export default {

```js
    replot () {
      if (this.plotted) {
        const ref = this.$refs.graph;
        Plotly.relayout(ref, {
          width: ref.clientWidth,
          height: ref.clientHeight
        });
        if (ref && ref.clientWidth > 0 && ref.clientHeight > 0) {
          Plotly.relayout(ref, {
            width: ref.clientWidth,
            height: ref.clientHeight
          });
        }
      }
    }

  },

  mounted () {
    this.resizeObserver = new ResizeObserver(this.replot)
    this.resizeObserver.observe(this.$refs.graph);
    this.$nextTick( () => {
      if (this.items?.length) {
        this.plot();
      }
      this.resizeObserver = new ResizeObserver(this.replot)
      this.resizeObserver.observe(this.$refs.graph);
    });
  },

  beforeDestroy () {
```
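The reworked `replot()` skips `Plotly.relayout` while the container still reports a zero size (for example while hidden), and `mounted()` now creates the `ResizeObserver` inside `$nextTick` so the graph element exists before it is observed. The same guard in isolation, as a hedged sketch:

```js
// Sketch of the guard: only relayout when the element actually has a layout.
function safeRelayout(el) {
  if (el && el.clientWidth > 0 && el.clientHeight > 0) {
    Plotly.relayout(el, { width: el.clientWidth, height: el.clientHeight });
  }
}

// Observe after the DOM settles (Vue 2 style, matching the component above):
// this.$nextTick(() => {
//   this.resizeObserver = new ResizeObserver(() => safeRelayout(this.$refs.graph));
//   this.resizeObserver.observe(this.$refs.graph);
// });
```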
@@ -36,7 +36,8 @@ export default {

```js
    config () {
      return {
        editable: false,
        displaylogo: false
        displaylogo: false,
        responsive: true
      };
    },
```
@@ -53,7 +54,8 @@ export default {

```js
          title: "Time (s)"
        },
        plot_bgcolor:"rgba(0,0,0,0)",
        paper_bgcolor:"rgba(0,0,0,0)"
        paper_bgcolor:"rgba(0,0,0,0)",
        autosize: true
      };
    },
```
@@ -154,10 +156,12 @@ export default {

```js
    replot () {
      if (this.plotted) {
        const ref = this.$refs.graph;
        Plotly.relayout(ref, {
          width: ref.clientWidth,
          height: ref.clientHeight
        });
        if (ref && ref.clientWidth > 0 && ref.clientHeight > 0) {
          Plotly.relayout(ref, {
            width: ref.clientWidth,
            height: ref.clientHeight
          });
        }
      }
    },
```
@@ -190,8 +194,13 @@ export default {

```js
  },

  mounted () {
    this.resizeObserver = new ResizeObserver(this.replot)
    this.resizeObserver.observe(this.$refs.graph);
    this.$nextTick( () => {
      if (this.items?.length) {
        this.plot();
      }
      this.resizeObserver = new ResizeObserver(this.replot)
      this.resizeObserver.observe(this.$refs.graph);
    });
  },

  beforeDestroy () {
```
@@ -0,0 +1,187 @@

```vue
<template>
  <v-card v-if="comparison" class="ma-1">
    <v-card-title>Comparison Summary: Baseline {{ baseline.pid }} vs Monitor {{ monitor.pid }}</v-card-title>
    <v-card-text>
      <v-row>
        <v-col cols="12" md="6">
          <h3>Deviation Statistics</h3>
          <v-simple-table dense>
            <template v-slot:default>
              <thead>
                <tr>
                  <th>Metric</th>
                  <th>I (m)</th>
                  <th>J (m)</th>
                </tr>
              </thead>
              <tbody>
                <tr>
                  <td>Mean (μ)</td>
                  <td>{{ comparison['μ'][0].toFixed(3) }}</td>
                  <td>{{ comparison['μ'][1].toFixed(3) }}</td>
                </tr>
                <tr>
                  <td>Std Dev (σ)</td>
                  <td>{{ comparison['σ'][0].toFixed(3) }}</td>
                  <td>{{ comparison['σ'][1].toFixed(3) }}</td>
                </tr>
                <tr>
                  <td>RMS</td>
                  <td>{{ comparison.rms[0].toFixed(3) }}</td>
                  <td>{{ comparison.rms[1].toFixed(3) }}</td>
                </tr>
              </tbody>
            </template>
          </v-simple-table>

          <h3 class="mt-4">Error distribution</h3>
          <ul>
            <li title="Relative to I-axis positive direction">Primary Direction: {{ (comparison.primaryDirection * 180 / Math.PI).toFixed(2) }}°</li>
            <li>Anisotropy: {{ comparison.anisotropy.toFixed(2) }}</li>
            <li title="Length of the semi-major axis of the error ellipse">Semi-Major Axis: {{ semiMajorAxis.toFixed(2) }} m</li>
            <li title="Length of the semi-minor axis of the error ellipse">Semi-Minor Axis: {{ semiMinorAxis.toFixed(2) }} m</li>
            <li title="Area of the error ellipse">Error Ellipse Area: {{ ellipseArea.toFixed(2) }} m²</li>
          </ul>

          <h3 class="mt-4">Counts</h3>
          <ul>
            <li title="Unique line / point pairs found in both projects">Common Points: {{ comparison.common }}</li>
            <li title="Total number of points compared, including reshoots, infills, etc.">Comparison Length: {{ comparison.length }}</li>
            <li title="Number of points in the baseline project">Baseline Points: {{ comparison.baselineLength }} (Unique: {{ comparison.baselineUniqueLength }})</li>
            <li title="Number of points in the monitor project">Monitor Points: {{ comparison.monitorLength }} (Unique: {{ comparison.monitorUniqueLength }})</li>
          </ul>

          <p class="mt-3" title="Date and time when the comparison was last performed">Computation timestamp: {{ new Date(comparison.tstamp).toLocaleString() }}</p>
        </v-col>

        <v-col cols="12" md="6">
          <h3>Error Ellipse</h3>
          <svg width="300" height="300" style="border: 1px solid #ccc;">
            <g :transform="`translate(150, 150) scale(${ellipseScale})`">
              <line x1="0" y1="-150" x2="0" y2="150" stroke="lightgray" stroke-dasharray="5,5"/>
              <line x1="-150" y1="0" x2="150" y2="0" stroke="lightgray" stroke-dasharray="5,5"/>
              <ellipse
                :rx="Math.sqrt(comparison.eigenvalues[0])"
                :ry="Math.sqrt(comparison.eigenvalues[1])"
                :transform="`rotate(${ellipseAngle})`"
                fill="none"
                stroke="blue"
                stroke-width="2"
              />
              <line
                :x1="0"
                :y1="0"
                :x2="Math.sqrt(comparison.eigenvalues[0]) * Math.cos(ellipseRad)"
                :y2="Math.sqrt(comparison.eigenvalues[0]) * Math.sin(ellipseRad)"
                stroke="red"
                stroke-width="2"
                arrow-end="classic-wide-long"
              />
              <line
                :x1="0"
                :y1="0"
                :x2="Math.sqrt(comparison.eigenvalues[1]) * Math.cos(ellipseRad + Math.PI / 2)"
                :y2="Math.sqrt(comparison.eigenvalues[1]) * Math.sin(ellipseRad + Math.PI / 2)"
                stroke="green"
                stroke-width="2"
                arrow-end="classic-wide-long"
              />
            </g>
          </svg>
          <p class="text-caption">Ellipse scaled for visibility (factor: {{ ellipseScale.toFixed(1) }}). Axes represent sqrt(eigenvalues).</p>
        </v-col>
      </v-row>
    </v-card-text>
  </v-card>
</template>

<script>
export default {
  name: "DougalGroupComparisonSummary",

  props: {
    baseline: { type: Object, required: true },
    monitor: { type: Object, required: true },
    comparison: { type: Object, required: true }
  },

  data () {
    return {
    };
  },

  computed: {

    ellipseAngle () {
      if (!this.comparison) return 0;
      const ev = this.comparison.eigenvectors[0];
      return Math.atan2(ev[1], ev[0]) * 180 / Math.PI;
    },

    ellipseRad () {
      return this.ellipseAngle * Math.PI / 180;
    },

    ellipseRx () {
      if (!this.comparison) return 0;
      return Math.sqrt(this.comparison.eigenvalues[0]) * this.ellipseScale;
    },

    ellipseRy () {
      if (!this.comparison) return 0;
      return Math.sqrt(this.comparison.eigenvalues[1]) * this.ellipseScale;
    },

    ellipseScale () {
      if (!this.comparison) return 1;
      const maxSigma = Math.max(
        Math.sqrt(this.comparison.eigenvalues[0]),
        Math.sqrt(this.comparison.eigenvalues[1])
      );
      const maxMu = Math.max(
        Math.abs(this.comparison['μ'][0]),
        Math.abs(this.comparison['μ'][1])
      );
      //const maxExtent = maxMu + 3 * maxSigma;
      const maxExtent = 20;
      return 100 / maxExtent; // Adjust scale to fit within ~200 pixels diameter
    },

    ellipseArea () {
      if (!this.comparison) return 0;
      const a = Math.sqrt(this.comparison.eigenvalues[0]);
      const b = Math.sqrt(this.comparison.eigenvalues[1]);
      return Math.PI * a * b;
    },

    semiMajorAxis () {
      if (!this.comparison) return 0;
      return Math.max(
        Math.sqrt(this.comparison.eigenvalues[0]),
        Math.sqrt(this.comparison.eigenvalues[1])
      );
    },

    semiMinorAxis () {
      if (!this.comparison) return 0;
      return Math.min(
        Math.sqrt(this.comparison.eigenvalues[0]),
        Math.sqrt(this.comparison.eigenvalues[1])
      );
    },

    meanX () {
      return this.comparison ? this.comparison['μ'][0] : 0;
    },

    meanY () {
      return this.comparison ? this.comparison['μ'][1] : 0;
    },

    ellipseViewBox () {
      return '-150 -150 300 300';
    },

  }
}
</script>
```
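The error-ellipse quantities shown by this component follow directly from the eigen-decomposition of the deviation covariance: the semi-axes are the square roots of the eigenvalues, the orientation comes from the first eigenvector, and the area follows from the semi-axes.

```latex
a = \sqrt{\lambda_{\max}}, \qquad
b = \sqrt{\lambda_{\min}}, \qquad
A = \pi a b = \pi \sqrt{\lambda_1 \lambda_2}, \qquad
\theta = \operatorname{atan2}\!\left(v_{1,y},\, v_{1,x}\right)
```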
lib/www/client/source/src/components/groups/group-map.vue (new file, 1302 lines; diff suppressed because it is too large)
@@ -0,0 +1,118 @@

```vue
<template>
  <v-card class="ma-1">
    <v-card-title>Group Repeatability Summary</v-card-title>
    <v-card-text>
      <p>Error ellipse area for each baseline-monitor pair. Lower values indicate better repeatability. Colors range from green (best) to red (worst).</p>
      <v-simple-table dense>
        <thead>
          <tr>
            <th>Baseline \ Monitor</th>
            <th v-for="project in projects" :key="project.pid">{{ project.pid }}</th>
          </tr>
        </thead>
        <tbody>
          <tr v-for="(baselineProject, rowIndex) in projects" :key="baselineProject.pid">
            <td>{{ baselineProject.pid }}</td>
            <td v-for="(monitorProject, colIndex) in projects" :key="monitorProject.pid">
              <v-tooltip v-if="colIndex > rowIndex" top>
                <template v-slot:activator="{ on, attrs }">
                  <div
                    :style="{ backgroundColor: getEllipseAreaColor(baselineProject.pid, monitorProject.pid), color: 'white', textAlign: 'center', padding: '4px' }"
                    v-bind="attrs"
                    v-on="on"
                    @click="emitInput(baselineProject, monitorProject)"
                  >
                    {{ formatEllipseArea(baselineProject.pid, monitorProject.pid) }}
                  </div>
                </template>
                <span v-if="getComp(baselineProject.pid, monitorProject.pid)">
                  <div>σ_i: {{ getComp(baselineProject.pid, monitorProject.pid).meta['σ'][0].toFixed(2) }} m</div>
                  <div>σ_j: {{ getComp(baselineProject.pid, monitorProject.pid).meta['σ'][1].toFixed(2) }} m</div>
                  <div>Anisotropy: {{ getComp(baselineProject.pid, monitorProject.pid).meta.anisotropy.toFixed(0) }}</div>
                  <div>Ellipse Area: {{ getEllipseArea(baselineProject.pid, monitorProject.pid).toFixed(2) }} m²</div>
                  <div>Primary Direction: {{ formatPrimaryDirection(getComp(baselineProject.pid, monitorProject.pid)) }}°</div>
                </span>
              </v-tooltip>
            </td>
          </tr>
        </tbody>
      </v-simple-table>
    </v-card-text>
  </v-card>
</template>

<script>
export default {
  name: 'DougalGroupRepeatabilitySummary',

  props: {
    comparisons: {
      type: Array,
      required: true
    },
    projects: {
      type: Array,
      required: true
    }
  },

  data () {
    return {
    };
  },

  computed: {
    compMap () {
      return new Map(this.comparisons.map(c => [`${c.baseline_pid}-${c.monitor_pid}`, c]));
    },
    minEllipseArea () {
      if (!this.comparisons.length) return 0;
      return Math.min(...this.comparisons.map(c => {
        const a = Math.sqrt(c.meta.eigenvalues[0]);
        const b = Math.sqrt(c.meta.eigenvalues[1]);
        return Math.PI * a * b;
      }));
    },
    maxEllipseArea () {
      if (!this.comparisons.length) return 0;
      return Math.max(...this.comparisons.map(c => {
        const a = Math.sqrt(c.meta.eigenvalues[0]);
        const b = Math.sqrt(c.meta.eigenvalues[1]);
        return Math.PI * a * b;
      }));
    }
  },
  methods: {
    getComp (basePid, monPid) {
      return this.compMap.get(`${basePid}-${monPid}`);
    },
    getEllipseArea (basePid, monPid) {
      const comp = this.getComp(basePid, monPid);
      if (!comp) return null;
      const a = Math.sqrt(comp.meta.eigenvalues[0]);
      const b = Math.sqrt(comp.meta.eigenvalues[1]);
      return Math.PI * a * b;
    },
    formatEllipseArea (basePid, monPid) {
      const val = this.getEllipseArea(basePid, monPid);
      return val !== null ? val.toFixed(1) : '';
    },
    getEllipseAreaColor (basePid, monPid) {
      const val = this.getEllipseArea(basePid, monPid);
      if (val === null) return '';
      const ratio = (val - this.minEllipseArea) / (this.maxEllipseArea - this.minEllipseArea);
      const hue = (1 - ratio) * 120;
      return `hsl(${hue}, 70%, 70%)`;
    },
    formatPrimaryDirection (comp) {
      if (!comp) return '';
      return (comp.meta.primaryDirection * 180 / Math.PI).toFixed(1);
    },
    emitInput (baselineProject, monitorProject) {
      if (this.getComp(baselineProject.pid, monitorProject.pid)) {
        this.$emit('input', baselineProject, monitorProject);
      }
    }
  }
}
</script>
```
@@ -2,6 +2,7 @@

```html
  <v-dialog
    v-model="dialog"
    max-width="500"
    scrollable
    style="z-index:2020;"
  >
    <template v-slot:activator="{ on, attrs }">
```

@@ -58,6 +59,9 @@

```html
        </v-window-item>
        <v-window-item value="serverinfo">
          <dougal-server-status :status="serverStatus"></dougal-server-status>
        </v-window-item>
      </v-window>

      <v-divider></v-divider>
```

@@ -69,8 +73,7 @@

```html
        text
        :href="`mailto:${email}?Subject=Question`"
      >
        <v-icon class="d-lg-none">mdi-help-circle</v-icon>
        <span class="d-none d-lg-inline">Ask a question</span>
        <v-icon title="Ask a question">mdi-help-circle</v-icon>
      </v-btn>

      <v-btn
```

@@ -78,8 +81,7 @@

```html
        text
        href="mailto:dougal-support@aaltronav.eu?Subject=Bug report"
      >
        <v-icon class="d-lg-none">mdi-bug</v-icon>
        <span class="d-none d-lg-inline">Report a bug</span>
        <v-icon title="Report a bug">mdi-bug</v-icon>
      </v-btn>

      <!---
```

@@ -93,16 +95,36 @@

```html
      </v-btn>
      --->

      <v-btn
        color="info"
        text
        title="View support info"
        :input-value="page == 'support'"
        @click="page = 'support'"
      >
        <v-icon>mdi-account-question</v-icon>
      </v-btn>

      <v-btn v-if="versionHistory"
        color="info"
        text
        :title="page == 'support' ? 'View release notes' : 'View support info'"
        title="View release notes"
        :input-value="page == 'changelog'"
        @click="page = page == 'support' ? 'changelog' : 'support'"
        @click="page = 'changelog'"
      >
        <v-icon>mdi-history</v-icon>
      </v-btn>

      <v-btn v-if="serverStatus"
        color="info"
        text
        title="View server status"
        :input-value="page == 'serverinfo'"
        @click="page = 'serverinfo'"
      >
        <v-icon>mdi-server-network</v-icon>
      </v-btn>

      <v-spacer></v-spacer>
```

@@ -124,46 +146,110 @@

```js
<script>
import { mapActions, mapGetters } from 'vuex';
import DougalServerStatus from './server-status';

export default {
  name: 'DougalHelpDialog',

  components: {
    DougalServerStatus
  },

  data () {
    return {
      dialog: false,
      email: "dougal-support@aaltronav.eu",
      feed: btoa(encodeURIComponent("https://gitlab.com/wgp/dougal/software.atom?feed_token=XSPpvsYEny8YmH75Nz5W")),
      serverStatus: null,
      clientVersion: process.env.DOUGAL_FRONTEND_VERSION ?? "(unknown)",
      serverVersion: null,
      versionHistory: null,
      releaseHistory: [],
      releaseShown: null,
      page: "support"
      page: "support",

      lastUpdate: 0,
      updateInterval: 12000,
      refreshTimer: null
    };
  },

  computed: {
    sinceUpdate () {
      return this.lastUpdate
        ? (Date.now() - this.lastUpdate)
        : +Infinity;
    }
  },

  watch: {
    dialog(newVal) {
      if (newVal) {
        this.startAutoRefresh();
      } else {
        this.stopAutoRefresh();
      }
    },
    page(newVal) {
      if (newVal === 'serverinfo' && this.dialog) {
        this.getServerStatus(); // Immediate update when switching to serverinfo
        this.startAutoRefresh();
      } else {
        this.stopAutoRefresh();
      }
    }
  },

  methods: {
    async getServerVersion () {
      if (!this.serverVersion) {
        const version = await this.api(['/version', {}, null, {silent:true}]);
        this.serverVersion = version?.tag ?? "(unknown)";
        if (version) this.lastUpdate = Date.now();
      }
      if (!this.versionHistory) {
        const history = await this.api(['/version/history?count=3', {}, null, {silent:true}]);
        const history = await this.api(['/version/history?count=6', {}, null, {silent:true}]);
        this.releaseHistory = history;
        this.versionHistory = history?.[this.serverVersion.replace(/-.*$/, "")] ?? null;
      }
    },

    async getServerStatus () {
      const status = await this.api(['/diagnostics', {}, null, {silent: true}]);
      if (status) {
        this.serverStatus = status;
        this.lastUpdate = Date.now();
      }
    },

    startAutoRefresh() {
      if (this.refreshTimer) return; // Prevent multiple timers
      this.refreshTimer = setInterval(() => {
        if (this.dialog && this.page === 'serverinfo') {
          this.getServerStatus();
          // Optionally refresh server version if needed
          // this.getServerVersion();
        }
      }, this.updateInterval);
    },

    stopAutoRefresh() {
      if (this.refreshTimer) {
        clearInterval(this.refreshTimer);
        this.refreshTimer = null;
      }
    },

    ...mapActions(["api"])
  },

  async mounted () {
    this.getServerVersion();
    this.getServerStatus();
  },

  async beforeUpdate () {
    this.getServerVersion();
  beforeDestroy() {
    this.stopAutoRefresh(); // Clean up timer on component destruction
  }

};
```
lib/www/client/source/src/components/server-status.vue (new file, 213 lines)

@@ -0,0 +1,213 @@

```vue
<template>
  <v-card max-width="800" max-height="600" class="mx-auto" style="overflow-y: auto;">
    <v-card-title class="headline">
      Server status – {{ status.hostname }}
    </v-card-title>
    <v-card-text>
      <v-expansion-panels accordion>
        <!-- System Info -->
        <v-expansion-panel>
          <v-expansion-panel-header>System Info</v-expansion-panel-header>
          <v-expansion-panel-content>
            <v-row>
              <v-col cols="6">
                <strong>Uptime:</strong> {{ formatUptime(status.uptime) }}
              </v-col>
              <v-col cols="6">
                <strong>Load:</strong> {{ status.loadavg[0].toFixed(2) }} / {{ status.loadavg[1].toFixed(2) }} / {{ status.loadavg[2].toFixed(2) }}
                <v-progress-linear
                  :value="loadAvgPercent"
                  :color="getLoadAvgColor(status.loadavg[0])"
                  height="6"
                  rounded
                ></v-progress-linear>
                <div class="text-caption">
                  1-min Load: {{ status.loadavg[0].toFixed(2) }} ({{ loadAvgPercent.toFixed(1) }}% of max)
                </div>
              </v-col>
            </v-row>
          </v-expansion-panel-content>
        </v-expansion-panel>

        <!-- Memory -->
        <v-expansion-panel>
          <v-expansion-panel-header>Memory</v-expansion-panel-header>
          <v-expansion-panel-content>
            <v-progress-linear
              :value="memoryUsedPercent"
              :color="getProgressColor(memoryUsedPercent)"
              height="10"
              rounded
            ></v-progress-linear>
            <div class="text-caption mt-2">
              Used: {{ formatBytes(status.memory.total - status.memory.free) }} / Total: {{ formatBytes(status.memory.total) }} ({{ memoryUsedPercent.toFixed(1) }}%)
            </div>
          </v-expansion-panel-content>
        </v-expansion-panel>

        <!-- CPUs -->
        <v-expansion-panel>
          <v-expansion-panel-header>CPUs ({{ status.cpus.length }} cores)</v-expansion-panel-header>
          <v-expansion-panel-content>
            <v-row dense>
              <v-col v-for="(cpu, index) in status.cpus" :key="index" cols="12" sm="6">
                <v-card outlined class="pa-2">
                  <div class="text-caption">Core {{ index + 1 }}: {{ cpu.model }} @ {{ cpu.speed }} MHz</div>
                  <v-progress-linear
                    :value="cpuUsagePercent(cpu)"
                    :color="getProgressColor(cpuUsagePercent(cpu))"
                    height="8"
                    rounded
                  ></v-progress-linear>
                  <div class="text-caption">
                    Usage: {{ cpuUsagePercent(cpu).toFixed(1) }}% (Idle: {{ cpuIdlePercent(cpu).toFixed(1) }}%)
                  </div>
                </v-card>
              </v-col>
            </v-row>
          </v-expansion-panel-content>
        </v-expansion-panel>

        <!-- Network Interfaces -->
        <v-expansion-panel>
          <v-expansion-panel-header>Network Interfaces</v-expansion-panel-header>
          <v-expansion-panel-content>
            <v-list dense>
              <v-list-item v-for="(iface, name) in status.networkInterfaces" :key="name">
                <v-list-item-content>
                  <v-list-item-title>{{ name }}</v-list-item-title>
                  <v-list-item-subtitle v-for="(addr, idx) in iface" :key="idx">
                    {{ addr.family }}: {{ addr.address }} (Netmask: {{ addr.netmask }})
                  </v-list-item-subtitle>
                </v-list-item-content>
              </v-list-item>
            </v-list>
          </v-expansion-panel-content>
        </v-expansion-panel>

        <!-- Storage -->
        <v-expansion-panel>
          <v-expansion-panel-header>Storage</v-expansion-panel-header>
          <v-expansion-panel-content>
            <!-- Root -->
            <div class="mb-4">
              <strong>Root (/):</strong>
              <v-progress-linear
                :value="status.storage.root.usedPercent"
                :color="getProgressColor(status.storage.root.usedPercent)"
                height="10"
                rounded
              ></v-progress-linear>
              <div class="text-caption">
                Used: {{ formatBytes(status.storage.root.used) }} / Total: {{ formatBytes(status.storage.root.total) }} ({{ status.storage.root.usedPercent.toFixed(1) }}%)
              </div>
            </div>
            <!-- Data subfolders -->
            <div>
              <strong>Data:</strong>
              <v-expansion-panels flat>
                <v-expansion-panel v-for="(folder, name) in status.storage.data" :key="name">
                  <v-expansion-panel-header disable-icon-rotate>{{ name }}</v-expansion-panel-header>
                  <v-expansion-panel-content>
                    <v-progress-linear
                      :value="folder.usedPercent"
                      :color="getProgressColor(folder.usedPercent)"
                      height="10"
                      rounded
                    ></v-progress-linear>
                    <div class="text-caption">
                      Used: {{ formatBytes(folder.used) }} / Total: {{ formatBytes(folder.total) }} ({{ folder.usedPercent.toFixed(1) }}%)
                    </div>
                  </v-expansion-panel-content>
                </v-expansion-panel>
              </v-expansion-panels>
            </div>
          </v-expansion-panel-content>
        </v-expansion-panel>

        <!-- Database -->
        <v-expansion-panel>
          <v-expansion-panel-header>Database</v-expansion-panel-header>
          <v-expansion-panel-content>
            <div class="mb-2">
              <strong>Total Size:</strong> {{ formatBytes(status.database.size) }}
            </div>
            <v-list dense>
              <v-list-item v-for="(project, name) in status.database.projects" :key="name">
                <v-list-item-content>
                  <v-list-item-title>{{ name }}</v-list-item-title>
                  <v-progress-linear
                    :value="project.percent"
                    :color="getProgressColor(project.percent)"
                    height="8"
                    rounded
                  ></v-progress-linear>
                  <v-list-item-subtitle>
                    Size: {{ formatBytes(project.size) }} ({{ project.percent.toFixed(2) }}%)
                  </v-list-item-subtitle>
                </v-list-item-content>
              </v-list-item>
            </v-list>
          </v-expansion-panel-content>
        </v-expansion-panel>
      </v-expansion-panels>
    </v-card-text>
  </v-card>
</template>

<script>
export default {
  name: "DougalServerStatus",
  props: {
    status: {
      type: Object,
      required: true
    }
  },
  computed: {
    memoryUsedPercent() {
      return ((this.status.memory.total - this.status.memory.free) / this.status.memory.total) * 100;
    },
    loadAvgPercent() {
      const maxLoad = this.status.cpus.length * 2; // Assume 4x cores as max for scaling
      return Math.min((this.status.loadavg[0] / maxLoad) * 100, 100); // Cap at 100%
    }
  },
  methods: {
    getProgressColor(value) {
      if (value >= 80) return 'error'; // Red for 80–100%
      if (value >= 60) return 'warning'; // Yellow for 60–80%
      return 'success'; // Green for 0–60%
    },
    getLoadAvgColor(load) {
      const coreCount = this.status.cpus.length;
      if (load >= coreCount * 2) return 'error'; // Red for load ≥ 2x cores
      if (load >= coreCount) return 'warning'; // Yellow for load ≥ 1x cores but < 2x
      return 'success'; // Green for load < 1x cores
    },
    formatBytes(bytes) {
      if (bytes === 0) return '0 Bytes';
      const k = 1024;
      const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB'];
      const i = Math.floor(Math.log(bytes) / Math.log(k));
      return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
    },
    formatUptime(seconds) {
      const days = Math.floor(seconds / 86400);
      seconds %= 86400;
      const hours = Math.floor(seconds / 3600);
      seconds %= 3600;
      const minutes = Math.floor(seconds / 60);
      return `${days}d ${hours}h ${minutes}m`;
    },
    cpuUsagePercent(cpu) {
      const total = cpu.times.user + cpu.times.nice + cpu.times.sys + cpu.times.idle + cpu.times.irq;
      return ((total - cpu.times.idle) / total) * 100;
    },
    cpuIdlePercent(cpu) {
      const total = cpu.times.user + cpu.times.nice + cpu.times.sys + cpu.times.idle + cpu.times.irq;
      return (cpu.times.idle / total) * 100;
    }
  }
};
</script>
```
@@ -84,8 +84,12 @@ const DougalBinaryLoader = {

```js
      for (let k = 0; k < values.length; k++) {
        values[k] = new (k === 0 ? Uint16Array : k === 1 ? Uint32Array : k === 2 ? BigUint64Array : Float32Array)(totalCount);
      }
    } else if (udv == 4) {
      for (let k = 0; k < values.length; k++) {
        values[k] = new (k === 0 ? Uint16Array : k === 1 ? Uint32Array : k === 2 ? Uint16Array : Float32Array)(totalCount);
      }
    } else {
      throw new Error(`Invalid udv: Expected 0, 1, or 2; found ${udv}`);
      throw new Error(`Invalid udv: Expected 0, 1, 2, or 4; found ${udv}`);
    }

    let offset = 0;
```

@@ -110,7 +114,7 @@ const DougalBinaryLoader = {

```js
      offset += chunk.jCount;
    }

    console.log(`Parsed ${totalCount} points, ${values.length} value arrays`);
    console.log(`Parsed ${totalCount} points, ${values.length} value arrays, udv = ${udv}`);

    const attributes = {
      getPosition: {
```

lib/www/client/source/src/lib/durations.js (new file, 47 lines)

@@ -0,0 +1,47 @@

```js

function duration_to_ms(v) {
  if (v instanceof Object) {
    return (
      (v.days || 0) * 86400000 +
      (v.hours || 0) * 3600000 +
      (v.minutes || 0) * 60000 +
      (v.seconds || 0) * 1000 +
      (v.milliseconds || 0)
    );
  } else {
    return {
      days: 0,
      hours: 0,
      minutes: 0,
      seconds: 0,
      milliseconds: 0
    }
  }
}

function ms_to_duration(v) {
  const days = Math.floor(v / 86400000);
  v %= 86400000;
  const hours = Math.floor(v / 3600000);
  v %= 3600000;
  const minutes = Math.floor(v / 60000);
  v %= 60000;
  const seconds = Math.floor(v / 1000);
  const milliseconds = v % 1000;
  return { days, hours, minutes, seconds, milliseconds };
}

function normalise_duration (v) {
  return ms_to_duration(duration_to_ms(v));
}

function add_durations(a, b) {
  return ms_to_duration(duration_to_ms(a) + duration_to_ms(b));
}

export {
  duration_to_ms,
  ms_to_duration,
  normalise_duration,
  add_durations
}
```
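A quick usage sketch of the helpers above (the input values are illustrative):

```js
import { add_durations, normalise_duration, duration_to_ms } from '@/lib/durations';

const a = { hours: 1, minutes: 50 };
const b = { minutes: 20, seconds: 45 };

add_durations(a, b);
// => { days: 0, hours: 2, minutes: 10, seconds: 45, milliseconds: 0 }

duration_to_ms({ minutes: 1, seconds: 30 }); // => 90000
normalise_duration({ minutes: 90 });         // => { days: 0, hours: 1, minutes: 30, seconds: 0, milliseconds: 0 }
```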
@@ -20,6 +20,9 @@ import ProjectSettings from '../views/ProjectSettings.vue'

```js
import Users from '../views/Users.vue'
import DougalAppBarExtensionProject from '../components/app-bar-extension-project'
import DougalAppBarExtensionProjectList from '../components/app-bar-extension-project-list'
import GroupList from '../views/GroupList.vue'
import Group from '../views/Group.vue'


Vue.use(VueRouter)
```

@@ -155,6 +158,7 @@

```js
      component: SequenceList
    },
    {
      name: "shotlog",
      path: "sequences/:sequence",
      component: SequenceSummary
    },
```

@@ -196,7 +200,57 @@

```js
          component: ProjectSettings
        }
      ]
    }
    },
    {
      pathToRegexpOptions: { strict: true },
      path: "/groups",
      redirect: "/groups/"
    },
    {
      pathToRegexpOptions: { strict: true },
      path: "/groups/",
      component: GroupList,
      meta: {
        breadcrumbs: [
          { text: "Comparisons", href: "/groups", disabled: true }
        ],
        appBarExtension: {
          // component: DougalAppBarExtensionProjectList
        }
      }
    },
    {
      pathToRegexpOptions: { strict: true },
      path: "/groups/:group",
      redirect: "/groups/:group/"
    },
    {
      pathToRegexpOptions: { strict: true },
      path: "/groups/:group/",
      name: "Group",
      component: Group,
      meta: {
        breadcrumbs: [
          { text: "Comparisons", href: "/groups" },
          { text: (ctx) => ctx.$route.params.group }
          /*
          {
            text: (ctx) => ctx.$store.state.project.projectName || "…",
            href: (ctx) => `/projects/${ctx.$store.state.project.projectId || ctx.$route.params.project || ""}/`,
            title: (ctx) => Object.entries(ctx.$store.getters.projectConfiguration?.organisations ?? {}).map( ([org, ops]) => `* ${org}: ${Object.entries(ops).filter( ([k, v]) => v ).map( ([k, v]) => k ).join(", ")}`).join("\n"),
            organisations: (ctx) => ctx.$store.getters.projectConfiguration?.organisations ?? {}
          }
          */
        ],
        /*
        appBarExtension: {
          component: DougalAppBarExtensionGroup
        }
        */
      },
      children: [
      ]
    },
  ]

const router = new VueRouter({
```
@@ -36,7 +36,7 @@ async function refreshEvents ({commit, dispatch, state, rootState}, [modifiedAft
|
||||
|
||||
/** Return a subset of events from state.events
|
||||
*/
|
||||
async function getEvents ({commit, dispatch, state}, [projectId, {sequence, date0, date1, sortBy, sortDesc, itemsPerPage, page, text, label}]) {
|
||||
async function getEvents ({commit, dispatch, state}, [projectId, {sequence, date0, date1, sortBy, sortDesc, itemsPerPage, page, text, label, excludeLabels}]) {
|
||||
let filteredEvents = [...state.events];
|
||||
|
||||
if (sortBy) {
|
||||
@@ -114,6 +114,10 @@ async function getEvents ({commit, dispatch, state}, [projectId, {sequence, date
|
||||
filteredEvents = filteredEvents.filter( event => event.labels?.includes(label) );
|
||||
}
|
||||
|
||||
if (excludeLabels) {
|
||||
filteredEvents = filteredEvents.filter( event => !excludeLabels?.some( label => event.labels?.includes(label) ) );
|
||||
}
|
||||
|
||||
const count = filteredEvents.length;
|
||||
|
||||
if (itemsPerPage && itemsPerPage > 0) {
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
|
||||
async function getProject ({commit, dispatch}, projectId) {
|
||||
if (projectId == null) {
|
||||
console.log(`Skipping call to getProject(${projectId})`);
|
||||
return;
|
||||
}
|
||||
|
||||
const init = {
|
||||
headers: {
|
||||
cache: "reload",
|
||||
|
||||
@@ -1,13 +1,27 @@
|
||||
import * as d3a from 'd3-array';
|
||||
import { duration_to_ms, ms_to_duration, normalise_duration, add_durations } from '@/lib/durations';
|
||||
|
||||
/** Fetch projects from server
|
||||
*/
|
||||
async function refreshProjects ({commit, dispatch, state, rootState}) {
|
||||
|
||||
async function getSummary (project) {
|
||||
const url = `/project/${project.pid}/summary`;
|
||||
const init = {};
|
||||
const summary = await dispatch('api', [url, init, null, {silent:true}]);
|
||||
if (summary) {
|
||||
return {...project, ...summary};
|
||||
} else {
|
||||
return project;
|
||||
}
|
||||
}
|
||||
|
||||
if (state.loading) {
|
||||
commit('abortProjectsLoading');
|
||||
}
|
||||
|
||||
commit('setProjectsLoading');
|
||||
const tstamp = new Date();
|
||||
const pid = rootState.project.projectId;
|
||||
const url = `/project`;
|
||||
const init = {
|
||||
@@ -17,10 +31,25 @@ async function refreshProjects ({commit, dispatch, state, rootState}) {
|
||||
const res = await dispatch('api', [url, init, null, {silent:true}]);
|
||||
|
||||
if (res) {
|
||||
commit('setProjects', res);
|
||||
commit('setProjectsTimestamp');
|
||||
|
||||
let projects;
|
||||
|
||||
if (res.some( project => project.pid == null )) {
|
||||
console.warn("At least one project found with no PID!");
|
||||
projects = res.filter( project => project.pid != null );
|
||||
} else {
|
||||
projects = res;
|
||||
}
|
||||
|
||||
commit('setProjects', projects); // First without summaries
|
||||
commit('setProjectsTimestamp', tstamp);
|
||||
|
||||
projects = await Promise.all(projects.map( getSummary ));
|
||||
|
||||
commit('setProjects', projects); // Then with summaries
|
||||
}
|
||||
commit('clearProjectsLoading');
|
||||
dispatch('prepareGroups');
|
||||
}
|
||||
|
||||
/** Return a subset of projects from state.projects
|
||||
@@ -118,4 +147,83 @@ async function getProjects ({commit, dispatch, state}, [{pid, name, schema, grou
|
||||
return {projects: filteredProjects, count};
|
||||
}
|
||||
|
||||
export default { refreshProjects, getProjects };
|
||||
|
||||
|
||||
async function prepareGroups ({commit, dispatch, state, rootState}) {
|
||||
const groups = {};
|
||||
|
||||
for (const project of state.projects) {
|
||||
|
||||
if (!project.prod_distance) {
|
||||
// This project has no production data (either not started yet
|
||||
// or production data has not been imported) so we skip it.
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!project.prod_duration.days) {
|
||||
project.prod_duration = normalise_duration(project.prod_duration);
|
||||
}
|
||||
|
||||
for (const name of project.groups) {
|
||||
if (!(name in groups)) {
|
||||
groups[name] = {
|
||||
group: name,
|
||||
num_projects: 0,
|
||||
lines: 0,
|
||||
points: 0,
|
||||
sequences: 0,
|
||||
// Shots:
|
||||
prime: 0,
|
||||
other: 0,
|
||||
ntba: 0,
|
||||
prod_duration: {
|
||||
days: 0,
|
||||
hours: 0,
|
||||
minutes: 0,
|
||||
seconds: 0,
|
||||
milliseconds: 0
|
||||
},
|
||||
prod_distance: 0,
|
||||
shooting_rate: [],
|
||||
projects: []
|
||||
};
|
||||
}
|
||||
const group = groups[name];
|
||||
|
||||
group.num_projects++;
|
||||
group.lines = Math.max(group.lines, project.lines); // In case preplots changed
|
||||
group.points = Math.max(group.points, project.total); // Idem
|
||||
group.sequences += project.seq_final;
|
||||
group.prime += project.prime;
|
||||
group.other += project.other;
|
||||
//group.ntba += project.ntba;
|
||||
group.prod_duration = add_durations(group.prod_duration, project.prod_duration);
|
||||
group.prod_distance += project.prod_distance;
|
||||
group.shooting_rate.push(project.shooting_rate);
|
||||
group.projects.push(project);
|
||||
}
|
||||
}
|
||||
|
||||
const grouplist = [];
|
||||
for (const group of Object.values(groups)) {
|
||||
group.shooting_rate_mean = d3a.mean(group.shooting_rate);
|
||||
group.shooting_rate_sd = d3a.deviation(group.shooting_rate);
|
||||
delete group.shooting_rate;
|
||||
|
||||
grouplist.push(group);
|
||||
}
|
||||
|
||||
commit('setGroups', grouplist);
|
||||
|
||||
}
|
||||
|
||||
async function getGroups({commit, dispatch, state, rootState}) {
|
||||
if (!state.groups.length) {
|
||||
await dispatch('refreshProjects');
|
||||
}
|
||||
|
||||
return state.groups;
|
||||
}
|
||||
|
||||
|
||||
export default { refreshProjects, getProjects, prepareGroups, getGroups };
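The per-group shooting rate statistics above use d3-array's mean and deviation (sample standard deviation). A standalone illustration with made-up values:

import { mean, deviation } from 'd3-array';

const rates = [9.8, 10.1, 10.4];   // one shooting_rate per project in the group
console.log(mean(rates));          // 10.1
console.log(deviation(rates));     // ≈ 0.3 (sample standard deviation)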
|
||||
|
||||
@@ -3,7 +3,7 @@ function projects (state) {
|
||||
return state.projects;
|
||||
}
|
||||
|
||||
function projectGroups (state) {
|
||||
function projectGroupNames (state) {
|
||||
return [...new Set(state.projects.map(i => i.groups).flat())].sort();
|
||||
}
|
||||
|
||||
@@ -15,4 +15,8 @@ function projectsLoading (state) {
|
||||
return !!state.loading;
|
||||
}
|
||||
|
||||
export default { projects, projectGroups, projectCount, projectsLoading };
|
||||
function groups (state) {
|
||||
return state.groups;
|
||||
}
|
||||
|
||||
export default { projects, projectGroupNames, projectCount, projectsLoading, groups };
|
||||
|
||||
@@ -39,10 +39,15 @@ function abortProjectsLoading (state) {
|
||||
state.loading = null;
|
||||
}
|
||||
|
||||
function setGroups (state, groups) {
|
||||
state.groups = Object.freeze(groups);
|
||||
}
|
||||
|
||||
export default {
|
||||
setProjects,
|
||||
setProjectsLoading,
|
||||
clearProjectsLoading,
|
||||
setProjectsTimestamp,
|
||||
setProjectsETag
|
||||
setProjectsETag,
|
||||
setGroups
|
||||
};
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const state = () => ({
|
||||
projects: Object.freeze([]),
|
||||
groups: Object.freeze([]),
|
||||
loading: null,
|
||||
timestamp: null,
|
||||
etag: null,
|
||||
|
||||
lib/www/client/source/src/views/Group.vue (new file, 339 lines)
@@ -0,0 +1,339 @@
|
||||
<template>
|
||||
<dougal-group-map v-if="mapView"
|
||||
:baseline="baseline"
|
||||
:monitor="monitor"
|
||||
:monitors="monitors"
|
||||
@input="mapView=$event"
|
||||
></dougal-group-map>
|
||||
<v-container fluid fill-height class="ma-0 pa-0" v-else>
|
||||
|
||||
<v-overlay :value="loading && !comparisons.length" absolute>
|
||||
<v-progress-circular
|
||||
indeterminate
|
||||
size="64"
|
||||
></v-progress-circular>
|
||||
</v-overlay>
|
||||
|
||||
<v-overlay :value="!loading && !groupFound" absolute opacity="0.8">
|
||||
<v-row justify="center">
|
||||
<v-alert
|
||||
type="error"
|
||||
>
|
||||
Group not found
|
||||
</v-alert>
|
||||
</v-row>
|
||||
<v-row justify="center">
|
||||
<v-btn color="primary" @click="refreshProjects">Retry</v-btn>
|
||||
</v-row>
|
||||
</v-overlay>
|
||||
|
||||
<v-row no-gutters align="stretch" class="fill-height">
|
||||
<v-col cols="12" v-if="groupFound">
|
||||
|
||||
<v-data-table class="ma-1"
|
||||
:headers="projectHeaders"
|
||||
:items="projects"
|
||||
dense
|
||||
>
|
||||
|
||||
<template v-slot:item.baseline="{item, value, index}">
|
||||
<v-simple-checkbox v-if="index+1 < projects.length"
|
||||
color="primary"
|
||||
:value="baseline === item"
|
||||
@input="setBaseline(item)"
|
||||
></v-simple-checkbox>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.monitor="{item, value, index}">
|
||||
<v-simple-checkbox v-if="index > 0 && !(index <= baselineIndex)"
|
||||
color="primary"
|
||||
:value="monitor === item"
|
||||
@input="setMonitor(item)"
|
||||
></v-simple-checkbox>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.pid="{item, value}">
|
||||
<v-chip
|
||||
label
|
||||
small
|
||||
outlined
|
||||
:href="`/projects/${item.pid}`"
|
||||
:color="!item.archived ? 'primary' : ''"
|
||||
>{{ value }}</v-chip>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.fsp="{item, value}">
|
||||
<span title="First production shot">{{value.tstamp.substr(0, 10)}}</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.lsp="{item, value}">
|
||||
<span title="Last production shot">{{value.tstamp.substr(0, 10)}}</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.prod_duration="{item, value}">
|
||||
<span v-if="value.days > 2" :title="`${value.days} d ${value.hours} h ${value.minutes} m ${(value.seconds + value.milliseconds/1000).toFixed(3)} s`">
|
||||
{{ value.days }} d
|
||||
</span>
|
||||
<span v-else>
|
||||
{{ value.days }} d {{ value.hours }} h {{ value.minutes }} m {{ (value.seconds + value.milliseconds/1000).toFixed(1) }} s
|
||||
</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.prod_distance="{item, value}">
|
||||
{{ (value/1000).toFixed(1) }} km
|
||||
</template>
|
||||
|
||||
<template v-slot:footer.prepend>
|
||||
|
||||
<v-btn v-if="baseline && !mapView"
|
||||
text
|
||||
color="primary"
|
||||
title="Switch to map view"
|
||||
@click="mapView = true"
|
||||
>View map</v-btn>
|
||||
|
||||
<v-btn v-if="comparison"
|
||||
text
|
||||
color="primary"
|
||||
title="Back to summary"
|
||||
@click="clearComparison"
|
||||
>Back</v-btn>
|
||||
</template>
|
||||
|
||||
</v-data-table>
|
||||
|
||||
<!-- BEGIN TEST -->
|
||||
|
||||
<dougal-group-comparison-summary v-if="comparison"
|
||||
:baseline="baseline"
|
||||
:monitor="monitor"
|
||||
:comparison="comparison"
|
||||
></dougal-group-comparison-summary>
|
||||
|
||||
<dougal-group-repeatability-summary v-else-if="comparisons.length"
|
||||
:comparisons="comparisons"
|
||||
:projects="projects"
|
||||
@input="setComparison"
|
||||
></dougal-group-repeatability-summary>
|
||||
|
||||
<!-- END TEST -->
|
||||
|
||||
|
||||
</v-col>
|
||||
<v-col cols="12" v-else>
|
||||
<v-card>
|
||||
<v-card-text>
|
||||
Group does not exist.
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</v-col>
|
||||
</v-row>
|
||||
</v-container>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex'
|
||||
import AccessMixin from '@/mixins/access';
|
||||
import DougalGroupRepeatabilitySummary from '@/components/groups/group-repeatability-summary.vue';
|
||||
import DougalGroupComparisonSummary from '@/components/groups/group-comparison-summary';
|
||||
import DougalGroupMap from '@/components/groups/group-map';
|
||||
|
||||
export default {
|
||||
name: 'Group',
|
||||
|
||||
mixins: [
|
||||
AccessMixin
|
||||
],
|
||||
|
||||
components: {
|
||||
DougalGroupRepeatabilitySummary,
|
||||
DougalGroupComparisonSummary,
|
||||
DougalGroupMap
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
projectHeaders: [
|
||||
{
|
||||
value: "baseline",
|
||||
text: "Baseline"
|
||||
},
|
||||
{
|
||||
value: "monitor",
|
||||
text: "Monitor"
|
||||
},
|
||||
{
|
||||
value: "pid",
|
||||
text: "ID"
|
||||
},
|
||||
{
|
||||
value: "name",
|
||||
text: "Name"
|
||||
},
|
||||
{
|
||||
value: "fsp",
|
||||
text: "Start"
|
||||
},
|
||||
{
|
||||
value: "lsp",
|
||||
text: "Finish"
|
||||
},
|
||||
{
|
||||
value: "lines",
|
||||
text: "Preplot lines"
|
||||
},
|
||||
{
|
||||
value: "seq_final",
|
||||
text: "Num. of sequences"
|
||||
},
|
||||
{
|
||||
value: "prod_duration",
|
||||
text: "Duration"
|
||||
},
|
||||
{
|
||||
value: "prod_distance",
|
||||
text: "Distance"
|
||||
},
|
||||
],
|
||||
|
||||
mapView: false,
|
||||
|
||||
baseline: null,
|
||||
monitor: null,
|
||||
comparisons: []
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
groupName () {
|
||||
return this.$route.params.group;
|
||||
},
|
||||
|
||||
group () {
|
||||
return this.groups.find( i => i.group === this.groupName );
|
||||
},
|
||||
|
||||
groupFound () {
|
||||
return !!(this.loading || this.group);
|
||||
},
|
||||
|
||||
projects () {
|
||||
return this.group?.projects.toSorted((a, b) => a.pid.localeCompare(b.pid));
|
||||
},
|
||||
|
||||
baselineIndex () {
|
||||
return this.projects.indexOf(this.baseline);
|
||||
},
|
||||
|
||||
monitors () {
|
||||
if (this.baseline && this.comparisons) {
|
||||
return this.comparisons
|
||||
.filter( i => i.baseline_pid == this.baseline.pid )
|
||||
.map( i => this.projects.find( p => p.pid == i.monitor_pid ));
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
|
||||
comparison () {
|
||||
return this.comparisons.find( row =>
|
||||
row.baseline_pid == this.baseline?.pid && row.monitor_pid == this.monitor?.pid
|
||||
)?.meta;
|
||||
},
|
||||
|
||||
...mapGetters(["loading", "groups"])
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
setBaseline (project) {
|
||||
if (project === this.baseline) {
|
||||
this.baseline = null;
|
||||
} else {
|
||||
this.baseline = project;
|
||||
if (this.monitor) {
|
||||
if (this.projects.indexOf(this.monitor) <= this.projects.indexOf(this.baseline)) {
|
||||
this.monitor = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
setMonitor (project) {
|
||||
if (project === this.monitor) {
|
||||
this.monitor = null;
|
||||
} else {
|
||||
this.monitor = project;
|
||||
}
|
||||
},
|
||||
|
||||
clearComparison () {
|
||||
this.baseline = null;
|
||||
this.monitor = null;
|
||||
},
|
||||
|
||||
setComparison (baseline, monitor) {
|
||||
this.clearComparison();
|
||||
this.setBaseline(baseline);
|
||||
this.setMonitor(monitor);
|
||||
},
|
||||
|
||||
async getComparisons () {
|
||||
const url = `/comparison/group/${this.$route.params.group}`;
|
||||
this.comparisons = await this.api([url]);
|
||||
},
|
||||
|
||||
// TODO Should this go in a Vuex action rather?
|
||||
async refreshComparisons () {
|
||||
await this.getGroups();
|
||||
if (this.groupFound) {
|
||||
await this.getComparisons();
|
||||
}
|
||||
},
|
||||
|
||||
/*
|
||||
async getComparison () {
|
||||
if (this.baseline && this.monitor) {
|
||||
const url = `/comparison/group/${this.$route.params.group}/baseline/${this.baseline.pid}/monitor/${this.monitor.pid}`;
|
||||
const comparison = await this.api([url]);
|
||||
if (comparison) {
|
||||
this.comparison = comparison;
|
||||
}
|
||||
}
|
||||
},
|
||||
*/
|
||||
|
||||
handleComparisons (context, {payload}) {
|
||||
this.refreshComparisons();
|
||||
},
|
||||
|
||||
registerNotificationHandlers (action = "registerHandler") {
|
||||
|
||||
this.$store.dispatch(action, {
|
||||
table: 'comparisons',
|
||||
handler: this.handleComparisons
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
unregisterNotificationHandlers () {
|
||||
return this.registerNotificationHandlers("unregisterHandler");
|
||||
},
|
||||
|
||||
|
||||
...mapActions(["api", "getGroups", "refreshProjects"])
|
||||
},
|
||||
|
||||
async mounted () {
|
||||
this.registerNotificationHandlers();
|
||||
this.refreshComparisons()
|
||||
},
|
||||
|
||||
beforeDestroy () {
|
||||
this.unregisterNotificationHandlers();
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
</script>
|
||||
lib/www/client/source/src/views/GroupList.vue (new file, 396 lines)
@@ -0,0 +1,396 @@
|
||||
<template>
|
||||
<v-container fluid>
|
||||
|
||||
<v-data-table
|
||||
:headers="headers"
|
||||
:items="displayItems"
|
||||
item-key="group"
|
||||
:options.sync="options"
|
||||
:expanded.sync="expanded"
|
||||
show-expand
|
||||
:loading="loading"
|
||||
>
|
||||
|
||||
<template v-slot:item.group="{item, value}">
|
||||
<v-chip
|
||||
label
|
||||
small
|
||||
:href="`./${value}`"
|
||||
>{{ value }}</v-chip>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.shots_total="{item, value}">
|
||||
<div>{{ item.prime + item.other }}</div>
|
||||
<v-progress-linear
|
||||
background-color="secondary"
|
||||
color="primary"
|
||||
:value="item.prime/(item.prime+item.other)*100"
|
||||
></v-progress-linear>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.prime="{item, value}">
|
||||
{{ value }}
|
||||
({{ (value / (item.prime + item.other) * 100).toFixed(1) }}%)
|
||||
</template>
|
||||
|
||||
<template v-slot:item.other="{item, value}">
|
||||
{{ value }}
|
||||
({{ (value / (item.prime + item.other) * 100).toFixed(1) }}%)
|
||||
</template>
|
||||
|
||||
<template v-slot:item.prod_duration="{item, value}">
|
||||
<span v-if="value.days > 2" :title="`${value.days} d ${value.hours} h ${value.minutes} m ${(value.seconds + value.milliseconds/1000).toFixed(3)} s`">
|
||||
{{ value.days }} d
|
||||
</span>
|
||||
<span v-else>
|
||||
{{ value.days }} d {{ value.hours }} h {{ value.minutes }} m {{ (value.seconds + value.milliseconds/1000).toFixed(1) }} s
|
||||
</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.prod_distance="{item, value}">
|
||||
{{ (value/1000).toFixed(1) }} km
|
||||
</template>
|
||||
|
||||
<template v-slot:item.shooting_rate_mean="{item, value}">
|
||||
{{ (value).toFixed(2) }} s ±{{ (item.shooting_rate_sd).toFixed(3) }} s
|
||||
</template>
|
||||
|
||||
<template v-slot:item.shots_per_point="{item, value}">
|
||||
<div>
|
||||
{{ ((item.prime + item.other)/item.points).toFixed(1) }}
|
||||
({{ ((((item.prime + item.other)/item.points) / item.num_projects)*100).toFixed(2) }}%)
|
||||
</div>
|
||||
<v-progress-linear
|
||||
:value="((((item.prime + item.other)/item.points) / item.num_projects)*100)"
|
||||
></v-progress-linear>
|
||||
</template>
|
||||
|
||||
<template v-slot:expanded-item="{ headers, item }">
|
||||
<td :colspan="headers.length">
|
||||
<v-data-table class="ma-1"
|
||||
:headers="projectHeaders"
|
||||
:items="item.projects"
|
||||
dense
|
||||
hide-default-footer
|
||||
>
|
||||
|
||||
<template v-slot:item.pid="{item, value}">
|
||||
<a :href="`/projects/${value}`" title="Go to project">{{ value }}</a>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.fsp="{item, value}">
|
||||
<span title="First production shot">{{value.tstamp.substr(0, 10)}}</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.lsp="{item, value}">
|
||||
<span title="Last production shot">{{value.tstamp.substr(0, 10)}}</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.prod_duration="{item, value}">
|
||||
<span v-if="value.days > 2" :title="`${value.days} d ${value.hours} h ${value.minutes} m ${(value.seconds + value.milliseconds/1000).toFixed(3)} s`">
|
||||
{{ value.days }} d
|
||||
</span>
|
||||
<span v-else>
|
||||
{{ value.days }} d {{ value.hours }} h {{ value.minutes }} m {{ (value.seconds + value.milliseconds/1000).toFixed(1) }} s
|
||||
</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.prod_distance="{item, value}">
|
||||
{{ (value/1000).toFixed(1) }} km
|
||||
</template>
|
||||
|
||||
</v-data-table>
|
||||
</td>
|
||||
</template>
|
||||
|
||||
</v-data-table>
|
||||
|
||||
</v-container>
|
||||
</template>
|
||||
|
||||
<style>
|
||||
td p:last-of-type {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
</style>
|
||||
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex';
|
||||
import AccessMixin from '@/mixins/access';
|
||||
|
||||
|
||||
// FIXME send to lib/utils or so
|
||||
/*
|
||||
function duration_to_ms(v) {
|
||||
if (v instanceof Object) {
|
||||
return (
|
||||
(v.days || 0) * 86400000 +
|
||||
(v.hours || 0) * 3600000 +
|
||||
(v.minutes || 0) * 60000 +
|
||||
(v.seconds || 0) * 1000 +
|
||||
(v.milliseconds || 0)
|
||||
);
|
||||
} else {
|
||||
return {
|
||||
days: 0,
|
||||
hours: 0,
|
||||
minutes: 0,
|
||||
seconds: 0,
|
||||
milliseconds: 0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function ms_to_duration(v) {
|
||||
const days = Math.floor(v / 86400000);
|
||||
v %= 86400000;
|
||||
const hours = Math.floor(v / 3600000);
|
||||
v %= 3600000;
|
||||
const minutes = Math.floor(v / 60000);
|
||||
v %= 60000;
|
||||
const seconds = Math.floor(v / 1000);
|
||||
const milliseconds = v % 1000;
|
||||
return { days, hours, minutes, seconds, milliseconds };
|
||||
}
|
||||
|
||||
function normalise_duration (v) {
|
||||
return ms_to_duration(duration_to_ms(v));
|
||||
}
|
||||
|
||||
function add_durations(a, b) {
|
||||
return ms_to_duration(duration_to_ms(a) + duration_to_ms(b));
|
||||
}
|
||||
*/
|
||||
|
||||
export default {
|
||||
name: "GroupList",
|
||||
|
||||
components: {
|
||||
},
|
||||
|
||||
mixins: [
|
||||
AccessMixin
|
||||
],
|
||||
|
||||
data () {
|
||||
return {
|
||||
headers: [
|
||||
{
|
||||
value: "group",
|
||||
text: "Group name"
|
||||
},
|
||||
{
|
||||
value: "num_projects",
|
||||
text: "Number of campaigns"
|
||||
},
|
||||
{
|
||||
value: "lines",
|
||||
text: "Preplot lines"
|
||||
},
|
||||
{
|
||||
value: "points",
|
||||
text: "Preplot points"
|
||||
},
|
||||
{
|
||||
value: "sequences",
|
||||
text: "Total sequences"
|
||||
},
|
||||
{
|
||||
value: "shots_total",
|
||||
text: "Total shots"
|
||||
},
|
||||
{
|
||||
value: "prime",
|
||||
text: "Total prime"
|
||||
},
|
||||
{
|
||||
value: "other",
|
||||
text: "Total reshoot + infill"
|
||||
},
|
||||
/*
|
||||
{
|
||||
value: "ntba",
|
||||
text: "Total NTBA"
|
||||
},
|
||||
*/
|
||||
{
|
||||
value: "prod_duration",
|
||||
text: "Total duration"
|
||||
},
|
||||
{
|
||||
value: "prod_distance",
|
||||
text: "Total distance"
|
||||
},
|
||||
{
|
||||
value: "shooting_rate_mean",
|
||||
text: "Shooting rate (mean)"
|
||||
},
|
||||
{
|
||||
value: "shots_per_point",
|
||||
text: "Shots per point"
|
||||
},
|
||||
],
|
||||
items: [],
|
||||
expanded: [],
|
||||
options: { sortBy: ["group"], sortDesc: [false] },
|
||||
|
||||
projectHeaders: [
|
||||
{
|
||||
value: "pid",
|
||||
text: "ID"
|
||||
},
|
||||
{
|
||||
value: "name",
|
||||
text: "Name"
|
||||
},
|
||||
{
|
||||
value: "fsp",
|
||||
text: "Start"
|
||||
},
|
||||
{
|
||||
value: "lsp",
|
||||
text: "Finish"
|
||||
},
|
||||
{
|
||||
value: "lines",
|
||||
text: "Preplot lines"
|
||||
},
|
||||
{
|
||||
value: "seq_final",
|
||||
text: "Num. of sequences"
|
||||
},
|
||||
{
|
||||
value: "prod_duration",
|
||||
text: "Duration"
|
||||
},
|
||||
{
|
||||
value: "prod_distance",
|
||||
text: "Distance"
|
||||
},
|
||||
],
|
||||
|
||||
// Context menu stuff
|
||||
contextMenuShow: false,
|
||||
contextMenuX: 0,
|
||||
contextMenuY: 0,
|
||||
contextMenuItem: null,
|
||||
|
||||
/*
|
||||
// FIXME Eventually need to move this into Vuex
|
||||
groups: []
|
||||
*/
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
displayItems () {
|
||||
return this.items.filter(i => i.prod_distance);
|
||||
},
|
||||
|
||||
...mapGetters(['loading', 'groups'])
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
/*
|
||||
async prepareGroups () {
|
||||
//const groups = await this.api(["/prospects"]);
|
||||
//console.log("groups", groups);
|
||||
const groups = {};
|
||||
|
||||
for (const project of this.projects) {
|
||||
|
||||
if (!project.prod_distance) {
|
||||
// This project has no production data (either not started yet
|
||||
// or production data has not been imported) so we skip it.
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!project.prod_duration.days) {
|
||||
project.prod_duration = normalise_duration(project.prod_duration);
|
||||
}
|
||||
|
||||
for (const name of project.groups) {
|
||||
if (!(name in groups)) {
|
||||
groups[name] = {
|
||||
group: name,
|
||||
num_projects: 0,
|
||||
lines: 0,
|
||||
points: 0,
|
||||
sequences: 0,
|
||||
// Shots:
|
||||
prime: 0,
|
||||
other: 0,
|
||||
ntba: 0,
|
||||
prod_duration: {
|
||||
days: 0,
|
||||
hours: 0,
|
||||
minutes: 0,
|
||||
seconds: 0,
|
||||
milliseconds: 0
|
||||
},
|
||||
prod_distance: 0,
|
||||
shooting_rate: [],
|
||||
projects: []
|
||||
};
|
||||
}
|
||||
const group = groups[name];
|
||||
|
||||
group.num_projects++;
|
||||
group.lines = Math.max(group.lines, project.lines); // In case preplots changed
|
||||
group.points = Math.max(group.points, project.total); // Idem
|
||||
group.sequences += project.seq_final;
|
||||
group.prime += project.prime;
|
||||
group.other += project.other;
|
||||
//group.ntba += project.ntba;
|
||||
group.prod_duration = add_durations(group.prod_duration, project.prod_duration);
|
||||
group.prod_distance += project.prod_distance;
|
||||
group.shooting_rate.push(project.shooting_rate);
|
||||
group.projects.push(project);
|
||||
}
|
||||
}
|
||||
|
||||
this.groups = [];
|
||||
for (const group of Object.values(groups)) {
|
||||
group.shooting_rate_mean = d3a.mean(group.shooting_rate);
|
||||
group.shooting_rate_sd = d3a.deviation(group.shooting_rate);
|
||||
delete group.shooting_rate;
|
||||
|
||||
this.groups.push(group);
|
||||
}
|
||||
|
||||
},
|
||||
*/
|
||||
|
||||
async list () {
|
||||
this.items = [...this.groups];
|
||||
},
|
||||
|
||||
async load () {
|
||||
await this.refreshProjects();
|
||||
//await this.prepareGroups();
|
||||
await this.list();
|
||||
},
|
||||
|
||||
registerNotificationHandlers () {
|
||||
this.$store.dispatch('registerHandler', {
|
||||
table: 'project',
|
||||
|
||||
handler: (context, message) => {
|
||||
if (message.payload?.table == "public") {
|
||||
this.load();
|
||||
}
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
...mapActions(["api", "showSnack", "refreshProjects"])
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.registerNotificationHandlers();
|
||||
this.load();
|
||||
}
|
||||
}
|
||||
|
||||
</script>
|
||||
@@ -5,6 +5,22 @@
|
||||
<v-card-title>
|
||||
<v-toolbar flat>
|
||||
<v-toolbar-title>
|
||||
<template v-if="$route.params.sequence">
|
||||
<v-btn icon small
|
||||
:disabled="sequenceIndex >= (sequences.length - 1)"
|
||||
:to="{name: 'logBySequence', params: { sequence: (sequences[sequences.length-1]||{}).sequence }}"
|
||||
title="Go to the first sequence"
|
||||
>
|
||||
<v-icon dense>mdi-chevron-double-left</v-icon>
|
||||
</v-btn>
|
||||
<v-btn icon small
|
||||
:disabled="sequenceIndex >= (sequences.length - 1)"
|
||||
:to="{name: 'logBySequence', params: { sequence: (sequences[sequenceIndex+1]||{}).sequence }}"
|
||||
title="Go to the previous sequence"
|
||||
>
|
||||
<v-icon dense>mdi-chevron-left</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
<span class="d-none d-lg-inline">
|
||||
{{
|
||||
$route.params.sequence
|
||||
@@ -31,18 +47,38 @@
|
||||
: ""
|
||||
}}
|
||||
</span>
|
||||
|
||||
<template v-if="$route.params.sequence">
|
||||
<v-btn icon small
|
||||
:disabled="sequenceIndex==0"
|
||||
:to="{name: 'logBySequence', params: { sequence: (sequences[sequenceIndex-1]||{}).sequence }}"
|
||||
title="Go to the next sequence"
|
||||
>
|
||||
<v-icon dense>mdi-chevron-right</v-icon>
|
||||
</v-btn>
|
||||
<v-btn icon small class="mr-1"
|
||||
:disabled="sequenceIndex==0"
|
||||
:to="{name: 'logBySequence', params: { sequence: (sequences[0]||{}).sequence }}"
|
||||
title="Go to the last sequence"
|
||||
>
|
||||
<v-icon dense>mdi-chevron-double-right</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
|
||||
<a v-if="$route.params.sequence"
|
||||
class="mr-3"
|
||||
:href="`/projects/${$route.params.project}/sequences/${$route.params.sequence}`"
|
||||
title="View the shotlog for this sequence"
|
||||
>
|
||||
<v-icon
|
||||
right
|
||||
color="teal"
|
||||
>mdi-format-list-numbered</v-icon>
|
||||
</a>
|
||||
|
||||
</v-toolbar-title>
|
||||
|
||||
<a v-if="$route.params.sequence"
|
||||
class="mr-3"
|
||||
:href="`/projects/${$route.params.project}/sequences/${$route.params.sequence}`"
|
||||
title="View the shotlog for this sequence"
|
||||
>
|
||||
<v-icon
|
||||
right
|
||||
color="teal"
|
||||
>mdi-format-list-numbered</v-icon>
|
||||
</a>
|
||||
|
||||
|
||||
<dougal-event-edit v-if="$parent.writeaccess()"
|
||||
v-model="eventDialog"
|
||||
@@ -325,6 +361,12 @@
|
||||
@click="labelSearch=label"
|
||||
>{{label}}</v-chip>
|
||||
</span>
|
||||
<v-icon v-if="entry.meta.auto || entry.meta.author"
|
||||
x-small
|
||||
left
|
||||
color="primary"
|
||||
:title="entry.meta.author?`Automatic event by ${entry.meta.author}`:'Automatic event'"
|
||||
>mdi-robot</v-icon>
|
||||
<dougal-event-edit-history v-if="entry.has_edits && $parent.writeaccess()"
|
||||
:id="entry.id"
|
||||
:disabled="eventsLoading"
|
||||
@@ -488,17 +530,6 @@ export default {
|
||||
rows () {
|
||||
const rows = {};
|
||||
this.items
|
||||
.filter(i => {
|
||||
return !this.$route.params.sequence || (this.$route.params.sequence == i.sequence)
|
||||
})
|
||||
.filter(i => {
|
||||
for (const label of this.filterableLabels) {
|
||||
if (!this.shownLabels.includes(label) && i.labels.includes(label)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
})
|
||||
.forEach(i => {
|
||||
const key = (i.sequence && i.point) ? (i.sequence+"@"+i.point) : i.tstamp;
|
||||
if (!rows[key]) {
|
||||
@@ -529,6 +560,10 @@ export default {
|
||||
.sort( (a, b) => b[1]-a[1] );
|
||||
},
|
||||
|
||||
filteredLabels () {
|
||||
return this.filterableLabels.filter( label => !this.shownLabels.includes(label) );
|
||||
},
|
||||
|
||||
presetRemarks () {
|
||||
return this.projectConfiguration?.events?.presetRemarks ?? [];
|
||||
},
|
||||
@@ -541,7 +576,17 @@ export default {
|
||||
}
|
||||
},
|
||||
|
||||
...mapGetters(['user', 'eventsLoading', 'online', 'sequence', 'line', 'point', 'position', 'timestamp', 'lineName', 'events', 'labels', 'userLabels', 'projectConfiguration']),
|
||||
sequenceIndex () {
|
||||
if ("sequence" in this.$route.params) {
|
||||
const index = this.sequences.findIndex( i => i.sequence == this.$route.params.sequence );
|
||||
if (index != -1) {
|
||||
return index;
|
||||
}
|
||||
}
|
||||
// return undefined
|
||||
},
|
||||
|
||||
...mapGetters(['user', 'eventsLoading', 'online', 'sequence', 'sequences', 'line', 'point', 'position', 'timestamp', 'lineName', 'events', 'labels', 'userLabels', 'projectConfiguration']),
|
||||
...mapState({projectSchema: state => state.project.projectSchema})
|
||||
|
||||
},
|
||||
@@ -549,6 +594,7 @@ export default {
|
||||
watch: {
|
||||
options: {
|
||||
async handler () {
|
||||
this.savePrefs();
|
||||
await this.fetchEvents();
|
||||
},
|
||||
deep: true
|
||||
@@ -567,12 +613,19 @@ export default {
|
||||
},
|
||||
|
||||
filter (newVal, oldVal) {
|
||||
this.savePrefs();
|
||||
if (newVal?.toLowerCase() != oldVal?.toLowerCase()) {
|
||||
this.fetchEvents();
|
||||
}
|
||||
},
|
||||
|
||||
labelSearch () {
|
||||
this.savePrefs();
|
||||
this.fetchEvents();
|
||||
},
|
||||
|
||||
filteredLabels () {
|
||||
this.savePrefs()
|
||||
this.fetchEvents();
|
||||
},
|
||||
|
||||
@@ -581,7 +634,7 @@ export default {
|
||||
},
|
||||
|
||||
user (newVal, oldVal) {
|
||||
this.itemsPerPage = Number(localStorage.getItem(`dougal/prefs/${this.user?.name}/${this.$route.params.project}/${this.$options.name}/items-per-page`)) || 25;
|
||||
this.loadPrefs();
|
||||
}
|
||||
|
||||
},
|
||||
@@ -632,8 +685,10 @@ export default {
|
||||
|
||||
async fetchEvents (opts = {}) {
|
||||
const options = {
|
||||
sequence: this.$route.params.sequence,
|
||||
text: this.filter,
|
||||
label: this.labelSearch,
|
||||
excludeLabels: this.filteredLabels,
|
||||
...this.options
|
||||
};
|
||||
const res = await this.getEvents([this.$route.params.project, options]);
|
||||
@@ -871,10 +926,36 @@ export default {
|
||||
*/
|
||||
},
|
||||
|
||||
getPrefsKey () {
|
||||
return `dougal/prefs/${this.user?.name}/${this.$route.params.project}/Log/v1`;
|
||||
},
|
||||
|
||||
savePrefs () {
|
||||
const prefs = {
|
||||
shownLabels: this.shownLabels,
|
||||
labelSearch: this.labelSearch,
|
||||
filter: this.filter,
|
||||
options: this.options
|
||||
};
|
||||
localStorage.setItem(this.getPrefsKey(), JSON.stringify(prefs));
|
||||
},
|
||||
|
||||
loadPrefs () {
|
||||
const stored = localStorage.getItem(this.getPrefsKey());
|
||||
if (stored) {
|
||||
const prefs = JSON.parse(stored);
|
||||
if (prefs.shownLabels !== undefined) this.shownLabels = prefs.shownLabels;
|
||||
if (prefs.labelSearch !== undefined) this.labelSearch = prefs.labelSearch;
|
||||
if (prefs.filter !== undefined) this.filter = prefs.filter;
|
||||
if (prefs.options !== undefined) this.options = prefs.options;
|
||||
}
|
||||
},
|
||||
|
||||
...mapActions(["api", "showSnack", "refreshEvents", "getEvents"])
|
||||
},
|
||||
|
||||
async mounted () {
|
||||
this.loadPrefs();
|
||||
this.fetchEvents();
|
||||
|
||||
window.addEventListener('keyup', this.handleKeyboardEvent);
|
||||
|
||||
@@ -470,6 +470,33 @@
|
||||
@click="zoomOut"
|
||||
>mdi-magnify-minus-outline</v-icon>
|
||||
</div>
|
||||
<div>
|
||||
<v-icon
|
||||
class="my-1"
|
||||
title="Tilt out"
|
||||
@click="tiltOut"
|
||||
>mdi-axis-x-rotate-counterclockwise</v-icon>
|
||||
</div>
|
||||
<div>
|
||||
<v-icon
|
||||
class="my-1"
|
||||
title="Tilt in"
|
||||
@click="tiltIn"
|
||||
>mdi-axis-x-rotate-clockwise</v-icon>
|
||||
</div>
|
||||
<div>
|
||||
<v-icon v-if="bearing==0"
|
||||
class="my-1"
|
||||
title="Bin up"
|
||||
@click="setBearing('ζ')"
|
||||
>mdi-view-grid-outline</v-icon>
|
||||
<v-icon v-else
|
||||
class="my-1"
|
||||
title="North up"
|
||||
:style="`transform: rotate(${-bearing}deg);`"
|
||||
@click="setBearing(0)"
|
||||
>mdi-navigation</v-icon>
|
||||
</div>
|
||||
<div>
|
||||
<v-icon
|
||||
class="my-1"
|
||||
@@ -661,6 +688,7 @@ export default {
|
||||
//maxZoom: 18,
|
||||
maxPitch: 89
|
||||
},
|
||||
bearing: 0,
|
||||
|
||||
vesselPosition: null,
|
||||
vesselTrackLastRefresh: 0,
|
||||
@@ -977,6 +1005,41 @@ export default {
|
||||
}
|
||||
},
|
||||
|
||||
tiltIn () {
|
||||
if (deck) {
|
||||
const viewState = deck.getViewports()[0];
|
||||
const initialViewState = {...this.viewStateDefaults, ...viewState};
|
||||
initialViewState.pitch -= 10;
|
||||
initialViewState.transitionDuration = 300;
|
||||
deck.setProps({initialViewState});
|
||||
}
|
||||
},
|
||||
|
||||
tiltOut () {
|
||||
if (deck) {
|
||||
const viewState = deck.getViewports()[0];
|
||||
const initialViewState = {...this.viewStateDefaults, ...viewState};
|
||||
initialViewState.pitch += 10;
|
||||
initialViewState.transitionDuration = 300;
|
||||
deck.setProps({initialViewState});
|
||||
}
|
||||
},
|
||||
|
||||
setBearing (bearing) {
|
||||
if (deck) {
|
||||
|
||||
if (bearing === 'ζ') {
|
||||
bearing = this.$store.getters.projectConfiguration?.binning?.theta ?? 0;
|
||||
}
|
||||
|
||||
const viewState = deck.getViewports()[0];
|
||||
const initialViewState = {...this.viewStateDefaults, ...viewState};
|
||||
initialViewState.bearing = (bearing + 360) % 360;
|
||||
initialViewState.transitionDuration = 300;
|
||||
deck.setProps({initialViewState});
|
||||
}
|
||||
},
|
||||
|
||||
toggleFullscreen() {
|
||||
const mapElement = document.getElementById('map-container');
|
||||
if (!this.isFullscreen) {
|
||||
@@ -1368,7 +1431,7 @@ export default {
|
||||
//console.log("SHOULD BE INITIALISING LAYERS HERE", gl);
|
||||
this.decodeURL();
|
||||
this.decodeURLHash();
|
||||
deck.onViewStateChange = this.updateURL;
|
||||
//deck.onViewStateChange = this.viewStateUpdated;
|
||||
},
|
||||
|
||||
setViewState () {
|
||||
@@ -1383,6 +1446,11 @@ export default {
|
||||
}
|
||||
},
|
||||
|
||||
viewStateUpdated ({viewState}) {
|
||||
this.bearing = viewState.bearing;
|
||||
this.updateURL({viewState});
|
||||
},
|
||||
|
||||
updateURL ({viewState} = {}) {
|
||||
if (!viewState && deck?.viewManager) {
|
||||
viewState = deck.getViewports()[0];
|
||||
@@ -1709,7 +1777,8 @@ export default {
|
||||
layers: [],
|
||||
getTooltip: this.getTooltip,
|
||||
pickingRadius: 24,
|
||||
onWebGLInitialized: this.initLayers
|
||||
onWebGLInitialized: this.initLayers,
|
||||
onViewStateChange: this.viewStateUpdated,
|
||||
});
|
||||
|
||||
// Get fullscreen state
|
||||
|
||||
@@ -27,6 +27,8 @@
|
||||
<v-chip v-for="group in value"
|
||||
label
|
||||
small
|
||||
:title="`View repeatability data for ${group}`"
|
||||
:href="`/groups/${group}`"
|
||||
>{{ group }}</v-chip>
|
||||
</template>
|
||||
|
||||
@@ -187,19 +189,20 @@ export default {
|
||||
...mapGetters(['loading', 'projects'])
|
||||
},
|
||||
|
||||
watch: {
|
||||
|
||||
async projects () {
|
||||
await this.load();
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
async list () {
|
||||
this.items = [...this.projects];
|
||||
},
|
||||
|
||||
async summary (item) {
|
||||
const details = await this.api([`/project/${item.pid}/summary`]);
|
||||
if (details) {
|
||||
return Object.assign({}, details, item);
|
||||
}
|
||||
},
|
||||
|
||||
title (item) {
|
||||
if (item.organisations) {
|
||||
return "Access:\n" + Object.entries(item.organisations).map( org =>
|
||||
@@ -210,30 +213,22 @@ export default {
|
||||
},
|
||||
|
||||
async load () {
|
||||
await this.refreshProjects();
|
||||
if (!this.projects.length) {
|
||||
this.refreshProjects();
|
||||
}
|
||||
await this.list();
|
||||
const promises = [];
|
||||
for (const key in this.items) {
|
||||
const item = this.items[key];
|
||||
const promise = this.summary(item)
|
||||
.then( expanded => {
|
||||
if (expanded) {
|
||||
this.$set(this.items, key, expanded);
|
||||
}
|
||||
});
|
||||
promises.push(promise);
|
||||
},
|
||||
|
||||
handlerLoad (context, {payload}) {
|
||||
if (payload?.table == "public") {
|
||||
this.load();
|
||||
}
|
||||
},
|
||||
|
||||
registerNotificationHandlers () {
|
||||
this.$store.dispatch('registerHandler', {
|
||||
table: 'project',
|
||||
|
||||
handler: (context, message) => {
|
||||
if (message.payload?.table == "public") {
|
||||
this.load();
|
||||
}
|
||||
}
|
||||
handler: this.handlerLoad
|
||||
});
|
||||
},
|
||||
|
||||
|
||||
@@ -6,8 +6,42 @@
|
||||
<v-progress-linear indeterminate v-if="loading"></v-progress-linear>
|
||||
<v-toolbar flat>
|
||||
<v-toolbar-title>
|
||||
<template v-if="$route.params.sequence">
|
||||
<v-btn icon small
|
||||
:disabled="sequenceIndex >= (sequences.length - 1)"
|
||||
:to="{name: 'shotlog', params: { sequence: (sequences[sequences.length-1]||{}).sequence }}"
|
||||
title="Go to the first sequence"
|
||||
>
|
||||
<v-icon dense>mdi-chevron-double-left</v-icon>
|
||||
</v-btn>
|
||||
<v-btn icon small
|
||||
:disabled="sequenceIndex >= (sequences.length - 1)"
|
||||
:to="{name: 'shotlog', params: { sequence: (sequences[sequenceIndex+1]||{}).sequence }}"
|
||||
title="Go to the previous sequence"
|
||||
>
|
||||
<v-icon dense>mdi-chevron-left</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
|
||||
Sequence {{sequenceNumber}}
|
||||
<small :class="statusColour" v-if="sequence">({{sequence.status}})</small>
|
||||
|
||||
<template v-if="$route.params.sequence">
|
||||
<v-btn icon small
|
||||
:disabled="sequenceIndex==0"
|
||||
:to="{name: 'shotlog', params: { sequence: (sequences[sequenceIndex-1]||{}).sequence }}"
|
||||
title="Go to the next sequence"
|
||||
>
|
||||
<v-icon dense>mdi-chevron-right</v-icon>
|
||||
</v-btn>
|
||||
<v-btn icon small class="mr-1"
|
||||
:disabled="sequenceIndex==0"
|
||||
:to="{name: 'shotlog', params: { sequence: (sequences[0]||{}).sequence }}"
|
||||
title="Go to the last sequence"
|
||||
>
|
||||
<v-icon dense>mdi-chevron-double-right</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
</v-toolbar-title>
|
||||
|
||||
<a v-if="$route.params.sequence"
|
||||
@@ -352,6 +386,16 @@ export default {
|
||||
return this.sequences.find(i => i.sequence == this.sequenceNumber);
|
||||
},
|
||||
|
||||
sequenceIndex () {
|
||||
if ("sequence" in this.$route.params) {
|
||||
const index = this.sequences.findIndex( i => i.sequence == this.$route.params.sequence );
|
||||
if (index != -1) {
|
||||
return index;
|
||||
}
|
||||
}
|
||||
// return undefined
|
||||
},
|
||||
|
||||
remarks () {
|
||||
return this.sequence?.remarks || "Nil.";
|
||||
},
|
||||
|
||||
@@ -357,6 +357,26 @@ app.map({
|
||||
delete: [ mw.auth.operations, mw.auth.access.write, mw.info.delete ]
|
||||
}
|
||||
},
|
||||
|
||||
/*
|
||||
* 4D comparisons
|
||||
*/
|
||||
|
||||
// FIXME no authentication yet!
|
||||
|
||||
'/comparison/group': {
|
||||
get: [ mw.etag.noSave, mw.comparisons.groups.list ],
|
||||
'/:group': {
|
||||
get: [ mw.etag.noSave, mw.comparisons.groups.get ],
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
|
||||
/*
|
||||
* Other endpoints
|
||||
*/
|
||||
|
||||
'/queue/outgoing/': {
|
||||
'asaqc': {
|
||||
get: [ mw.etag.noSave, mw.queue.asaqc.get ],
|
||||
|
||||
lib/www/server/api/middleware/comparisons/groups/get.js (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
const comparisons = require('../../../../lib/comparisons');
|
||||
|
||||
|
||||
module.exports = async function (req, res, next) {
|
||||
|
||||
try {
|
||||
// const data = await comparisons.groups()
|
||||
// if (data?.[req.params.group]) {
|
||||
const data = await comparisons.getGroup(req.params.group);
|
||||
if (data) {
|
||||
res.status(200).send(data);
|
||||
} else {
|
||||
res.status(404).send({message: "Group does not exist"});
|
||||
}
|
||||
return next();
|
||||
} catch (err) {
|
||||
next(err);
|
||||
}
|
||||
|
||||
};
|
||||
@@ -0,0 +1,4 @@
|
||||
module.exports = {
|
||||
list: require('./list'),
|
||||
get: require('./get'),
|
||||
}
|
||||
lib/www/server/api/middleware/comparisons/groups/list.js (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
const comparisons = require('../../../../lib/comparisons');
|
||||
|
||||
|
||||
module.exports = async function (req, res, next) {
|
||||
|
||||
try {
|
||||
const data = await comparisons.groups()
|
||||
if (data) {
|
||||
res.status(200).send(data);
|
||||
} else {
|
||||
res.status(204).end();
|
||||
}
|
||||
return next();
|
||||
} catch (err) {
|
||||
next(err);
|
||||
}
|
||||
|
||||
};
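For reference, the two handlers above differ on empty results: the list endpoint answers 204 No Content, the per-group endpoint 404. A hypothetical client call, using the raw route path since the URL prefix applied by the client's api action is not shown here:

const res = await fetch('/comparison/group/MyGroup');
if (res.status === 404) {
  console.warn('Group does not exist');
} else if (res.ok) {
  const comparisons = await res.json();   // an array, judging by its use in Group.vue
  console.log(`${comparisons.length} comparisons for MyGroup`);
}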
|
||||
lib/www/server/api/middleware/comparisons/index.js (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
module.exports = {
|
||||
groups: require('./groups')
|
||||
}
|
||||
@@ -66,8 +66,18 @@ const rels = [
|
||||
|
||||
function invalidateCache (data, cache) {
|
||||
return new Promise((resolve, reject) => {
|
||||
if (!data) {
|
||||
ERROR("invalidateCache called with no data");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!data.payload) {
|
||||
ERROR("invalidateCache called without a payload; channel = %s", data.channel);
|
||||
return;
|
||||
}
|
||||
|
||||
const channel = data.channel;
|
||||
const project = data.payload.pid ?? data.payload?.new?.pid ?? data.payload?.old?.pid;
|
||||
const project = data.payload?.pid ?? data.payload?.new?.pid ?? data.payload?.old?.pid;
|
||||
const operation = data.payload.operation;
|
||||
const table = data.payload.table;
|
||||
const fields = { channel, project, operation, table };
|
||||
|
||||
@@ -23,4 +23,5 @@ module.exports = {
|
||||
version: require('./version'),
|
||||
admin: require('./admin'),
|
||||
compress: require('./compress'),
|
||||
comparisons: require('./comparisons'),
|
||||
};
|
||||
|
||||
@@ -16,7 +16,6 @@ module.exports = async function (req, res, next) {
|
||||
|
||||
if (json.length) {
|
||||
const data = bundle(json, {type});
|
||||
console.log("bundle", data);
|
||||
res.status(200).send(Buffer.from(data));
|
||||
} else {
|
||||
res.status(404).send();
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
const project = require('../../lib/db/project');
|
||||
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
|
||||
|
||||
class DetectProjectConfigurationChange {
|
||||
@@ -10,7 +9,7 @@ class DetectProjectConfigurationChange {
|
||||
|
||||
// Grab project configurations.
|
||||
// NOTE that this will run asynchronously
|
||||
this.run({channel: "project"}, ctx);
|
||||
//this.run({channel: "project"}, ctx);
|
||||
}
|
||||
|
||||
async run (data, ctx) {
|
||||
@@ -28,13 +27,13 @@ class DetectProjectConfigurationChange {
|
||||
try {
|
||||
DEBUG("Project configuration change detected")
|
||||
|
||||
const projects = await project.get();
|
||||
project.organisations.setCache(projects);
|
||||
const projects = await ctx.db.project.get();
|
||||
ctx.db.project.organisations.setCache(projects);
|
||||
|
||||
const _ctx_data = {};
|
||||
for (let pid of projects.map(i => i.pid)) {
|
||||
DEBUG("Retrieving configuration for", pid);
|
||||
const cfg = await project.configuration.get(pid);
|
||||
const cfg = await ctx.db.project.configuration.get(pid);
|
||||
if (cfg?.archived === true) {
|
||||
DEBUG(pid, "is archived. Ignoring");
|
||||
continue;
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
const { schema2pid } = require('../../lib/db/connection');
|
||||
const { event } = require('../../lib/db');
|
||||
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
|
||||
|
||||
class DetectSoftStart {
|
||||
@@ -33,14 +31,19 @@ class DetectSoftStart {
|
||||
const prev = this.prev?.payload?.new?.meta;
|
||||
// DEBUG("%j", prev);
|
||||
// DEBUG("%j", cur);
|
||||
DEBUG("cur.num_guns: %d\ncur.num_active: %d\nprv.num_active: %d\ntest passed: %j", cur.num_guns, cur.num_active, prev.num_active, cur.num_active >= 1 && !prev.num_active && cur.num_active < cur.num_guns);
|
||||
if (cur.lineStatus == "online" || prev.lineStatus == "online") {
|
||||
DEBUG("lineStatus is online, assuming not in a soft start situation");
|
||||
return;
|
||||
}
|
||||
|
||||
DEBUG("cur.num_guns: %d\ncur.num_active: %d\nprv.num_active: %d\ncur.num_nofire: %d\nprev.num_nofire: %d", cur.num_guns, cur.num_active, prev.num_active, cur.num_nofire, prev.num_nofire);
|
||||
|
||||
|
||||
if (cur.num_active >= 1 && !prev.num_active && cur.num_active < cur.num_guns) {
|
||||
INFO("Soft start detected @", cur.tstamp);
|
||||
|
||||
// FIXME Shouldn't need to use schema2pid as pid already present in payload.
|
||||
const projectId = await schema2pid(cur._schema ?? prev._schema);
|
||||
const projectId = await ctx.schema2pid(cur._schema ?? prev._schema);
|
||||
|
||||
// TODO: Try and grab the corresponding comment from the configuration?
|
||||
const payload = {
|
||||
@@ -50,12 +53,16 @@ class DetectSoftStart {
|
||||
meta: {auto: true, author: `*${this.constructor.name}*`}
|
||||
};
|
||||
DEBUG("Posting event", projectId, payload);
|
||||
await event.post(projectId, payload);
|
||||
if (ctx.dryRun) {
|
||||
DEBUG(`DRY RUN: await ctx.db.event.post(${projectId}, ${payload});`);
|
||||
} else {
|
||||
await ctx.db.event.post(projectId, payload);
|
||||
}
|
||||
|
||||
} else if (cur.num_active == cur.num_guns && prev.num_active < cur.num_active) {
|
||||
} else if ((cur.num_active == cur.num_guns || (prev.num_nofire > 0 && cur.num_nofire == 0)) && prev.num_active < cur.num_active) {
|
||||
INFO("Full volume detected @", cur.tstamp);
|
||||
|
||||
const projectId = await schema2pid(cur._schema ?? prev._schema);
|
||||
const projectId = await ctx.schema2pid(cur._schema ?? prev._schema);
|
||||
|
||||
// TODO: Try and grab the corresponding comment from the configuration?
|
||||
const payload = {
|
||||
@@ -65,7 +72,11 @@ class DetectSoftStart {
|
||||
meta: {auto: true, author: `*${this.constructor.name}*`}
|
||||
};
|
||||
DEBUG("Posting event", projectId, payload);
|
||||
await event.post(projectId, payload);
|
||||
if (ctx.dryRun) {
|
||||
DEBUG(`DRY RUN: await ctx.db.event.post(${projectId}, ${payload});`);
|
||||
} else {
|
||||
await ctx.db.event.post(projectId, payload);
|
||||
}
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
const { schema2pid } = require('../../lib/db/connection');
|
||||
const { event } = require('../../lib/db');
|
||||
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
|
||||
|
||||
class DetectSOLEOL {
|
||||
@@ -43,7 +41,7 @@ class DetectSOLEOL {
|
||||
|
||||
// We must use schema2pid because the pid may not have been
|
||||
// populated for this event.
|
||||
const projectId = await schema2pid(cur._schema ?? prev._schema);
|
||||
const projectId = await ctx.schema2pid(cur._schema ?? prev._schema);
|
||||
const labels = ["FSP", "FGSP"];
|
||||
const remarks = `SEQ ${cur._sequence}, SOL ${cur.lineName}, BSP: ${(cur.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(cur.waterDepth).toFixed(0)} m.`;
|
||||
const payload = {
|
||||
@@ -55,24 +53,32 @@ class DetectSOLEOL {
|
||||
meta: {auto: true, author: `*${this.constructor.name}*`}
|
||||
}
|
||||
INFO("Posting event", projectId, payload);
|
||||
await event.post(projectId, payload);
|
||||
if (ctx.dryRun) {
|
||||
DEBUG(`DRY RUN: await ctx.db.event.post(${projectId}, ${payload});`);
|
||||
} else {
|
||||
await ctx.db.event.post(projectId, payload);
|
||||
}
|
||||
} else if (prev.lineName == cur.lineName && prev._sequence == cur._sequence &&
|
||||
prev.lineStatus == "online" && cur.lineStatus != "online" && sequence) {
|
||||
INFO("Transition to OFFLINE detected");
|
||||
|
||||
const projectId = await schema2pid(prev._schema ?? cur._schema);
|
||||
const projectId = await ctx.schema2pid(prev._schema ?? cur._schema);
|
||||
const labels = ["LSP", "LGSP"];
|
||||
const remarks = `SEQ ${cur._sequence}, EOL ${cur.lineName}, BSP: ${(cur.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(cur.waterDepth).toFixed(0)} m.`;
|
||||
const remarks = `SEQ ${prev._sequence}, EOL ${prev.lineName}, BSP: ${(prev.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(prev.waterDepth).toFixed(0)} m.`;
|
||||
const payload = {
|
||||
type: "sequence",
|
||||
sequence,
|
||||
point: cur._point,
|
||||
point: prev._point,
|
||||
remarks,
|
||||
labels,
|
||||
meta: {auto: true, author: `*${this.constructor.name}*`}
|
||||
}
|
||||
INFO("Posting event", projectId, payload);
|
||||
await event.post(projectId, payload);
|
||||
if (ctx.dryRun) {
|
||||
DEBUG(`DRY RUN: await ctx.db.event.post(${projectId}, ${payload});`);
|
||||
} else {
|
||||
await ctx.db.event.post(projectId, payload);
|
||||
}
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
|
||||
@@ -8,37 +8,6 @@ const Handlers = [
|
||||
require('./detect-fdsp')
|
||||
];
|
||||
|
||||
function init (ctx) {
|
||||
|
||||
const instances = Handlers.map(Handler => new Handler(ctx));
|
||||
|
||||
function prepare (data, ctx) {
|
||||
const promises = [];
|
||||
for (let instance of instances) {
|
||||
const promise = new Promise(async (resolve, reject) => {
|
||||
try {
|
||||
DEBUG("Run", instance.author);
|
||||
const result = await instance.run(data, ctx);
|
||||
DEBUG("%s result: %O", instance.author, result);
|
||||
resolve(result);
|
||||
} catch (err) {
|
||||
ERROR("%s error:\n%O", instance.author, err);
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
promises.push(promise);
|
||||
}
|
||||
return promises;
|
||||
}
|
||||
|
||||
function despatch (data, ctx) {
|
||||
return Promise.allSettled(prepare(data, ctx));
|
||||
}
|
||||
|
||||
return { instances, prepare, despatch };
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
Handlers,
|
||||
init
|
||||
};
|
||||
|
||||
@@ -1,6 +1,3 @@
|
||||
const { event, project } = require('../../lib/db');
|
||||
const { withinValidity } = require('../../lib/utils/ranges');
|
||||
const unique = require('../../lib/utils/unique');
|
||||
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
|
||||
|
||||
class ReportLineChangeTime {
|
||||
@@ -44,7 +41,7 @@ class ReportLineChangeTime {
|
||||
|
||||
async function getLineChangeTime (data, forward = false) {
|
||||
if (forward) {
|
||||
const ospEvents = await event.list(projectId, {label: "FGSP"});
|
||||
const ospEvents = await ctx.db.event.list(projectId, {label: "FGSP"});
|
||||
// DEBUG("ospEvents", ospEvents);
|
||||
const osp = ospEvents.filter(i => i.tstamp > data.tstamp).pop();
|
||||
DEBUG("fsp", osp);
|
||||
@@ -55,7 +52,7 @@ class ReportLineChangeTime {
|
||||
return { lineChangeTime: osp.tstamp - data.tstamp, osp };
|
||||
}
|
||||
} else {
|
||||
const ospEvents = await event.list(projectId, {label: "LGSP"});
|
||||
const ospEvents = await ctx.db.event.list(projectId, {label: "LGSP"});
|
||||
// DEBUG("ospEvents", ospEvents);
|
||||
const osp = ospEvents.filter(i => i.tstamp < data.tstamp).shift();
|
||||
DEBUG("lsp", osp);
|
||||
@@ -96,16 +93,20 @@ class ReportLineChangeTime {
|
||||
const opts = {jpq};
|
||||
|
||||
if (Array.isArray(seq)) {
|
||||
opts.sequences = unique(seq).filter(i => !!i);
|
||||
opts.sequences = ctx.unique(seq).filter(i => !!i);
|
||||
} else {
|
||||
opts.sequence = seq;
|
||||
}
|
||||
|
||||
const staleEvents = await event.list(projectId, opts);
|
||||
const staleEvents = await ctx.db.event.list(projectId, opts);
|
||||
DEBUG(staleEvents.length ?? 0, "events to delete");
|
||||
for (let staleEvent of staleEvents) {
|
||||
DEBUG(`Deleting event id ${staleEvent.id} (seq = ${staleEvent.sequence}, point = ${staleEvent.point})`);
|
||||
await event.del(projectId, staleEvent.id);
|
||||
if (ctx.dryRun) {
|
||||
DEBUG(`await ctx.db.event.del(${projectId}, ${staleEvent.id});`);
|
||||
} else {
|
||||
await ctx.db.event.del(projectId, staleEvent.id);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -180,7 +181,11 @@ class ReportLineChangeTime {
|
||||
|
||||
const maybePostEvent = async (projectId, payload) => {
|
||||
DEBUG("Posting event", projectId, payload);
|
||||
await event.post(projectId, payload);
|
||||
if (ctx.dryRun) {
|
||||
DEBUG(`await ctx.db.event.post(${projectId}, ${payload});`);
|
||||
} else {
|
||||
await ctx.db.event.post(projectId, payload);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -192,7 +197,7 @@ class ReportLineChangeTime {
|
||||
const data = n;
|
||||
DEBUG("INSERT seen: will add lct events related to ", data.id);
|
||||
|
||||
if (withinValidity(data.validity)) {
|
||||
if (ctx.withinValidity(data.validity)) {
|
||||
DEBUG("Event within validity period", data.validity, new Date());
|
||||
|
||||
data.tstamp = new Date(data.tstamp);
|
||||
|
||||
@@ -1,29 +1,101 @@
|
||||
const nodeAsync = require('async'); // npm install async
|
||||
const { listen } = require('../lib/db/notify');
|
||||
const db = require('../lib/db'); // Adjust paths; include all needed DB utils
|
||||
const { schema2pid } = require('../lib/db/connection');
|
||||
const unique = require('../lib/utils/unique'); // If needed by handlers
|
||||
const withinValidity = require('../lib/utils/ranges').withinValidity; // If needed
|
||||
const { ALERT, ERROR, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
|
||||
|
||||
// List of handler classes (add more as needed)
|
||||
const handlerClasses = require('./handlers').Handlers;
|
||||
|
||||
// Channels to listen to (hardcoded for simplicity; could scan handlers for mentions)
|
||||
const channels = require('../lib/db/channels');
|
||||
const handlers = require('./handlers');
|
||||
const { ActionsQueue } = require('../lib/queue');
|
||||
const { ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
|
||||
|
||||
function start () {
|
||||
// Queue config: Process one at a time for order; max retries=3
|
||||
const eventQueue = nodeAsync.queue(async (task, callback) => {
|
||||
const { data, ctx } = task;
|
||||
DEBUG(`Processing event on channel ${data.channel} with timestamp ${data._received ?? 'unknown'}`);
|
||||
|
||||
const queue = new ActionsQueue();
|
||||
const ctx = {}; // Context object
|
||||
for (const handler of ctx.handlers) {
|
||||
try {
|
||||
await handler.run(data, ctx);
|
||||
} catch (err) {
|
||||
ERROR(`Error in handler ${handler.constructor.name}:`, err);
|
||||
// Retry logic: Could add task.retries++, re-enqueue if < max
|
||||
}
|
||||
}
|
||||
|
||||
const { prepare, despatch } = handlers.init(ctx);
|
||||
if (typeof callback === 'function') {
|
||||
// async v3.2.6+ does not use callbacks with AsyncFunctions, but anyway
|
||||
callback();
|
||||
}
|
||||
}, 1); // Concurrency=1 for strict order
|
||||
|
||||
listen(channels, function (data) {
|
||||
DEBUG("Incoming data", data);
|
||||
eventQueue.error((err, task) => {
|
||||
ALERT(`Queue error processing task:`, err, task);
|
||||
});
|
||||
|
||||
// We don't bother awaiting
|
||||
queue.enqueue(() => despatch(data, ctx));
|
||||
DEBUG("Queue size", queue.length());
|
||||
// Main setup function (call from server init)
|
||||
async function setupEventHandlers(projectsConfig) {
|
||||
// Shared context
|
||||
const ctx = {
|
||||
dryRun: Boolean(process.env.DOUGAL_HANDLERS_DRY_RUN) ?? false, // If true, don't commit changes
|
||||
projects: { configuration: projectsConfig }, // From user config
|
||||
handlers: handlerClasses.map(Cls => new Cls()), // Instances
|
||||
// DB utils (add more as needed)
|
||||
db,
|
||||
schema2pid,
|
||||
unique,
|
||||
withinValidity
|
||||
// Add other utils, e.g., ctx.logger = DEBUG;
|
||||
};
|
||||
|
||||
// Optional: Replay recent events on startup to rebuild state
|
||||
// await replayRecentEvents(ctx);
|
||||
|
||||
// Setup listener
|
||||
const subscriber = await listen(channels, (rawData) => {
|
||||
const data = {
|
||||
...rawData,
|
||||
enqueuedAt: new Date() // For monitoring
|
||||
};
|
||||
eventQueue.push({ data, ctx });
|
||||
});
|
||||
|
||||
INFO("Events manager started");
|
||||
DEBUG('Event handler system initialized with channels:', channels);
|
||||
if (ctx.dryRun) {
|
||||
DEBUG('DRY RUNNING');
|
||||
}
|
||||
|
||||
// Return for cleanup if needed
|
||||
return {
|
||||
close: () => {
|
||||
subscriber.events.removeAllListeners();
|
||||
subscriber.close();
|
||||
eventQueue.kill();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = { start }
|
||||
// Optional: Replay last N events to rebuild handler state (e.g., this.prev)
|
||||
// async function replayRecentEvents(ctx) {
|
||||
// try {
|
||||
// // Example: Fetch last 10 realtime events, sorted by tstamp
|
||||
// const recentRealtime = await event.listAllProjects({ channel: 'realtime', limit: 10, sort: 'tstamp DESC' });
|
||||
// // Assume event.listAllProjects is a custom DB method; implement if needed
|
||||
//
|
||||
// // Enqueue in original order (reverse sort)
|
||||
// recentRealtime.reverse().forEach((evt) => {
|
||||
// const data = { channel: 'realtime', payload: { new: evt } };
|
||||
// eventQueue.push({ data, ctx });
|
||||
// });
|
||||
//
|
||||
// // Similarly for 'event' channel if needed
|
||||
// DEBUG('Replayed recent events for state rebuild');
|
||||
// } catch (err) {
|
||||
// ERROR('Error replaying events:', err);
|
||||
// }
|
||||
// }
|
||||
|
||||
if (require.main === module) {
|
||||
start();
|
||||
}
|
||||
module.exports = { setupEventHandlers };
|
||||
|
||||
@@ -2,18 +2,37 @@
|
||||
|
||||
const { ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
|
||||
|
||||
async function getProjectConfigurations (opts = {}) {
|
||||
const includeArchived = {includeArchived: false, ...opts};
|
||||
let projectConfigurations = {};
|
||||
try {
|
||||
const db = require('./lib/db');
|
||||
const pids = (await db.project.get())
|
||||
.filter(i => includeArchived || !i.archived)
|
||||
.map(i => i.pid);
|
||||
for (const pid of pids) {
|
||||
DEBUG(`Reading project configuration for ${pid}`);
|
||||
const cfg = await db.project.configuration.get(pid);
|
||||
projectConfigurations[pid] = cfg;
|
||||
}
|
||||
} catch (err) {
|
||||
ERROR("Failed to get project configurations");
|
||||
ERROR(err);
|
||||
}
|
||||
return projectConfigurations;
|
||||
}
|
||||
|
||||
async function main () {
|
||||
// Check that we're running against the correct database version
|
||||
const version = require('./lib/version');
|
||||
INFO("Running version", await version.describe());
|
||||
version.compatible()
|
||||
.then( (versions) => {
|
||||
.then( async (versions) => {
|
||||
try {
|
||||
const api = require('./api');
|
||||
const ws = require('./ws');
|
||||
const periodicTasks = require('./periodic-tasks').init();
|
||||
|
||||
const { fork } = require('child_process');
|
||||
const { setupEventHandlers } = require('./events');
|
||||
|
||||
const port = process.env.HTTP_PORT || 3000;
|
||||
const host = process.env.HTTP_HOST || "127.0.0.1";
|
||||
@@ -25,33 +44,31 @@ async function main () {
|
||||
|
||||
periodicTasks.start();
|
||||
|
||||
const eventManagerPath = [__dirname, "events"].join("/");
|
||||
const eventManager = fork(eventManagerPath, /*{ stdio: 'ignore' }*/);
|
||||
const projectConfigurations = await getProjectConfigurations();
|
||||
const handlerSystem = await setupEventHandlers(projectConfigurations);
|
||||
|
||||
process.on("SIGINT", async () => {
|
||||
DEBUG("Interrupted (SIGINT)");
|
||||
eventManager.kill()
|
||||
handlerSystem.close();
|
||||
await periodicTasks.cleanup();
|
||||
process.exit(0);
|
||||
})
|
||||
|
||||
process.on("SIGHUP", async () => {
|
||||
DEBUG("Stopping (SIGHUP)");
|
||||
eventManager.kill()
|
||||
handlerSystem.close();
|
||||
await periodicTasks.cleanup();
|
||||
process.exit(0);
|
||||
})
|
||||
|
||||
process.on('beforeExit', async () => {
|
||||
DEBUG("Preparing to exit");
|
||||
eventManager.kill()
|
||||
handlerSystem.close();
|
||||
await periodicTasks.cleanup();
|
||||
});
|
||||
|
||||
process.on('exit', async () => {
|
||||
DEBUG("Exiting");
|
||||
// eventManager.kill()
|
||||
// periodicTasks.cleanup();
|
||||
});
|
||||
} catch (err) {
|
||||
ERROR(err);
|
||||
|
||||
@@ -8,8 +8,6 @@ function bundle (json, opts = {}) {
|
||||
const deltas = [];
|
||||
const values = [];
|
||||
|
||||
// console.log("JSON LENGTH", json.length);
|
||||
// console.log("OPTS", geometries, payload);
|
||||
|
||||
if (type == 0) {
|
||||
/* Preplot information – sail line points
|
||||
@@ -40,7 +38,7 @@ function bundle (json, opts = {}) {
|
||||
|
||||
return encode.sequential(json, el => el.sailline, el => el.point, deltas, values, type)
|
||||
|
||||
} if (type == 1) {
|
||||
} else if (type == 1) {
|
||||
/* Preplot information – source line points
|
||||
*
|
||||
* elem 0: Float32Array Longitude
|
||||
@@ -74,7 +72,6 @@ function bundle (json, opts = {}) {
|
||||
type: Uint16Array
|
||||
});
|
||||
|
||||
console.log("JSON", json[0]);
|
||||
return encode.sequential(json, el => el.line, el => el.point, deltas, values, type)
|
||||
|
||||
} else if (type == 2) {
|
||||
@@ -222,9 +219,6 @@ function bundle (json, opts = {}) {
|
||||
type: Uint8Array
|
||||
});
|
||||
|
||||
console.log("DELTAS", deltas);
|
||||
console.log("VALUES", values);
|
||||
|
||||
return encode.sequential(json, el => el.sequence, el => el.point, deltas, values, type)
|
||||
} else if (type == 3) {
|
||||
/* Final positions and raw vs final errors:
|
||||
@@ -279,6 +273,113 @@ function bundle (json, opts = {}) {
|
||||
});
|
||||
|
||||
return encode.sequential(json, el => el.sequence, el => el.point, deltas, values, type)
|
||||
} else if (type == 4) {
|
||||
/* Bare final positions
|
||||
*
|
||||
* Δelem 0: Sequence no. (Uint16Array, Uint8Array)
|
||||
* elem 0‒1: Float32Array, Float32Array – Final positions (x, y)
|
||||
*
|
||||
*/
|
||||
|
||||
deltas.push({
|
||||
key: el => el[2],
|
||||
baseType: Uint16Array,
|
||||
incrType: Int8Array
|
||||
});
|
||||
|
||||
values.push({
|
||||
key: el => el[3],
|
||||
type: Float32Array
|
||||
});
|
||||
|
||||
values.push({
|
||||
key: el => el[4],
|
||||
type: Float32Array
|
||||
});
|
||||
|
||||
return encode.sequential(json, el => el[0], el => el[1], deltas, values, type)
|
||||
} else if (type == 0xa) {
|
||||
/* 4D comparison data:
|
||||
*
|
||||
* elem0: i differences
|
||||
* elem1: j differences
|
||||
*
|
||||
* Note that line/point may not be unique.
|
||||
*
|
||||
*/
|
||||
|
||||
/*
|
||||
deltas.push({
|
||||
key: el => el.baseTStamp,
|
||||
baseType: BigUint64Array,
|
||||
incrType: Int32Array
|
||||
});
|
||||
|
||||
deltas.push({
|
||||
key: el => el.monTStamp,
|
||||
baseType: BigUint64Array,
|
||||
incrType: Int32Array
|
||||
})
|
||||
*/
|
||||
|
||||
values.push({
|
||||
key: el => el[2],
|
||||
type: Float32Array
|
||||
});
|
||||
|
||||
values.push({
|
||||
key: el => el[3],
|
||||
type: Float32Array
|
||||
});
|
||||
|
||||
/*
|
||||
values.push({
|
||||
key: el => el.baseSeq,
|
||||
type: Uint16Array
|
||||
});
|
||||
|
||||
values.push({
|
||||
key: el => el.monSeq,
|
||||
type: Uint16Array
|
||||
});
|
||||
*/
|
||||
|
||||
return encode.sequential(json, el => el[0], el => el[1], deltas, values, type)
|
||||
} else if (type == 0xc) {
|
||||
/* 4D comparison data (reduced sample)
|
||||
*
|
||||
* Input is comparison records, i.e.:
|
||||
* [ [ line, point, δi, δj ], … ]
|
||||
*
|
||||
* elem0: line
|
||||
* elem1: point
|
||||
* elem2: δi
|
||||
* elem3: δj
|
||||
*
|
||||
* Note that the chunk's `i` and `j` values are not used
|
||||
*/
|
||||
|
||||
values.push({
|
||||
key: el => el[0],
|
||||
type: Uint16Array
|
||||
});
|
||||
|
||||
values.push({
|
||||
key: el => el[1],
|
||||
type: Uint16Array
|
||||
});
|
||||
|
||||
values.push({
|
||||
key: el => el[2],
|
||||
type: Float32Array
|
||||
});
|
||||
|
||||
values.push({
|
||||
key: el => el[3],
|
||||
type: Float32Array
|
||||
});
|
||||
|
||||
return encode.sequential(json, el => 0, el => 0, deltas, values, type)
|
||||
}
|
||||
}
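For orientation, a minimal sketch of how the type 0xc layout above might be fed to this function, using hypothetical records (the precise binary output is whatever encode.sequential produces):

// Hypothetical comparison records: [line, point, δi, δj]
const records = [
  [1001, 2050, 0.12, -0.08],
  [1001, 2051, 0.10, -0.05]
];
const buf = bundle(records, { type: 0xc });   // four value columns, no delta columns, per the layout above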
|
||||
|
||||
|
||||
lib/www/server/lib/comparisons/geometric-differences.js (new file, 429 lines)
@@ -0,0 +1,429 @@
|
||||
const d3a = require('d3-array');
|
||||
const { DougalBinaryBundle } = require('@dougal/binary');
|
||||
const { pool, setSurvey } = require('../db/connection');
|
||||
const db = require('../db');
|
||||
const { bundle } = require('../binary/bundle');
|
||||
const setops = require('../utils/setops');
|
||||
const { ijRMS, combinations, computeSample } = require('./utils');
|
||||
const { computePCA } = require('./pca');
|
||||
const { ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
|
||||
|
||||
|
||||
async function fetchErrors (pid) {
|
||||
|
||||
const client = await setSurvey(pid);
|
||||
|
||||
try {
|
||||
const text = `
|
||||
SELECT
|
||||
fs.line, fs.point,
|
||||
ij_error(fs.line::double precision, fs.point::double precision, fs.geometry)::json AS errorfinal
|
||||
FROM
|
||||
final_shots fs
|
||||
ORDER BY fs.line, fs.point;
|
||||
`;
|
||||
|
||||
const res = await client.query(text);
|
||||
|
||||
return res.rows.map( row =>
|
||||
[row.line, row.point, row.errorfinal.coordinates[0], row.errorfinal.coordinates[1]]
|
||||
);
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
} finally {
|
||||
client.release();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async function groupTimestamps (groupName) {
|
||||
const projects = (await groups())?.[groupName];
|
||||
if (projects?.length) {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async function groups () {
|
||||
const projects = await db.project.get({timestamps: true});
|
||||
const groupNames = [
|
||||
...projects
|
||||
.reduce( (acc, cur) => { cur.groups.forEach(g => acc.add(g)); return acc; }, new Set() )
|
||||
].filter( i => !!i );
|
||||
|
||||
return Object.fromEntries(groupNames.map( g => [g, projects.filter( p => p.groups.includes(g) )] ));
|
||||
}
|
||||
|
||||
function geometric_differences (baseline, monitor) {
|
||||
|
||||
if (!baseline || !baseline.length) {
|
||||
throw new Error("No baseline data");
|
||||
}
|
||||
|
||||
if (!monitor || !monitor.length) {
|
||||
throw new Error("No monitor data");
|
||||
}
|
||||
|
||||
const comparison = []; // An array of { line, point, εi, εj }; line + point may be repeated
|
||||
|
||||
for (const bp of baseline) {
|
||||
const monitor_points = monitor.filter( mp => mp[0] === bp[0] && mp[1] === bp[1] );
|
||||
|
||||
if (!monitor_points.length) {
|
||||
// console.log(`No match for L${bp[0]} P${bp[1]}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const mp of monitor_points) {
|
||||
const εi = mp[2] - bp[2], εj = mp[3] - bp[3];
|
||||
comparison.push([bp[0], bp[1], εi, εj]);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return comparison;
|
||||
}
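To make the pairing concrete, a small sketch with hypothetical shots:

// One baseline shot and one monitor shot at the same line/point:
const baseline = [[1001, 2050, 0.4, -0.2]];   // [line, point, εi, εj]
const monitor  = [[1001, 2050, 0.9,  0.3]];
geometric_differences(baseline, monitor);
// → [[1001, 2050, 0.5, 0.5]]  (monitor error minus baseline error for each matching point)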
|
||||
|
||||
async function compare (baselineProjectID, monitorProjectID, infoObj) {
|
||||
console.log("Getting baseline", baselineProjectID);
|
||||
const baselineData = await fetchErrors(baselineProjectID);
|
||||
console.log("Getting monitor", monitorProjectID);
|
||||
const monitorData = await fetchErrors(monitorProjectID);
|
||||
console.log("Comparing");
|
||||
|
||||
const comparison = geometric_differences(baselineData, monitorData);
|
||||
|
||||
if (infoObj instanceof Object) {
|
||||
const baselineIJ = baselineData.map(i => i.slice(0,2));
|
||||
const monitorIJ = monitorData.map(i => i.slice(0,2));
|
||||
|
||||
infoObj.compared = comparison.length;
|
||||
infoObj.baselineLength = baselineData.length;
|
||||
infoObj.monitorLength = monitorData.length;
|
||||
infoObj.baselineUniqueLength = setops.unique(baselineIJ).length;
|
||||
infoObj.monitorUniqueLength = setops.unique(monitorIJ).length;
|
||||
infoObj.common = setops.intersection(baselineIJ, monitorIJ).length;
|
||||
}
|
||||
|
||||
return comparison;
|
||||
}
|
||||
|
||||
|
||||
async function save (baselineProjectID, monitorProjectID, bundle, meta) {
|
||||
const info = {};
|
||||
if (!bundle) {
|
||||
const comparison = await compare(baselineProjectID, monitorProjectID, info);
|
||||
if (comparison.length) {
|
||||
bundle = asBundle(comparison);
|
||||
} else {
|
||||
console.warn(`No matching points between ${baselineProjectID} and ${monitorProjectID}`);
|
||||
return;
|
||||
}
|
||||
} else if (!(bundle instanceof DougalBinaryBundle)) {
|
||||
throw new Error("Illegal data: `bundle` must of null or of type DougalBinaryBundle");
|
||||
}
|
||||
|
||||
if (!bundle.byteLength) {
|
||||
console.warn(`Empty comparison results between ${baselineProjectID} and ${monitorProjectID}. Refusing to store`);
|
||||
return;
|
||||
}
|
||||
|
||||
meta = {tstamp: (new Date()), ...info, ...stats(bundle), ...meta};
|
||||
|
||||
console.log("Storing in database");
|
||||
const client = await pool.connect();
|
||||
|
||||
try {
|
||||
const text = `
|
||||
INSERT INTO comparisons.comparisons
|
||||
(type, baseline_pid, monitor_pid, data, meta)
|
||||
VALUES ('geometric_difference', $1, $2, $3, $4)
|
||||
ON CONFLICT (type, baseline_pid, monitor_pid)
|
||||
DO UPDATE SET
|
||||
data = EXCLUDED.data,
|
||||
meta = EXCLUDED.meta;
|
||||
`;
|
||||
|
||||
const values = [ baselineProjectID, monitorProjectID, Buffer.from(bundle), meta ];
|
||||
const res = await client.query(text, values);
|
||||
return res.rowCount;
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
} finally {
|
||||
client.release();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async function get (baselineProjectID, monitorProjectID, type = 'geometric_difference') {
|
||||
|
||||
const client = await pool.connect();
|
||||
|
||||
try {
|
||||
|
||||
const text = `
|
||||
SELECT data, meta
|
||||
FROM comparisons.comparisons
|
||||
WHERE type = $3 AND baseline_pid = $1 AND monitor_pid = $2;
|
||||
`;
|
||||
|
||||
const values = [ baselineProjectID, monitorProjectID, type ];
|
||||
const res = await client.query(text, values);
|
||||
if (!res.rows.length) {
|
||||
console.log("Comparison not found in database");
|
||||
return;
|
||||
}
|
||||
|
||||
const { data, meta } = res.rows[0];
|
||||
return {
|
||||
data: DougalBinaryBundle.clone(data),
|
||||
meta
|
||||
};
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
} finally {
|
||||
client.release();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async function getSample (baselineProjectID, monitorProjectID) {
|
||||
return await get(baselineProjectID, monitorProjectID, 'geometric_difference_sample');
|
||||
}
|
||||
|
||||
|
||||
async function remove (baselineProjectID, monitorProjectID) {
|
||||
const client = await pool.connect();
|
||||
|
||||
try {
|
||||
const text = `
|
||||
DELETE
|
||||
FROM comparisons.comparisons
|
||||
WHERE
|
||||
(type = 'geometric_difference' OR type = 'geometric_difference_sample')
|
||||
AND baseline_pid = $1
|
||||
AND monitor_pid = $2;
|
||||
`;
|
||||
|
||||
const values = [ baselineProjectID, monitorProjectID ];
|
||||
|
||||
const res = await client.query(text, values);
|
||||
return res.rowCount;
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
} finally {
|
||||
client.release();
|
||||
}
|
||||
}
|
||||
|
||||
function stats (comparison) {
|
||||
let i, j, δi, δj;
|
||||
|
||||
if (comparison instanceof DougalBinaryBundle) {
|
||||
console.log("Computing stats");
|
||||
const udv = comparison.chunks()[0]?.udv;
|
||||
|
||||
if (!udv) {
|
||||
console.error("Could not determine udv from first chunk");
|
||||
console.log(comparison.chunks());
|
||||
return;
|
||||
}
|
||||
|
||||
let records;
|
||||
|
||||
if (udv == 0xa) {
|
||||
records = comparison.records;
|
||||
|
||||
// Transpose the records
|
||||
[ i, j, δi, δj ] = Array.from({ length: 4 }, (_, i) => records.map(row => row[i]));
|
||||
} else if (udv == 0xc) {
|
||||
records = comparison.records;
|
||||
let _;
|
||||
[ _, _, i, j, δi, δj ] = Array.from({ length: 6 }, (_, i) => records.map(row => row[i]));
|
||||
} else {
|
||||
throw new Error(`Unrecognised DougalBinaryBundle User Defined Value: ${udv}`);
|
||||
}
|
||||
|
||||
return {
|
||||
length: records.length,
|
||||
μ: [ d3a.mean(δi), d3a.mean(δj) ],
|
||||
σ: [ d3a.deviation(δi), d3a.deviation(δj) ],
|
||||
//rms: ijRMS(δi, δj),
|
||||
...computePCA(records)
|
||||
}
|
||||
} else if (Array.isArray(comparison)) {
|
||||
if (Array.isArray(comparison[0])) {
|
||||
return stats(asBundle(comparison, {type: 0xc}));
|
||||
} else {
|
||||
// Assume object
|
||||
return stats(asBundle(comparison));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function sortFn (a, b) {
|
||||
if (a.line == b.line) {
|
||||
if (a.point == b.point) {
|
||||
return a.baseTStamp - b.baseTStamp;
|
||||
} else {
|
||||
return a.point - b.point;
|
||||
}
|
||||
} else {
|
||||
return a.line - b.line;
|
||||
}
|
||||
}
|
||||
|
||||
function asBundle (comparison, opts = {type: 0x0a}) {
|
||||
return DougalBinaryBundle.clone(bundle(comparison, opts));
|
||||
}
|
||||
|
||||
function fromBundle (bundle) {
|
||||
if (!(bundle instanceof DougalBinaryBundle)) {
|
||||
bundle = DougalBinaryBundle.clone(bundle);
|
||||
}
|
||||
|
||||
const json = [];
|
||||
for (const record of bundle) {
|
||||
record.shift();
|
||||
json.push(record);
|
||||
}
|
||||
return json;
|
||||
}
|
||||
|
||||
async function saveGroup (group, opts = {}) {
|
||||
if (group == null) {
|
||||
// Save everything
|
||||
const g = await groups();
|
||||
for (const group of Object.values(g)) {
|
||||
await saveGroup(group)
|
||||
}
|
||||
} else if (typeof group === "string") {
|
||||
// This is a group name
|
||||
const g = await groups();
|
||||
group = g[group];
|
||||
}
|
||||
|
||||
if (Array.isArray(group)) {
|
||||
const pids = group.map( i => i.pid ).sort();
|
||||
|
||||
for (const [ baselineProjectID, monitorProjectID ] of combinations(pids, 2)) {
|
||||
try {
|
||||
if (!opts.overwrite) {
|
||||
const exists = await get(baselineProjectID, monitorProjectID);
|
||||
if (exists) {
|
||||
DEBUG("Not overwriting existing comparison between %s and %s. Skipping", baselineProjectID, monitorProjectID);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
await save(baselineProjectID, monitorProjectID);
|
||||
DEBUG("Saved comparison between %s and %s", baselineProjectID, monitorProjectID);
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
ERROR("Error saving comparison between %s and %s", baselineProjectID, monitorProjectID);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async function getGroup (groupName, opts = {}) {
|
||||
|
||||
const client = await pool.connect();
|
||||
|
||||
try {
|
||||
|
||||
if (groupName) {
|
||||
|
||||
const group = (await groups())?.[groupName]?.map( i => i.pid)?.sort();
|
||||
|
||||
if (!group?.length || group?.length < 2) return;
|
||||
|
||||
|
||||
const pairs = combinations(group, 2);
|
||||
const flatValues = pairs.flat();
|
||||
const placeholders = [];
|
||||
for (let i = 0; i < pairs.length; i++) {
|
||||
placeholders.push(`($${i * 2 + 1}, $${i * 2 + 2})`);
|
||||
}
|
||||
const inClause = placeholders.join(',');
|
||||
const selectFields = opts.returnData ? 'data, meta' : 'meta';
|
||||
|
||||
const text = `
|
||||
SELECT baseline_pid, monitor_pid, ${selectFields}
|
||||
FROM comparisons.comparisons
|
||||
WHERE type = 'geometric_difference'
|
||||
AND (baseline_pid, monitor_pid) IN (VALUES ${inClause})
|
||||
ORDER BY baseline_pid, monitor_pid
|
||||
`;
|
||||
|
||||
if (!placeholders.length) {
|
||||
console.log("No pairs found in group");
|
||||
return [];
|
||||
}
|
||||
|
||||
const res = await client.query(text, flatValues);
|
||||
if (!res.rows.length) {
|
||||
console.log("Comparison not found in database");
|
||||
return;
|
||||
}
|
||||
|
||||
if (opts.returnData) {
|
||||
return res.rows.map( row => ({
|
||||
...row,
|
||||
data: DougalBinaryBundle.clone(row.data),
|
||||
}));
|
||||
} else {
|
||||
return res.rows;
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
const selectFields = opts.returnData ? 'data, meta' : 'meta';
|
||||
|
||||
const text = `
|
||||
SELECT baseline_pid, monitor_pid, ${selectFields}
|
||||
FROM comparisons.comparisons
|
||||
WHERE type = 'geometric_difference'
|
||||
ORDER BY baseline_pid, monitor_pid
|
||||
`;
|
||||
|
||||
const res = await client.query(text);
|
||||
if (!res.rows.length) {
|
||||
console.log("Comparison not found in database");
|
||||
return;
|
||||
}
|
||||
|
||||
if (opts.returnData) {
|
||||
return res.rows.map( row => ({
|
||||
...row,
|
||||
data: DougalBinaryBundle.clone(row.data),
|
||||
}));
|
||||
} else {
|
||||
return res.rows;
|
||||
}
|
||||
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
} finally {
|
||||
client.release();
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
groups,
|
||||
fetchErrors,
|
||||
compare,
|
||||
computeSample,
|
||||
get,
|
||||
save,
|
||||
getSample,
|
||||
saveGroup,
|
||||
getGroup,
|
||||
remove,
|
||||
stats,
|
||||
asBundle,
|
||||
fromBundle
|
||||
};
|
||||
lib/www/server/lib/comparisons/index.js (new file, 4 lines)
@@ -0,0 +1,4 @@
|
||||
|
||||
module.exports = {
|
||||
...require('./geometric-differences')
|
||||
}
|
||||
lib/www/server/lib/comparisons/pca.js (new file, 83 lines)
@@ -0,0 +1,83 @@
|
||||
const math = require('mathjs');
|
||||
|
||||
/**
|
||||
* Compute PCA (eigenvectors and eigenvalues) for deviation data to assess geometric repeatability.
|
||||
* @param {Array<Array<number>>} deviations - Array of [point, line, i_deviation, j_deviation]
|
||||
* @returns {Object} - { eigenvalues, eigenvectors, rms, anisotropy, primaryDirection }
|
||||
*/
|
||||
function computePCA(deviations) {
|
||||
// Extract i_deviation and j_deviation
|
||||
const deviationMatrix = deviations.map(row => [row[2], row[3]]);
|
||||
|
||||
// Convert to mathjs matrix
|
||||
const D = math.matrix(deviationMatrix);
|
||||
|
||||
// Compute mean for centering (1 x 2 matrix)
|
||||
const mean = math.mean(D, 0);
|
||||
|
||||
// Manually tile the mean to match D's shape (n x 2)
|
||||
const n = deviationMatrix.length;
|
||||
const meanArr = mean.toArray();
|
||||
const meanRepeated = math.matrix(
|
||||
Array(n).fill().map(() => [meanArr[0], meanArr[1]])
|
||||
);
|
||||
|
||||
// Center the data
|
||||
const centered = math.subtract(D, meanRepeated);
|
||||
|
||||
// Compute covariance matrix: (1/(n-1)) * (D_centered^T * D_centered)
|
||||
const covMatrix = math.multiply(
|
||||
math.multiply(1 / (n - 1), math.transpose(centered)),
|
||||
centered
|
||||
);
|
||||
|
||||
// Perform eigen decomposition
|
||||
const result = math.eigs(covMatrix);
|
||||
let eigenvalues = result.values;
|
||||
const evObjs = result.eigenvectors;
|
||||
|
||||
// Convert eigenvalues to array if it's a matrix
|
||||
eigenvalues = Array.isArray(eigenvalues) ? eigenvalues : eigenvalues.toArray();
|
||||
|
||||
// Create pairs and convert vector to array if necessary
|
||||
const pairs = eigenvalues.map((val, i) => {
|
||||
let vec = evObjs[i].vector;
|
||||
if (vec.toArray) vec = vec.toArray();
|
||||
return { val, vec };
|
||||
});
|
||||
|
||||
// Sort by descending eigenvalues
|
||||
pairs.sort((a, b) => b.val - a.val);
|
||||
|
||||
// Sorted eigenvalues
|
||||
const sortedEigenvalues = pairs.map(p => p.val);
|
||||
|
||||
// Build eigenvector matrix: rows as components, columns as eigenvectors
|
||||
const dimension = pairs[0].vec.length; // e.g., 2
|
||||
const evecRows = [];
|
||||
for (let comp = 0; comp < dimension; comp++) {
|
||||
evecRows.push(pairs.map(p => p.vec[comp]));
|
||||
}
|
||||
const sortedEigenvectors = math.matrix(evecRows);
|
||||
|
||||
// Compute RMS errors along principal axes
|
||||
const rms = sortedEigenvalues.map(val => Math.sqrt(Math.max(val, 0)));
|
||||
|
||||
// Compute anisotropy (ratio of major to minor axis variance)
|
||||
const anisotropy = sortedEigenvalues[0] / (sortedEigenvalues[1] || 1); // Avoid division by zero
|
||||
|
||||
// Primary direction (angle in degrees of major eigenvector)
|
||||
const primaryVector = sortedEigenvectors.subset(math.index([0, 1], 0)).toArray();
|
||||
const primaryDirection = Math.atan2(primaryVector[1], primaryVector[0]) * 180 / Math.PI;
|
||||
|
||||
return {
|
||||
eigenvalues: sortedEigenvalues,
|
||||
eigenvectors: sortedEigenvectors.toArray(),
|
||||
rms: rms, // RMS errors along major/minor axes
|
||||
anisotropy: anisotropy, // Ratio of variances
|
||||
primaryDirection: primaryDirection // Angle of major axis (degrees)
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
module.exports = { computePCA };
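A minimal usage sketch with hypothetical deviation rows ([point, line, δi, δj]; only columns 2 and 3 are used):

const { computePCA } = require('./pca');   // assumed relative path

const deviations = [
  [1, 101, 0.2, -0.1],
  [2, 101, 0.3,  0.0],
  [3, 101, 0.1,  0.1]
];
const { eigenvalues, rms, anisotropy, primaryDirection } = computePCA(deviations);
// eigenvalues: variances along the major/minor axes (descending)
// rms: square roots of those variances
// anisotropy: major/minor variance ratio; primaryDirection: major-axis angle in degrees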
|
||||
lib/www/server/lib/comparisons/utils.js (new file, 310 lines)
@@ -0,0 +1,310 @@
|
||||
const d3 = require('d3-array');
|
||||
|
||||
// Function to calculate the root mean square (RMS) of position deviations
|
||||
// This computes the RMS of the Euclidean distances: sqrt( (1/n) * sum(δi² + δj²) )
|
||||
// Assumes deviations are already centered (mean deviation ~0); if normalization by std dev or range is needed, adjust accordingly
|
||||
function ijRMS(δi, δj) {
|
||||
if (!δi.length || !δj.length) return 0;
|
||||
|
||||
if (δi.length != δj.length) {
|
||||
console.warn(`δi and δj have different lengths!`);
|
||||
}
|
||||
|
||||
let sumSquares = 0;
|
||||
const n = Math.min(δi.length, δj.length);
|
||||
|
||||
for (let i=0; i < n; i++) {
|
||||
sumSquares += (δi[i] * δi[i]) + (δj[i] * δj[i]);
|
||||
}
|
||||
|
||||
const meanSquare = sumSquares / n;
|
||||
const rms = Math.sqrt(meanSquare);
|
||||
|
||||
return rms;
|
||||
}
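As a worked example of the formula above:

// sqrt( ((0.3² + 0.4²) + ((-0.4)² + 0.3²)) / 2 ) = sqrt(0.25) = 0.5
ijRMS([0.3, -0.4], [0.4, 0.3]);   // → 0.5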
|
||||
|
||||
|
||||
/**
|
||||
* Performs stratified sampling on an array of [line, point, δi, δj] data points.
|
||||
* Groups by line and samples proportionally to preserve shape and spread.
|
||||
*
|
||||
* @param {Array<Array<number>>} data - Input data: [[line, point, δi, δj], ...]
|
||||
* @param {number} sampleSize - Target number of samples (e.g., 2000)
|
||||
* @returns {Array<Array<number>>} Sampled data in same format
|
||||
*/
|
||||
function old_stratifiedSample(data, sampleSize) {
|
||||
if (!Array.isArray(data) || data.length === 0) return [];
|
||||
if (!Number.isInteger(sampleSize) || sampleSize <= 0) {
|
||||
throw new Error('sampleSize must be a positive integer');
|
||||
}
|
||||
|
||||
// Group data by line (first element)
|
||||
const grouped = d3.group(data, d => d[0]);
|
||||
const totalSize = data.length;
|
||||
const sampled = [];
|
||||
|
||||
// Ensure sampleSize doesn't exceed data size
|
||||
const effectiveSampleSize = Math.min(sampleSize, totalSize);
|
||||
|
||||
// Iterate over each line group
|
||||
for (const [line, group] of grouped) {
|
||||
// Calculate proportional sample size for this group
|
||||
const groupSize = group.length;
|
||||
const groupSampleSize = Math.max(1, Math.round((groupSize / totalSize) * effectiveSampleSize));
|
||||
|
||||
// Shuffle group and take first N elements
|
||||
const shuffled = d3.shuffle([...group]);
|
||||
sampled.push(...shuffled.slice(0, groupSampleSize));
|
||||
}
|
||||
|
||||
// If sampled size is slightly off due to rounding, adjust
|
||||
if (sampled.length > effectiveSampleSize) {
|
||||
return d3.shuffle(sampled).slice(0, effectiveSampleSize);
|
||||
} else if (sampled.length < effectiveSampleSize) {
|
||||
// Pad with random samples from entire dataset if needed
|
||||
const remaining = effectiveSampleSize - sampled.length;
|
||||
const additional = d3.shuffle(data.filter(d => !sampled.includes(d))).slice(0, remaining);
|
||||
sampled.push(...additional);
|
||||
}
|
||||
|
||||
return sampled;
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs stratified sampling on an array of [line, point, δi, δj] data points.
|
||||
* Stratifies by line and δi quantiles to preserve shape and spread, with outlier control.
|
||||
*
|
||||
* @param {Array<Array<number>>} data - Input data: [[line, point, δi, δj], ...]
|
||||
* @param {number} sampleSize - Target number of samples (e.g., 2000)
|
||||
* @param {number} [binsPerLine=10] - Number of δi quantile bins per line
|
||||
* @returns {Array<Array<number>>} Sampled data in same format
|
||||
*/
|
||||
function stratifiedSample(data, sampleSize, binsPerLine = 10) {
|
||||
if (!Array.isArray(data) || data.length === 0) return [];
|
||||
if (!Number.isInteger(sampleSize) || sampleSize <= 0) {
|
||||
throw new Error('sampleSize must be a positive integer');
|
||||
}
|
||||
if (!Number.isInteger(binsPerLine) || binsPerLine <= 0) {
|
||||
throw new Error('binsPerLine must be a positive integer');
|
||||
}
|
||||
|
||||
const totalSize = data.length;
|
||||
const effectiveSampleSize = Math.min(sampleSize, totalSize);
|
||||
const sampled = [];
|
||||
|
||||
// Group by line
|
||||
const groupedByLine = d3.group(data, d => d[0]);
|
||||
|
||||
// Compute population stats for validation
|
||||
const populationStats = computeStats(data);
|
||||
|
||||
// Iterate over each line
|
||||
for (const [line, group] of groupedByLine) {
|
||||
const groupSize = group.length;
|
||||
const lineSampleSize = Math.max(1, Math.round((groupSize / totalSize) * effectiveSampleSize));
|
||||
|
||||
// Create quantile-based bins for δi
|
||||
const δiValues = group.map(d => d[2]).sort(d3.ascending);
|
||||
const quantiles = d3.range(0, binsPerLine + 1).map(i => d3.quantile(δiValues, i / binsPerLine));
|
||||
const binnedData = group.map(d => {
|
||||
const δi = d[2];
|
||||
let binIndex = 0;
|
||||
for (let i = 0; i < binsPerLine; i++) {
|
||||
if (δi >= quantiles[i] && δi < quantiles[i + 1]) {
|
||||
binIndex = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return { data: d, bin: binIndex };
|
||||
});
|
||||
const groupedByBin = d3.group(binnedData, d => d.bin);
|
||||
|
||||
// Allocate samples across bins, inversely weighted by density to control outliers
|
||||
const binSampleSizes = new Map();
|
||||
let remainingLineSamples = lineSampleSize;
|
||||
const binCounts = Array(binsPerLine).fill(0);
|
||||
for (const [bin, binGroup] of groupedByBin) {
|
||||
binCounts[bin] = binGroup.length;
|
||||
}
|
||||
const maxBinCount = d3.max(binCounts);
|
||||
for (const [bin, binGroup] of groupedByBin) {
|
||||
const binSize = binGroup.length;
|
||||
// Inverse weighting: smaller bins (outliers) get fewer samples
|
||||
const weight = binSize > 0 ? Math.max(0.1, 1 - (binSize / maxBinCount) * 0.5) : 1;
|
||||
const binSampleSize = Math.max(1, Math.round(lineSampleSize * (binSize / groupSize) * weight));
|
||||
binSampleSizes.set(bin, Math.min(binSampleSize, binSize));
|
||||
remainingLineSamples -= binSampleSizes.get(bin);
|
||||
}
|
||||
|
||||
// Distribute remaining samples
|
||||
if (remainingLineSamples > 0) {
|
||||
const nonEmptyBins = Array.from(groupedByBin.keys());
|
||||
for (let i = 0; i < remainingLineSamples && nonEmptyBins.length > 0; i++) {
|
||||
const bin = nonEmptyBins[i % nonEmptyBins.length];
|
||||
binSampleSizes.set(bin, binSampleSizes.get(bin) + 1);
|
||||
}
|
||||
}
|
||||
|
||||
// Sample from each bin
|
||||
for (const [bin, binGroup] of groupedByBin) {
|
||||
const samples = d3.shuffle([...binGroup]).slice(0, binSampleSizes.get(bin)).map(s => s.data);
|
||||
sampled.push(...samples);
|
||||
}
|
||||
}
|
||||
|
||||
// Adjust sample size
|
||||
let finalSample = sampled;
|
||||
if (sampled.length > effectiveSampleSize) {
|
||||
finalSample = d3.shuffle(sampled).slice(0, effectiveSampleSize);
|
||||
} else if (sampled.length < effectiveSampleSize) {
|
||||
const remaining = effectiveSampleSize - sampled.length;
|
||||
const additional = d3.shuffle(data.filter(d => !sampled.includes(d))).slice(0, remaining);
|
||||
finalSample = [...sampled, ...additional];
|
||||
}
|
||||
|
||||
// Validate and adjust if stats are off
|
||||
const sampleStats = computeStats(finalSample);
|
||||
const statTolerance = { μ: 0.1, σ: 0.2 }; // Allowable relative deviation
|
||||
const needsAdjustment =
|
||||
Math.abs(sampleStats.μ[0] - populationStats.μ[0]) / populationStats.μ[0] > statTolerance.μ ||
|
||||
Math.abs(sampleStats.μ[1] - populationStats.μ[1]) / populationStats.μ[1] > statTolerance.μ ||
|
||||
Math.abs(sampleStats.σ[0] - populationStats.σ[0]) / populationStats.σ[0] > statTolerance.σ ||
|
||||
Math.abs(sampleStats.σ[1] - populationStats.σ[1]) / populationStats.σ[1] > statTolerance.σ;
|
||||
|
||||
if (needsAdjustment) {
|
||||
// Add points from underrepresented regions
|
||||
const δiSample = finalSample.map(d => d[2]);
|
||||
const δiPopulation = data.map(d => d[2]);
|
||||
const quantiles = d3.range(0, binsPerLine + 1).map(i => d3.quantile(δiPopulation, i / binsPerLine));
|
||||
const sampleBins = d3.histogram().domain(d3.extent(δiPopulation)).thresholds(quantiles)(δiSample);
|
||||
const populationBins = d3.histogram().domain(d3.extent(δiPopulation)).thresholds(quantiles)(δiPopulation);
|
||||
const underSampledBins = sampleBins
|
||||
.map((b, i) => ({ bin: i, diff: populationBins[i].length / totalSize - b.length / finalSample.length }))
|
||||
.filter(b => b.diff > 0.1); // Significant under-sampling
|
||||
|
||||
if (underSampledBins.length > 0) {
|
||||
const additionalSamples = [];
|
||||
for (const { bin, diff } of underSampledBins) {
|
||||
const binData = data.filter(d => d[2] >= quantiles[bin] && d[2] < quantiles[bin + 1] && !finalSample.includes(d));
|
||||
const needed = Math.round((diff * effectiveSampleSize) / 2);
|
||||
additionalSamples.push(...d3.shuffle(binData).slice(0, needed));
|
||||
}
|
||||
finalSample = d3.shuffle([...finalSample, ...additionalSamples]).slice(0, effectiveSampleSize);
|
||||
}
|
||||
}
|
||||
|
||||
return finalSample;
|
||||
}
|
||||
|
||||
function decimate (data, decimationCount = 20) {
|
||||
return data.filter( (row, index) => (index % decimationCount) == 0 );
|
||||
}
|
||||
|
||||
function computeSample (data, opts = {}) {
|
||||
const DEFAULT_SAMPLE_SIZE = 2000;
|
||||
let sample;
|
||||
|
||||
if (opts.decimate === true) {
|
||||
if (opts.sampleSize > 0) {
|
||||
sample = decimate(data.records, Math.floor(data.records.length / opts.sampleSize));
|
||||
} else {
|
||||
sample = decimate(data.records, Math.floor(data.records.length / DEFAULT_SAMPLE_SIZE));
|
||||
}
|
||||
} else if (opts.decimate > 0) {
|
||||
sample = decimate(data.records, opts.decimate);
|
||||
} else if (opts.sampleSize) {
|
||||
sample = stratifiedSample(data.records, opts.sampleSize);
|
||||
} else {
|
||||
sample = stratifiedSample(data.records, DEFAULT_SAMPLE_SIZE);
|
||||
}
|
||||
|
||||
return sample;
|
||||
}
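A quick usage sketch, assuming a bundle-like object that exposes a records array:

const records = [[1001, 2050, 0.12, -0.08] /* , … more [line, point, δi, δj] rows */];
const sampleA = computeSample({ records }, { sampleSize: 500 });   // stratified, roughly 500 rows
const sampleB = computeSample({ records }, { decimate: 20 });      // keep every 20th row instead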
|
||||
|
||||
// Optional: Utility to compute stats for validation
|
||||
function computeStats(data) {
|
||||
const δi = data.map(d => d[2]);
|
||||
const δj = data.map(d => d[3]);
|
||||
const rms = Math.sqrt(d3.mean(data, d => d[2] ** 2 + d[3] ** 2));
|
||||
return {
|
||||
l: data.length,
|
||||
μ: [d3.mean(δi), d3.mean(δj)],
|
||||
σ: [d3.deviation(δi), d3.deviation(δj)],
|
||||
rms
|
||||
};
|
||||
}
|
||||
|
||||
function centre (data) {
|
||||
const stats = computeStats(data);
|
||||
|
||||
return data.map( row => [row[0], row[1], row[2]-stats.μ[0], row[3]-stats.μ[1]] )
|
||||
}
|
||||
|
||||
function outliers (data, sd=1.96) {
|
||||
const stats = computeStats(data);
|
||||
|
||||
function fn ([l, p, i, j]) {
|
||||
return (i - stats.μ[0]) > stats.σ[0]*sd ||
|
||||
(j - stats.μ[1]) > stats.σ[1]*sd;
|
||||
}
|
||||
|
||||
return data.filter(fn)
|
||||
}
|
||||
|
||||
function inliers (data, sd=1.96) {
|
||||
const stats = computeStats(data);
|
||||
|
||||
function fn ([l, p, i, j]) {
|
||||
return (i - stats.μ[0]) <= stats.σ[0]*sd &&
|
||||
(j - stats.μ[1]) <= stats.σ[1]*sd;
|
||||
}
|
||||
|
||||
return data.filter(fn)
|
||||
}
|
||||
|
||||
function difference (a, b) {
|
||||
const obj = Array.isArray(a) ? [] : {};
|
||||
for (const k in a) {
|
||||
const v0 = a[k];
|
||||
const v1 = b[k]
|
||||
if (v0 instanceof Object && v1 instanceof Object) {
|
||||
obj[k] = difference (v0, v1);
|
||||
} else if (!isNaN(Number(v0)) && !isNaN(Number(v1))) {
|
||||
obj[k] = v1 - v0;
|
||||
}
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
||||
function combinations (a, n) {
|
||||
const results = [];
|
||||
|
||||
function combine(current, start) {
|
||||
if (current.length === n) {
|
||||
results.push([...current]);
|
||||
return;
|
||||
}
|
||||
for (let i = start; i < a.length; i++) {
|
||||
current.push(a[i]);
|
||||
combine(current, i + 1);
|
||||
current.pop();
|
||||
}
|
||||
}
|
||||
|
||||
combine([], 0);
|
||||
return results;
|
||||
}
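For example, the unordered pairs drawn from three project IDs:

combinations(['P1', 'P2', 'P3'], 2);
// → [ ['P1', 'P2'], ['P1', 'P3'], ['P2', 'P3'] ]   (the pairs iterated by saveGroup)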
|
||||
|
||||
module.exports = {
|
||||
combinations,
|
||||
centre,
|
||||
ijRMS,
|
||||
computeStats,
|
||||
computeSample,
|
||||
stratifiedSample,
|
||||
old_stratifiedSample,
|
||||
decimate,
|
||||
difference,
|
||||
outliers,
|
||||
inliers
|
||||
}
|
||||
@@ -10,5 +10,6 @@ module.exports = [
|
||||
"planned_lines",
|
||||
"raw_lines", "raw_shots",
|
||||
"final_lines", "final_shots", "info",
|
||||
"queue_items"
|
||||
"queue_items",
|
||||
"comparisons",
|
||||
];
|
||||
|
||||
@@ -1,8 +1,14 @@
|
||||
const { setSurvey, pool } = require('../connection');
|
||||
|
||||
async function get () {
|
||||
async function get (opts = {}) {
|
||||
|
||||
const select = opts.timestamps
|
||||
? "last_project_update(pid) tstamp,"
|
||||
: "";
|
||||
|
||||
const text = `
|
||||
SELECT
|
||||
${select}
|
||||
pid,
|
||||
name,
|
||||
schema,
|
||||
|
||||
@@ -15,11 +15,50 @@ async function getSummary (projectId, sequence, opts = {}) {
|
||||
return res.rows[0];
|
||||
}
|
||||
|
||||
async function getPoints (projectId, sequence, opts = {}) {
|
||||
|
||||
const offset = Math.abs(opts.offset) || Math.abs((opts.page-1)*opts.itemsPerPage) || 0;
|
||||
const limit = Math.abs(opts.limit) || Math.abs(Number(opts.itemsPerPage)) || null;
|
||||
|
||||
const client = await setSurvey(projectId);
|
||||
|
||||
const restriction = sequence
|
||||
? "sequence = $3"
|
||||
: "TRUE OR $3";
|
||||
|
||||
const text = `
|
||||
SELECT line, point, sequence, st_x(ST_Transform(geometry, 4326)) longitude, st_y(ST_Transform(geometry, 4326)) latitude
|
||||
FROM final_shots
|
||||
WHERE ${restriction}
|
||||
ORDER BY sequence, point
|
||||
OFFSET $1
|
||||
LIMIT $2;
|
||||
`;
|
||||
|
||||
try {
|
||||
const res = await client.query({text, values: [offset, limit, sequence], rowMode: 'array'});
|
||||
return res.rows;
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
// throw { status: 500, message: "Internal error" };
|
||||
} finally {
|
||||
client.release();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
async function get (projectId, sequence, opts = {}) {
|
||||
if (opts.summary) {
|
||||
return await getSummary(projectId, sequence, opts);
|
||||
}
|
||||
if (opts.type == 4) {
|
||||
// The user is requesting that we send just the bare details:
|
||||
// sequence, sailline, line, longitude, latitude.
|
||||
//
|
||||
// This will probably be a binary data request (though it doesn't
|
||||
// need to be).
|
||||
return await getPoints(projectId, sequence, opts);
|
||||
}
|
||||
|
||||
const client = await setSurvey(projectId);
|
||||
|
||||
|
||||
@@ -1,52 +0,0 @@
|
||||
const Queue = require('./queue');
|
||||
|
||||
// Inspired by:
|
||||
// https://stackoverflow.com/questions/53540348/js-async-await-tasks-queue#53540586
|
||||
|
||||
class ActionsQueue extends Queue {
|
||||
|
||||
constructor (items = []) {
|
||||
super(items);
|
||||
|
||||
this.pending = false;
|
||||
}
|
||||
|
||||
enqueue (action) {
|
||||
return new Promise ((resolve, reject) => {
|
||||
super.enqueue({ action, resolve, reject });
|
||||
this.dequeue();
|
||||
});
|
||||
}
|
||||
|
||||
async dequeue () {
|
||||
|
||||
if (this.pending) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const item = super.dequeue();
|
||||
|
||||
if (!item) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
this.pending = true;
|
||||
|
||||
const result = await item.action(this);
|
||||
|
||||
this.pending = false;
|
||||
item.resolve(result);
|
||||
} catch (err) {
|
||||
this.pending = false;
|
||||
item.reject(err);
|
||||
} finally {
|
||||
this.dequeue();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = ActionsQueue;
|
||||
@@ -1,6 +0,0 @@
|
||||
|
||||
module.exports = {
|
||||
Queue: require('./queue'),
|
||||
ActionsQueue: require('./actions-queue')
|
||||
};
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
|
||||
class Queue {
|
||||
|
||||
constructor (items = []) {
|
||||
this.items = items;
|
||||
}
|
||||
|
||||
enqueue (item) {
|
||||
this.items.push(item);
|
||||
}
|
||||
|
||||
dequeue () {
|
||||
return this.items.shift();
|
||||
}
|
||||
|
||||
length () {
|
||||
return this.items.length;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = Queue;
|
||||
@@ -1,52 +1,110 @@
|
||||
// TODO Append location to PATH
|
||||
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const {Builder, By, Key, until} = require('selenium-webdriver');
|
||||
const firefox = require('selenium-webdriver/firefox');
|
||||
const { Builder, By, Key, until } = require('selenium-webdriver');
|
||||
const firefox = require('selenium-webdriver/firefox');
|
||||
const { execSync } = require('child_process');
|
||||
|
||||
const geckodriverPath = path.resolve(__dirname, "geckodriver");
|
||||
|
||||
// We launch a browser instance and then start an activity timer.
|
||||
// We shut down the browser after a period of inactivity, to
|
||||
// save memory.
|
||||
// State to prevent race conditions
|
||||
let driver = null;
|
||||
let timer = null;
|
||||
let isShuttingDown = false;
|
||||
|
||||
function resetTimer () {
|
||||
clearTimeout(timer);
|
||||
timer = setTimeout(shutdown, 120000); // Yup, hardcoded to two minutes. For now anyway
|
||||
// Verify GeckoDriver exists
|
||||
if (!fs.existsSync(geckodriverPath)) {
|
||||
throw new Error(`GeckoDriver not found at ${geckodriverPath}`);
|
||||
}
|
||||
|
||||
async function launch () {
|
||||
function resetTimer() {
|
||||
clearTimeout(timer);
|
||||
timer = setTimeout(shutdown, 120000); // 2 minutes inactivity timeout
|
||||
}
|
||||
|
||||
async function launch() {
|
||||
if (isShuttingDown) {
|
||||
console.log("Shutdown in progress, waiting...");
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
return launch(); // Retry after delay
|
||||
}
|
||||
resetTimer();
|
||||
if (!driver) {
|
||||
console.log("Launching Firefox");
|
||||
const options = new firefox.Options();
|
||||
// Explicitly set headless mode and optimize for server
|
||||
options.addArguments('--headless', '--no-sandbox', '--disable-gpu');
|
||||
// Limit content processes to reduce resource usage
|
||||
options.setPreference('dom.ipc.processCount', 1);
|
||||
|
||||
const service = new firefox.ServiceBuilder(geckodriverPath);
|
||||
driver = await new Builder()
|
||||
.forBrowser('firefox')
|
||||
.setFirefoxService(new firefox.ServiceBuilder(geckodriverPath))
|
||||
.setFirefoxOptions(options.headless())
|
||||
.setFirefoxService(service)
|
||||
.setFirefoxOptions(options)
|
||||
.build();
|
||||
}
|
||||
}
|
||||
|
||||
async function shutdown () {
|
||||
if (driver) {
|
||||
async function shutdown() {
|
||||
if (driver && !isShuttingDown) {
|
||||
isShuttingDown = true;
|
||||
console.log("Shutting down Firefox");
|
||||
// This is an attempt at avoiding a race condition if someone
|
||||
// makes a call and resets the timer while the shutdown is in
|
||||
// progress.
|
||||
const d = driver;
|
||||
driver = null;
|
||||
await d.quit();
|
||||
try {
|
||||
const d = driver;
|
||||
driver = null;
|
||||
await d.quit();
|
||||
// Explicitly stop the service
|
||||
const service = d.service;
|
||||
if (service) {
|
||||
service.stop();
|
||||
}
|
||||
console.log("Firefox shutdown complete");
|
||||
} catch (error) {
|
||||
console.error("Error during shutdown:", error);
|
||||
// Forcefully kill lingering processes (Linux/Unix)
|
||||
try {
|
||||
execSync('pkill -u $USER firefox || true');
|
||||
execSync('pkill -u $USER geckodriver || true');
|
||||
console.log("Terminated lingering Firefox/GeckoDriver processes");
|
||||
} catch (killError) {
|
||||
console.error("Error killing processes:", killError);
|
||||
}
|
||||
} finally {
|
||||
isShuttingDown = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function url2pdf (url) {
|
||||
async function url2pdf(url) {
|
||||
await launch();
|
||||
await driver.get(url);
|
||||
return await driver.printPage({width: 21.0, height: 29.7});
|
||||
try {
|
||||
console.log(`Navigating to ${url}`);
|
||||
await driver.get(url);
|
||||
// Add delay to stabilize Marionette communication
|
||||
await driver.sleep(3000);
|
||||
const pdf = await driver.printPage({ width: 21.0, height: 29.7 });
|
||||
resetTimer(); // Reset timer after successful operation
|
||||
return pdf;
|
||||
} catch (error) {
|
||||
console.error("Error in url2pdf:", error);
|
||||
await shutdown(); // Force shutdown on error
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Periodically clean up orphaned processes (every 5 minutes)
|
||||
setInterval(() => {
|
||||
try {
|
||||
const firefoxCount = execSync('pgrep -c firefox || echo 0').toString().trim();
|
||||
if (parseInt(firefoxCount) > 0 && !driver) {
|
||||
console.log(`Found ${firefoxCount} orphaned Firefox processes, cleaning up...`);
|
||||
execSync('pkill -u $USER firefox || true');
|
||||
execSync('pkill -u $USER geckodriver || true');
|
||||
console.log("Cleaned up orphaned processes");
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Error checking orphaned processes:", error);
|
||||
}
|
||||
}, 300000);
|
||||
|
||||
module.exports = { url2pdf };
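A minimal usage sketch, assuming the module path and that printPage resolves to a base64-encoded PDF string (as in selenium-webdriver):

const fs = require('fs');
const { url2pdf } = require('./url2pdf');   // assumed module path

url2pdf('http://localhost:3000/report/42')    // hypothetical report URL
  .then(pdf => fs.writeFileSync('/tmp/report.pdf', Buffer.from(pdf, 'base64')))
  .catch(console.error);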
|
||||
|
||||
lib/www/server/lib/utils/setops.js (new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
|
||||
function unique(arr) {
|
||||
const set = new Set(arr.map(JSON.stringify));
|
||||
return Array.from(set).map(JSON.parse);
|
||||
}
|
||||
|
||||
function duplicates(arr) {
|
||||
const seen = new Set();
|
||||
const dups = new Set();
|
||||
for (const item of arr.map(JSON.stringify)) {
|
||||
if (seen.has(item)) {
|
||||
dups.add(item);
|
||||
} else {
|
||||
seen.add(item);
|
||||
}
|
||||
}
|
||||
return Array.from(dups).map(JSON.parse);
|
||||
}
|
||||
|
||||
function union(arr1, arr2) {
|
||||
const set = new Set([...arr1, ...arr2].map(JSON.stringify));
|
||||
return Array.from(set).map(JSON.parse);
|
||||
}
|
||||
|
||||
|
||||
function intersection(arr1, arr2) {
|
||||
const set2 = new Set(arr2.map(JSON.stringify));
|
||||
return arr1.filter(item => set2.has(JSON.stringify(item)));
|
||||
}
|
||||
|
||||
function difference(arr1, arr2) {
|
||||
const set2 = new Set(arr2.map(JSON.stringify));
|
||||
return arr1.filter(item => !set2.has(JSON.stringify(item)));
|
||||
}
|
||||
|
||||
function symmetricDifference(arr1, arr2) {
|
||||
const set1 = new Set(arr1.map(JSON.stringify));
|
||||
const set2 = new Set(arr2.map(JSON.stringify));
|
||||
return [
|
||||
...arr1.filter(item => !set2.has(JSON.stringify(item))),
|
||||
...arr2.filter(item => !set1.has(JSON.stringify(item)))
|
||||
];
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
unique,
|
||||
duplicates,
|
||||
union,
|
||||
intersection,
|
||||
difference,
|
||||
symmetricDifference
|
||||
}
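A quick usage sketch with [line, point] pairs; equality is by JSON serialisation, so nested arrays compare by value:

const { unique, intersection } = require('./setops');   // assumed relative path

unique([[1, 10], [1, 10], [2, 20]]);            // → [[1, 10], [2, 20]]
intersection([[1, 10], [2, 20]], [[2, 20]]);    // → [[2, 20]]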
|
||||
@@ -16,7 +16,7 @@
|
||||
"api": "0.4.0"
|
||||
},
|
||||
"wanted": {
|
||||
"db_schema": "^0.6.0"
|
||||
"db_schema": "^0.6.6"
|
||||
}
|
||||
},
|
||||
"engines": {
|
||||
@@ -29,6 +29,7 @@
|
||||
"@dougal/binary": "file:../../modules/@dougal/binary",
|
||||
"@dougal/organisations": "file:../../modules/@dougal/organisations",
|
||||
"@dougal/user": "file:../../modules/@dougal/user",
|
||||
"async": "^3.2.6",
|
||||
"body-parser": "gitlab:aaltronav/contrib/expressjs/body-parser",
|
||||
"busboy": "^1.6.0",
|
||||
"compression": "^1.8.1",
|
||||
@@ -43,6 +44,7 @@
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"leaflet-headless": "git+https://git@gitlab.com/aaltronav/contrib/leaflet-headless.git#devel",
|
||||
"marked": "^4.0.12",
|
||||
"mathjs": "^14.6.0",
|
||||
"node-fetch": "^2.6.1",
|
||||
"nunjucks": "^3.2.3",
|
||||
"path-to-regexp": "^6.2.1",
|
||||
|
||||
package-lock.json (generated, 130 lines)
@@ -1678,17 +1678,6 @@
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"lib/www/client/source/node_modules/@babel/runtime": {
|
||||
"version": "7.23.2",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"regenerator-runtime": "^0.14.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
}
|
||||
},
|
||||
"lib/www/client/source/node_modules/@babel/template": {
|
||||
"version": "7.27.2",
|
||||
"dev": true,
|
||||
@@ -7524,11 +7513,6 @@
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"lib/www/client/source/node_modules/regenerator-runtime": {
|
||||
"version": "0.14.0",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"lib/www/client/source/node_modules/regenerator-transform": {
|
||||
"version": "0.15.2",
|
||||
"dev": true,
|
||||
@@ -9359,6 +9343,7 @@
|
||||
"@dougal/binary": "file:../../modules/@dougal/binary",
|
||||
"@dougal/organisations": "file:../../modules/@dougal/organisations",
|
||||
"@dougal/user": "file:../../modules/@dougal/user",
|
||||
"async": "^3.2.6",
|
||||
"body-parser": "gitlab:aaltronav/contrib/expressjs/body-parser",
|
||||
"busboy": "^1.6.0",
|
||||
"compression": "^1.8.1",
|
||||
@@ -9373,6 +9358,7 @@
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"leaflet-headless": "git+https://git@gitlab.com/aaltronav/contrib/leaflet-headless.git#devel",
|
||||
"marked": "^4.0.12",
|
||||
"mathjs": "^14.6.0",
|
||||
"node-fetch": "^2.6.1",
|
||||
"nunjucks": "^3.2.3",
|
||||
"path-to-regexp": "^6.2.1",
|
||||
@@ -10526,17 +10512,6 @@
|
||||
"node": ">=6.0.0"
|
||||
}
|
||||
},
|
||||
"lib/www/server/node_modules/redoc-cli/node_modules/@babel/runtime": {
|
||||
"version": "7.16.7",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"regenerator-runtime": "^0.13.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
}
|
||||
},
|
||||
"lib/www/server/node_modules/redoc-cli/node_modules/@babel/template": {
|
||||
"version": "7.12.13",
|
||||
"dev": true,
|
||||
@@ -12531,11 +12506,6 @@
|
||||
"url": "https://github.com/Mermade/oas-kit?sponsor=1"
|
||||
}
|
||||
},
|
||||
"lib/www/server/node_modules/redoc-cli/node_modules/regenerator-runtime": {
|
||||
"version": "0.13.9",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"lib/www/server/node_modules/redoc-cli/node_modules/require-directory": {
|
||||
"version": "2.1.1",
|
||||
"dev": true,
|
||||
@@ -13296,6 +13266,15 @@
|
||||
"node": ">=0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/runtime": {
|
||||
"version": "7.28.3",
|
||||
"resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.3.tgz",
|
||||
"integrity": "sha512-9uIQ10o0WGdpP6GDhXcdOJPJuDgFtIDtN/9+ArJQ2NAfAmiuhTQdzkaTGR33v43GYS2UrSA0eX2pPPHoFVvpxA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@deck.gl/aggregation-layers": {
|
||||
"version": "9.1.13",
|
||||
"resolved": "https://registry.npmjs.org/@deck.gl/aggregation-layers/-/aggregation-layers-9.1.13.tgz",
|
||||
@@ -14171,6 +14150,11 @@
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/async": {
|
||||
"version": "3.2.6",
|
||||
"resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz",
|
||||
"integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA=="
|
||||
},
|
||||
"node_modules/asynckit": {
|
||||
"version": "0.4.0",
|
||||
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
||||
@@ -14383,6 +14367,19 @@
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"node_modules/complex.js": {
|
||||
"version": "2.4.2",
|
||||
"resolved": "https://registry.npmjs.org/complex.js/-/complex.js-2.4.2.tgz",
|
||||
"integrity": "sha512-qtx7HRhPGSCBtGiST4/WGHuW+zeaND/6Ld+db6PbrulIB1i2Ev/2UPiqcmpQNPSyfBKraC0EOvOKCB5dGZKt3g==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": "*"
|
||||
},
|
||||
"funding": {
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/rawify"
|
||||
}
|
||||
},
|
||||
"node_modules/compressible": {
|
||||
"version": "2.0.18",
|
||||
"resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz",
|
||||
@@ -15083,6 +15080,12 @@
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/escape-latex": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/escape-latex/-/escape-latex-1.2.0.tgz",
|
||||
"integrity": "sha512-nV5aVWW1K0wEiUIEdZ4erkGGH8mDxGyxSeqPzRNtWP7ataw+/olFObw7hujFWlVjNsaDFw5VZ5NzVSIqRgfTiw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/escodegen": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz",
|
||||
@@ -15199,6 +15202,19 @@
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/fraction.js": {
|
||||
"version": "5.3.1",
|
||||
"resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.1.tgz",
|
||||
"integrity": "sha512-PhqCuhSKIGbbkJ+cojHv47eEWClU71FIOhiUsYdZYTwhIzCeIN8rXeEjserTvPat5JLJChumn8chHz64WkZgTw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": "*"
|
||||
},
|
||||
"funding": {
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/rawify"
|
||||
}
|
||||
},
|
||||
"node_modules/fs-minipass": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz",
|
||||
@@ -15629,6 +15645,12 @@
|
||||
"resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
|
||||
"integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g=="
|
||||
},
|
||||
"node_modules/javascript-natural-sort": {
|
||||
"version": "0.7.1",
|
||||
"resolved": "https://registry.npmjs.org/javascript-natural-sort/-/javascript-natural-sort-0.7.1.tgz",
|
||||
"integrity": "sha512-nO6jcEfZWQXDhOiBtG2KvKyEptz7RVbpGP4vTD2hLBdmNQSsCiicO2Ioinv6UI4y9ukqnBpy+XZ9H6uLNgJTlw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/jsbn": {
|
||||
"version": "0.1.1",
|
||||
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
|
||||
@@ -15842,6 +15864,29 @@
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/mathjs": {
|
||||
"version": "14.6.0",
|
||||
"resolved": "https://registry.npmjs.org/mathjs/-/mathjs-14.6.0.tgz",
|
||||
"integrity": "sha512-5vI2BLB5GKQmiSK9BH6hVkZ+GgqpdnOgEfmHl7mqVmdQObLynr63KueyYYLCQMzj66q69mV2XZZGQqqxeftQbA==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.26.10",
|
||||
"complex.js": "^2.2.5",
|
||||
"decimal.js": "^10.4.3",
|
||||
"escape-latex": "^1.2.0",
|
||||
"fraction.js": "^5.2.1",
|
||||
"javascript-natural-sort": "^0.7.1",
|
||||
"seedrandom": "^3.0.5",
|
||||
"tiny-emitter": "^2.1.0",
|
||||
"typed-function": "^4.2.1"
|
||||
},
|
||||
"bin": {
|
||||
"mathjs": "bin/cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 18"
|
||||
}
|
||||
},
|
||||
"node_modules/md5": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz",
|
||||
@@ -16337,6 +16382,12 @@
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/seedrandom": {
|
||||
"version": "3.0.5",
|
||||
"resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-3.0.5.tgz",
|
||||
"integrity": "sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/semver": {
|
||||
"version": "7.7.2",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
|
||||
@@ -16600,6 +16651,12 @@
|
||||
"texture-compressor": "bin/texture-compressor.js"
|
||||
}
|
||||
},
|
||||
"node_modules/tiny-emitter": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/tiny-emitter/-/tiny-emitter-2.1.0.tgz",
|
||||
"integrity": "sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/toidentifier": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
|
||||
@@ -16661,6 +16718,15 @@
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/typed-function": {
|
||||
"version": "4.2.1",
|
||||
"resolved": "https://registry.npmjs.org/typed-function/-/typed-function-4.2.1.tgz",
|
||||
"integrity": "sha512-EGjWssW7Tsk4DGfE+5yluuljS1OGYWiI1J6e8puZz9nTMM51Oug8CD5Zo4gWMsOhq5BI+1bF+rWTm4Vbj3ivRA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 18"
|
||||
}
|
||||
},
|
||||
"node_modules/undici-types": {
|
||||
"version": "7.8.0",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.8.0.tgz",
|
||||
|
||||