Compare commits

...

42 Commits

Author SHA1 Message Date
D. Berge
cf8b0937d9 Rework comparison components.
More focused on error ellipses.
2025-08-19 19:28:19 +02:00
D. Berge
d737f5d676 Refresh comparisons when notified of changes 2025-08-19 19:27:38 +02:00
D. Berge
5fe19da586 Add control to reset comparisons view 2025-08-19 19:27:03 +02:00
D. Berge
0af0cf4b42 Add overlays when loading / data error 2025-08-19 18:58:04 +02:00
D. Berge
ccb8205d26 Don't cache comparisons in the API 2025-08-19 18:55:31 +02:00
D. Berge
9b3fffdcfc Don't save comparison samples 2025-08-19 18:54:28 +02:00
D. Berge
dea1e9ee0d Add comparisons channel to notifications 2025-08-19 18:53:40 +02:00
D. Berge
d45ec767ec Add database upgrade file 43 2025-08-19 17:56:30 +02:00
D. Berge
67520ffc48 Add database upgrade file 42 2025-08-19 17:56:14 +02:00
D. Berge
22a296ba26 Add database upgrade file 41 2025-08-19 17:55:58 +02:00
D. Berge
f89435d80f Don't overwrite existing comparisons unless forced.
opts.overwrite = true will cause existing comparisons to be
recomputed.
2025-08-19 17:20:57 +02:00
D. Berge
a3f1dd490c Fix non-existent method 2025-08-19 17:20:03 +02:00
D. Berge
2fcfcb4f84 Add link to group comparison from project list 2025-08-18 16:39:20 +02:00
D. Berge
b60db7e7ef Add frontend route for 4D comparisons 2025-08-18 14:17:17 +02:00
D. Berge
4bb087fff7 Add 4D comparisons list Vue component 2025-08-18 14:16:23 +02:00
D. Berge
15af5effc3 Add 4D comparisons Vue component 2025-08-18 14:15:52 +02:00
D. Berge
b5c6d04e62 Add utilities for transforming duration objects 2025-08-18 14:15:14 +02:00
D. Berge
571c5a8bca Add Vue components for 4D comparisons 2025-08-18 14:14:34 +02:00
D. Berge
c45982829c Add set operations utilities 2025-08-18 14:11:56 +02:00
D. Berge
f3958b37b7 Add comparison API endpoints 2025-08-18 14:11:20 +02:00
D. Berge
58374adc68 Add two new bundle types.
Of which 0xa is not actually used and 0xc is used for geometric
comparison data ([ line, point, δi, δj ]).
2025-08-18 14:05:26 +02:00
D. Berge
32aea8a5ed Add comparison functions to server/lib 2025-08-18 13:53:43 +02:00
D. Berge
023b65285f Fix bug trying to get project info for undefined 2025-08-18 13:51:37 +02:00
D. Berge
a320962669 Add project group info to Vuex 2025-08-18 13:50:49 +02:00
D. Berge
0c0067b8d9 Add iterators 2025-08-18 13:48:49 +02:00
D. Berge
ef8466992c Add automatic event icon to log.
So that the user can visually see which events were created by
Dougal (not including QC events).
2025-08-18 11:22:58 +02:00
D. Berge
8e4e70cbdc Add server status info to help dialogue 2025-08-17 13:19:51 +02:00
D. Berge
4dadffbbe7 Refactor Selenium to make it more robust.
It should stop runaway Firefox processes.
2025-08-17 13:18:04 +02:00
D. Berge
24dcebd0d9 Remove logging statements 2025-08-17 13:17:22 +02:00
D. Berge
12a762f44f Fix typo in @dougal/binary 2025-08-16 14:55:53 +02:00
D. Berge
ebf13abc28 Merge branch '337-fix-event-queue' into 'devel'
Resolve "Automatic event detection fault: soft start on every shot during line"

Closes #337

See merge request wgp/dougal/software!61
2025-08-16 12:55:15 +00:00
D. Berge
b3552db02f Add error checking to ETag logic 2025-08-16 11:36:43 +02:00
D. Berge
cd882c0611 Add debug info to soft start detection 2025-08-16 11:36:43 +02:00
D. Berge
6fc9c020a4 Fix off-by-one error in LGSP detection 2025-08-16 11:36:43 +02:00
D. Berge
75284322f1 Modify full volume detection on Smartsource
The Smartsource firmware seems to have changed rendering the old
test invalid.
2025-08-16 11:36:43 +02:00
D. Berge
e849c47f01 Remove old queue implementation 2025-08-16 11:36:43 +02:00
D. Berge
387d20a4f0 Rewrite automatic event handling system 2025-08-16 11:36:43 +02:00
D. Berge
2fab06d340 Don't send timestamp when patching seq+point events.
Closes #339.
2025-08-16 11:35:35 +02:00
D. Berge
7d2fb5558a Hide switches to enable additional graphs.
All violin plots as well as position scatter plots and histograms
are shown by default. This is due to #338.

For some reason, having them enabled from the get go does not
cause any problems.
2025-08-15 18:09:51 +02:00
D. Berge
764e2cfb23 Rename endpoint 2025-08-14 13:34:36 +02:00
D. Berge
bf1af1f76c Make it explicit that :id is numeric 2025-08-14 13:34:27 +02:00
D. Berge
09e4cd2467 Add CSV event import.
Closes #336
2025-08-14 13:33:30 +02:00
59 changed files with 3893 additions and 261 deletions

View File

@@ -0,0 +1,109 @@
-- Add `comparisons` schema and `comparisons.comparisons` table
--
-- New schema version: 0.6.3
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade creates a new schema called `comparisons`.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This update adds a `comparisons` table to a `comparisons` schema.
-- The `comparisons.comparisons` table holds 4D prospect comparison data.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--
BEGIN;
-- Temporary helper: emit a NOTICE so progress is visible in psql output.
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;
-- Performs the actual schema changes for this upgrade.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN
RAISE NOTICE 'Updating schema %', 'public';
SET search_path TO public;
-- BEGIN
CREATE SCHEMA IF NOT EXISTS comparisons
AUTHORIZATION postgres;
COMMENT ON SCHEMA comparisons
IS 'Holds 4D comparison data and logic';
-- One row per (baseline, monitor, type) pair. `data` carries the binary
-- comparison payload; `meta` carries summary statistics as JSON.
CREATE TABLE IF NOT EXISTS comparisons.comparisons
(
type text COLLATE pg_catalog."default" NOT NULL,
baseline_pid text COLLATE pg_catalog."default" NOT NULL,
monitor_pid text COLLATE pg_catalog."default" NOT NULL,
data bytea,
meta jsonb NOT NULL DEFAULT '{}'::jsonb,
CONSTRAINT comparisons_pkey PRIMARY KEY (baseline_pid, monitor_pid, type)
)
TABLESPACE pg_default;
ALTER TABLE IF EXISTS comparisons.comparisons
OWNER to postgres;
-- END
END;
$outer$ LANGUAGE plpgsql;
-- Version gate: refuses to run unless the database is at exactly the
-- expected prior version.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
current_db_version TEXT;
BEGIN
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
-- NOTE(review): this is a lexicographic text comparison. It works for the
-- current 0.6.x series but would misorder e.g. '0.6.10' vs '0.6.9' —
-- confirm before versions gain two-digit components.
IF current_db_version >= '0.6.3' THEN
RAISE EXCEPTION
USING MESSAGE='Patch already applied';
END IF;
IF current_db_version != '0.6.2' THEN
RAISE EXCEPTION
USING MESSAGE='Invalid database version: ' || current_db_version,
HINT='Ensure all previous patches have been applied.';
END IF;
CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;
CALL pg_temp.upgrade();
CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();
CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.3"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.3"}' WHERE public.info.key = 'version';
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);
--
--NOTE Run `COMMIT;` now if all went well
--

View File

@@ -0,0 +1,169 @@
-- Modify public.notify() to support excluding columns from payloads
--
-- New schema version: 0.6.4
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This update modifies notify() to accept, as optional arguments, the
-- names of columns that are to be *excluded* from the notification.
-- It is intended for tables with large columns which are however of
-- no particular interest in a notification.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--
BEGIN;
-- Temporary helper: emit a NOTICE so progress is visible in psql output.
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;
-- Performs the actual schema changes for this upgrade.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN
RAISE NOTICE 'Updating schema %', 'public';
SET search_path TO public;
-- BEGIN
-- Trigger function. Usage: notify('<channel>' [, '<excluded_col>' ...]).
-- TG_ARGV[0] is the notification channel; any further arguments name
-- columns to strip from the OLD/NEW images before publishing.
CREATE OR REPLACE FUNCTION public.notify()
RETURNS trigger
LANGUAGE 'plpgsql'
COST 100
VOLATILE NOT LEAKPROOF
AS $BODY$
DECLARE
channel text := TG_ARGV[0];
pid text;
payload text;
notification text;
payload_id integer;
old_json jsonb;
new_json jsonb;
excluded_col text;
i integer;
BEGIN
-- Fetch pid
SELECT projects.pid INTO pid FROM projects WHERE schema = TG_TABLE_SCHEMA;
-- Build old and new as jsonb, excluding specified columns if provided
-- NOTE(review): relies on OLD being NULL for INSERT and NEW being NULL
-- for DELETE in this PL/pgSQL runtime — confirm on the target PG version.
IF OLD IS NOT NULL THEN
old_json := row_to_json(OLD)::jsonb;
FOR i IN 1 .. TG_NARGS - 1 LOOP
excluded_col := TG_ARGV[i];
old_json := old_json - excluded_col;
END LOOP;
ELSE
old_json := NULL;
END IF;
IF NEW IS NOT NULL THEN
new_json := row_to_json(NEW)::jsonb;
FOR i IN 1 .. TG_NARGS - 1 LOOP
excluded_col := TG_ARGV[i];
new_json := new_json - excluded_col;
END LOOP;
ELSE
new_json := NULL;
END IF;
-- Build payload
payload := json_build_object(
'tstamp', CURRENT_TIMESTAMP,
'operation', TG_OP,
'schema', TG_TABLE_SCHEMA,
'table', TG_TABLE_NAME,
'old', old_json,
'new', new_json,
'pid', pid
)::text;
-- Handle large payloads
-- Payloads under 1000 bytes are sent inline; larger ones are stored in
-- notify_payloads and only their id is published (pg_notify payloads
-- have a hard size limit).
IF octet_length(payload) < 1000 THEN
PERFORM pg_notify(channel, payload);
ELSE
-- Store large payload and notify with ID (as before)
INSERT INTO notify_payloads (payload) VALUES (payload) RETURNING id INTO payload_id;
notification := json_build_object(
'tstamp', CURRENT_TIMESTAMP,
'operation', TG_OP,
'schema', TG_TABLE_SCHEMA,
'table', TG_TABLE_NAME,
'pid', pid,
'payload_id', payload_id
)::text;
PERFORM pg_notify(channel, notification);
RAISE INFO 'Payload over limit';
END IF;
RETURN NULL;
END;
$BODY$;
ALTER FUNCTION public.notify()
OWNER TO postgres;
-- END
END;
$outer$ LANGUAGE plpgsql;
-- Version gate: refuses to run unless the database is at exactly the
-- expected prior version.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
current_db_version TEXT;
BEGIN
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
IF current_db_version >= '0.6.4' THEN
RAISE EXCEPTION
USING MESSAGE='Patch already applied';
END IF;
IF current_db_version != '0.6.3' THEN
RAISE EXCEPTION
USING MESSAGE='Invalid database version: ' || current_db_version,
HINT='Ensure all previous patches have been applied.';
END IF;
CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;
CALL pg_temp.upgrade();
CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();
CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.4"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.4"}' WHERE public.info.key = 'version';
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);
--
--NOTE Run `COMMIT;` now if all went well
--

View File

@@ -0,0 +1,96 @@
-- Add change-notification trigger to comparisons.comparisons
--
-- New schema version: 0.6.5
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade adds a trigger in the `comparisons` schema.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This update adds the `comparisons_tg` trigger to
-- `comparisons.comparisons` so that INSERT / UPDATE / DELETE publish a
-- notification on the `comparisons` channel via public.notify(), with
-- the large `data` column excluded from the payload. It requires the
-- notify() signature introduced in schema version 0.6.4.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--
BEGIN;
-- Temporary helper: emit a NOTICE so progress is visible in psql output.
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;
-- Performs the actual schema changes for this upgrade.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN
RAISE NOTICE 'Updating schema %', 'public';
SET search_path TO public;
-- BEGIN
-- 'comparisons' is the notification channel; 'data' is excluded from the
-- notification payload (see notify() as of schema version 0.6.4).
CREATE OR REPLACE TRIGGER comparisons_tg
AFTER INSERT OR DELETE OR UPDATE
ON comparisons.comparisons
FOR EACH ROW
EXECUTE FUNCTION public.notify('comparisons', 'data');
-- END
END;
$outer$ LANGUAGE plpgsql;
-- Version gate: refuses to run unless the database is at exactly the
-- expected prior version.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
current_db_version TEXT;
BEGIN
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
IF current_db_version >= '0.6.5' THEN
RAISE EXCEPTION
USING MESSAGE='Patch already applied';
END IF;
IF current_db_version != '0.6.4' THEN
RAISE EXCEPTION
USING MESSAGE='Invalid database version: ' || current_db_version,
HINT='Ensure all previous patches have been applied.';
END IF;
CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;
CALL pg_temp.upgrade();
CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();
CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.5"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.5"}' WHERE public.info.key = 'version';
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);
--
--NOTE Run `COMMIT;` now if all went well
--

View File

@@ -503,6 +503,37 @@ class DougalBinaryBundle extends ArrayBuffer {
return ab;
}
get records () {
const data = [];
for (const record of this) {
data.push(record.slice(1));
}
return data;
}
[Symbol.iterator]() {
// Iterate over every record in the bundle by chaining the iterators of
// its chunks, in chunk order.
const chunks = this.chunks();
let chunkIndex = 0;
// With no chunks there is nothing to iterate; the null sentinel makes
// next() terminate immediately.
let chunkIterator = chunks.length > 0 ? chunks[0][Symbol.iterator]() : null;
return {
next() {
if (!chunkIterator) {
return { done: true };
}
let result = chunkIterator.next();
// Current chunk exhausted: advance through following chunks
// (skipping empty ones) until a record is found or none remain.
while (result.done && chunkIndex < chunks.length - 1) {
chunkIndex++;
chunkIterator = chunks[chunkIndex][Symbol.iterator]();
result = chunkIterator.next();
}
return result;
}
};
}
}
@@ -693,7 +724,7 @@ class DougalBinaryChunkSequential extends ArrayBuffer {
getRecord (index) {
if (index < 0 || index >= this.jCount) throw new Error(`Invalid record index: ${index}`);
const arr = [thid.udv, this.i, this.j0 + index * this.Δj];
const arr = [this.udv, this.i, this.j0 + index * this.Δj];
for (let m = 0; m < this.ΔelemCount; m++) {
const values = this.Δelem(m);
@@ -707,6 +738,21 @@ class DougalBinaryChunkSequential extends ArrayBuffer {
return arr;
}
[Symbol.iterator]() {
let index = 0;
const chunk = this;
return {
next() {
if (index < chunk.jCount) {
return { value: chunk.getRecord(index++), done: false };
} else {
return { done: true };
}
}
};
}
}
@@ -901,6 +947,21 @@ class DougalBinaryChunkInterleaved extends ArrayBuffer {
return arr;
}
[Symbol.iterator]() {
let index = 0;
const chunk = this;
return {
next() {
if (index < chunk.jCount) {
return { value: chunk.getRecord(index++), done: false };
} else {
return { done: true };
}
}
};
}
}

View File

@@ -3,8 +3,10 @@
<v-card-title class="headline">
Array inline / crossline error
<v-spacer></v-spacer>
<!--
<v-switch v-model="scatterplot" label="Scatterplot"></v-switch>
<v-switch class="ml-4" v-model="histogram" label="Histogram"></v-switch>
-->
</v-card-title>
<v-container fluid fill-height>
@@ -57,8 +59,8 @@ export default {
graph: [],
busy: false,
resizeObserver: null,
scatterplot: false,
histogram: false
scatterplot: true,
histogram: true
};
},

View File

@@ -3,8 +3,10 @@
<v-card-title class="headline">
Gun depth
<v-spacer></v-spacer>
<!--
<v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
<v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
-->
</v-card-title>
<v-container fluid fill-height>
@@ -59,7 +61,7 @@ export default {
busy: false,
resizeObserver: null,
shotpoint: true,
violinplot: false
violinplot: true
};
},

View File

@@ -3,8 +3,10 @@
<v-card-title class="headline">
Gun pressures
<v-spacer></v-spacer>
<!--
<v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
<v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
-->
</v-card-title>
<v-container fluid fill-height>
@@ -59,7 +61,7 @@ export default {
busy: false,
resizeObserver: null,
shotpoint: true,
violinplot: false
violinplot: true
};
},

View File

@@ -3,8 +3,10 @@
<v-card-title class="headline">
Gun timing
<v-spacer></v-spacer>
<!--
<v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
<v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
-->
</v-card-title>
<v-container fluid fill-height>
@@ -59,7 +61,7 @@ export default {
busy: false,
resizeObserver: null,
shotpoint: true,
violinplot: false
violinplot: true
};
},

View File

@@ -0,0 +1,187 @@
<template>
<v-card v-if="comparison" class="ma-1">
<v-card-title>Comparison Summary: Baseline {{ baseline.pid }} vs Monitor {{ monitor.pid }}</v-card-title>
<v-card-text>
<v-row>
<v-col cols="12" md="6">
<h3>Deviation Statistics</h3>
<v-simple-table dense>
<template v-slot:default>
<thead>
<tr>
<th>Metric</th>
<th>I (m)</th>
<th>J (m)</th>
</tr>
</thead>
<tbody>
<tr>
<td>Mean (μ)</td>
<td>{{ comparison['μ'][0].toFixed(3) }}</td>
<td>{{ comparison['μ'][1].toFixed(3) }}</td>
</tr>
<tr>
<td>Std Dev (σ)</td>
<td>{{ comparison['σ'][0].toFixed(3) }}</td>
<td>{{ comparison['σ'][1].toFixed(3) }}</td>
</tr>
<tr>
<td>RMS</td>
<td>{{ comparison.rms[0].toFixed(3) }}</td>
<td>{{ comparison.rms[1].toFixed(3) }}</td>
</tr>
</tbody>
</template>
</v-simple-table>
<h3 class="mt-4">Error distribution</h3>
<ul>
<li title="Relative to I-axis positive direction">Primary Direction: {{ (comparison.primaryDirection * 180 / Math.PI).toFixed(2) }}°</li>
<li>Anisotropy: {{ comparison.anisotropy.toFixed(2) }}</li>
<li title="Length of the semi-major axis of the error ellipse">Semi-Major Axis: {{ semiMajorAxis.toFixed(2) }} m</li>
<li title="Length of the semi-minor axis of the error ellipse">Semi-Minor Axis: {{ semiMinorAxis.toFixed(2) }} m</li>
<li title="Area of the error ellipse">Error Ellipse Area: {{ ellipseArea.toFixed(2) }} </li>
</ul>
<h3 class="mt-4">Counts</h3>
<ul>
<li title="Unique line / point pairs found in both projects">Common Points: {{ comparison.common }}</li>
<li title="Total number of points compared, including reshoots, infills, etc.">Comparison Length: {{ comparison.length }}</li>
<li title="Number of points in the baseline project">Baseline Points: {{ comparison.baselineLength }} (Unique: {{ comparison.baselineUniqueLength }})</li>
<li title="Number of points in the monitor project">Monitor Points: {{ comparison.monitorLength }} (Unique: {{ comparison.monitorUniqueLength }})</li>
</ul>
<p class="mt-3" title="Date and time when the comparison was last performed">Computation timestamp: {{ new Date(comparison.tstamp).toLocaleString() }}</p>
</v-col>
<v-col cols="12" md="6">
<h3>Error Ellipse</h3>
<svg width="300" height="300" style="border: 1px solid #ccc;">
<g :transform="`translate(150, 150) scale(${ellipseScale})`">
<line x1="0" y1="-150" x2="0" y2="150" stroke="lightgray" stroke-dasharray="5,5"/>
<line x1="-150" y1="0" x2="150" y2="0" stroke="lightgray" stroke-dasharray="5,5"/>
<ellipse
:rx="Math.sqrt(comparison.eigenvalues[0])"
:ry="Math.sqrt(comparison.eigenvalues[1])"
:transform="`rotate(${ellipseAngle})`"
fill="none"
stroke="blue"
stroke-width="2"
/>
<line
:x1="0"
:y1="0"
:x2="Math.sqrt(comparison.eigenvalues[0]) * Math.cos(ellipseRad)"
:y2="Math.sqrt(comparison.eigenvalues[0]) * Math.sin(ellipseRad)"
stroke="red"
stroke-width="2"
arrow-end="classic-wide-long"
/>
<line
:x1="0"
:y1="0"
:x2="Math.sqrt(comparison.eigenvalues[1]) * Math.cos(ellipseRad + Math.PI / 2)"
:y2="Math.sqrt(comparison.eigenvalues[1]) * Math.sin(ellipseRad + Math.PI / 2)"
stroke="green"
stroke-width="2"
arrow-end="classic-wide-long"
/>
</g>
</svg>
<p class="text-caption">Ellipse scaled for visibility (factor: {{ ellipseScale.toFixed(1) }}). Axes represent sqrt(eigenvalues).</p>
</v-col>
</v-row>
</v-card-text>
</v-card>
</template>
<script>
export default {
name: "DougalGroupComparisonSummary",
props: {
// Baseline / monitor project objects; only `pid` is read by the template.
baseline: { type: Object, required: true },
monitor: { type: Object, required: true },
// Comparison statistics. The template and computeds read: 'μ', 'σ',
// rms, eigenvalues, eigenvectors, primaryDirection, anisotropy, the
// count fields and tstamp.
comparison: { type: Object, required: true }
},
data () {
return {
};
},
computed: {
// Orientation of the error ellipse's first eigenvector, in degrees
// from the I axis.
ellipseAngle () {
if (!this.comparison) return 0;
const ev = this.comparison.eigenvectors[0];
return Math.atan2(ev[1], ev[0]) * 180 / Math.PI;
},
// Same orientation in radians (used for the SVG axis lines).
ellipseRad () {
return this.ellipseAngle * Math.PI / 180;
},
// NOTE(review): ellipseRx / ellipseRy appear unused — the template
// draws sqrt(eigenvalues) directly and applies ellipseScale on the
// enclosing <g> transform. Confirm and consider removing.
ellipseRx () {
if (!this.comparison) return 0;
return Math.sqrt(this.comparison.eigenvalues[0]) * this.ellipseScale;
},
ellipseRy () {
if (!this.comparison) return 0;
return Math.sqrt(this.comparison.eigenvalues[1]) * this.ellipseScale;
},
// Pixels-per-metre factor for the SVG drawing.
ellipseScale () {
if (!this.comparison) return 1;
// NOTE(review): maxSigma / maxMu are computed but unused since the
// extent was hard-coded to 20 m — confirm this is intentional.
const maxSigma = Math.max(
Math.sqrt(this.comparison.eigenvalues[0]),
Math.sqrt(this.comparison.eigenvalues[1])
);
const maxMu = Math.max(
Math.abs(this.comparison['μ'][0]),
Math.abs(this.comparison['μ'][1])
);
//const maxExtent = maxMu + 3 * maxSigma;
const maxExtent = 20;
return 100 / maxExtent; // Adjust scale to fit within ~200 pixels diameter
},
// Area of the error ellipse (π·a·b, semi-axes from eigenvalues).
ellipseArea () {
if (!this.comparison) return 0;
const a = Math.sqrt(this.comparison.eigenvalues[0]);
const b = Math.sqrt(this.comparison.eigenvalues[1]);
return Math.PI * a * b;
},
// Longer of the two ellipse semi-axes, in metres.
semiMajorAxis () {
if (!this.comparison) return 0;
return Math.max(
Math.sqrt(this.comparison.eigenvalues[0]),
Math.sqrt(this.comparison.eigenvalues[1])
);
},
// Shorter of the two ellipse semi-axes, in metres.
semiMinorAxis () {
if (!this.comparison) return 0;
return Math.min(
Math.sqrt(this.comparison.eigenvalues[0]),
Math.sqrt(this.comparison.eigenvalues[1])
);
},
// Mean deviation components (I and J); 0 while comparison is absent.
meanX () {
return this.comparison ? this.comparison['μ'][0] : 0;
},
meanY () {
return this.comparison ? this.comparison['μ'][1] : 0;
},
// Fixed viewBox centred on the origin.
ellipseViewBox () {
return '-150 -150 300 300';
},
}
}
</script>

View File

@@ -0,0 +1,118 @@
<template>
<v-card class="ma-1">
<v-card-title>Group Repeatability Summary</v-card-title>
<v-card-text>
<p>Error ellipse area for each baseline-monitor pair. Lower values indicate better repeatability. Colors range from green (best) to red (worst).</p>
<v-simple-table dense>
<thead>
<tr>
<th>Baseline \ Monitor</th>
<th v-for="project in projects" :key="project.pid">{{ project.pid }}</th>
</tr>
</thead>
<tbody>
<tr v-for="(baselineProject, rowIndex) in projects" :key="baselineProject.pid">
<td>{{ baselineProject.pid }}</td>
<td v-for="(monitorProject, colIndex) in projects" :key="monitorProject.pid">
<v-tooltip v-if="colIndex > rowIndex" top>
<template v-slot:activator="{ on, attrs }">
<div
:style="{ backgroundColor: getEllipseAreaColor(baselineProject.pid, monitorProject.pid), color: 'white', textAlign: 'center', padding: '4px' }"
v-bind="attrs"
v-on="on"
@click="emitInput(baselineProject, monitorProject)"
>
{{ formatEllipseArea(baselineProject.pid, monitorProject.pid) }}
</div>
</template>
<span v-if="getComp(baselineProject.pid, monitorProject.pid)">
<div>σ_i: {{ getComp(baselineProject.pid, monitorProject.pid).meta['σ'][0].toFixed(2) }} m</div>
<div>σ_j: {{ getComp(baselineProject.pid, monitorProject.pid).meta['σ'][1].toFixed(2) }} m</div>
<div>Anisotropy: {{ getComp(baselineProject.pid, monitorProject.pid).meta.anisotropy.toFixed(0) }}</div>
<div>Ellipse Area: {{ getEllipseArea(baselineProject.pid, monitorProject.pid).toFixed(2) }} </div>
<div>Primary Direction: {{ formatPrimaryDirection(getComp(baselineProject.pid, monitorProject.pid)) }}°</div>
</span>
</v-tooltip>
</td>
</tr>
</tbody>
</v-simple-table>
</v-card-text>
</v-card>
</template>
<script>
export default {
name: 'DougalGroupRepeatabilitySummary',
props: {
// Comparison rows; each needs baseline_pid, monitor_pid and a meta
// object holding eigenvalues, 'σ', anisotropy and primaryDirection.
comparisons: {
type: Array,
required: true
},
// Projects defining the rows and columns of the matrix.
projects: {
type: Array,
required: true
}
},
data () {
return {
};
},
computed: {
// Lookup of a comparison by "<baseline_pid>-<monitor_pid>" key.
compMap () {
return new Map(this.comparisons.map(c => [`${c.baseline_pid}-${c.monitor_pid}`, c]));
},
// Smallest error-ellipse area (π·a·b, semi-axes from eigenvalues)
// over all comparisons; 0 when there are none.
minEllipseArea () {
if (!this.comparisons.length) return 0;
return Math.min(...this.comparisons.map(c => {
const a = Math.sqrt(c.meta.eigenvalues[0]);
const b = Math.sqrt(c.meta.eigenvalues[1]);
return Math.PI * a * b;
}));
},
// Largest error-ellipse area over all comparisons; 0 when none.
maxEllipseArea () {
if (!this.comparisons.length) return 0;
return Math.max(...this.comparisons.map(c => {
const a = Math.sqrt(c.meta.eigenvalues[0]);
const b = Math.sqrt(c.meta.eigenvalues[1]);
return Math.PI * a * b;
}));
}
},
methods: {
// Comparison record for the given pair, or undefined if absent.
getComp (basePid, monPid) {
return this.compMap.get(`${basePid}-${monPid}`);
},
// Error-ellipse area for the pair, or null when no comparison exists.
getEllipseArea (basePid, monPid) {
const comp = this.getComp(basePid, monPid);
if (!comp) return null;
const a = Math.sqrt(comp.meta.eigenvalues[0]);
const b = Math.sqrt(comp.meta.eigenvalues[1]);
return Math.PI * a * b;
},
// Area formatted to one decimal, or '' for missing comparisons.
formatEllipseArea (basePid, monPid) {
const val = this.getEllipseArea(basePid, monPid);
return val !== null ? val.toFixed(1) : '';
},
// Cell colour on a green (best) → red (worst) HSL scale.
getEllipseAreaColor (basePid, monPid) {
const val = this.getEllipseArea(basePid, monPid);
if (val === null) return '';
const range = this.maxEllipseArea - this.minEllipseArea;
// Fix: guard against division by zero when every comparison has the
// same ellipse area (or only one exists). Previously the ratio was
// NaN, producing an invalid `hsl(NaN, ...)` colour; such cells are
// now rendered as best (green).
const ratio = range > 0 ? (val - this.minEllipseArea) / range : 0;
const hue = (1 - ratio) * 120;
return `hsl(${hue}, 70%, 70%)`;
},
// Primary error direction in degrees (meta stores radians).
formatPrimaryDirection (comp) {
if (!comp) return '';
return (comp.meta.primaryDirection * 180 / Math.PI).toFixed(1);
},
// Notify the parent of a selected pair — only when a comparison exists.
emitInput (baselineProject, monitorProject) {
if (this.getComp(baselineProject.pid, monitorProject.pid)) {
this.$emit('input', baselineProject, monitorProject);
}
}
}
}
</script>

View File

@@ -2,6 +2,7 @@
<v-dialog
v-model="dialog"
max-width="500"
scrollable
style="z-index:2020;"
>
<template v-slot:activator="{ on, attrs }">
@@ -58,6 +59,9 @@
</v-window-item>
<v-window-item value="serverinfo">
<dougal-server-status :status="serverStatus"></dougal-server-status>
</v-window-item>
</v-window>
<v-divider></v-divider>
@@ -69,8 +73,7 @@
text
:href="`mailto:${email}?Subject=Question`"
>
<v-icon class="d-lg-none">mdi-help-circle</v-icon>
<span class="d-none d-lg-inline">Ask a question</span>
<v-icon title="Ask a question">mdi-help-circle</v-icon>
</v-btn>
<v-btn
@@ -78,8 +81,7 @@
text
href="mailto:dougal-support@aaltronav.eu?Subject=Bug report"
>
<v-icon class="d-lg-none">mdi-bug</v-icon>
<span class="d-none d-lg-inline">Report a bug</span>
<v-icon title="Report a bug">mdi-bug</v-icon>
</v-btn>
<!---
@@ -93,16 +95,36 @@
</v-btn>
--->
<v-btn
color="info"
text
title="View support info"
:input-value="page == 'support'"
@click="page = 'support'"
>
<v-icon>mdi-account-question</v-icon>
</v-btn>
<v-btn v-if="versionHistory"
color="info"
text
:title="page == 'support' ? 'View release notes' : 'View support info'"
title="View release notes"
:input-value="page == 'changelog'"
@click="page = page == 'support' ? 'changelog' : 'support'"
@click="page = 'changelog'"
>
<v-icon>mdi-history</v-icon>
</v-btn>
<v-btn v-if="serverStatus"
color="info"
text
title="View server status"
:input-value="page == 'serverinfo'"
@click="page = 'serverinfo'"
>
<v-icon>mdi-server-network</v-icon>
</v-btn>
<v-spacer></v-spacer>
@@ -124,46 +146,110 @@
<script>
import { mapActions, mapGetters } from 'vuex';
import DougalServerStatus from './server-status';
export default {
name: 'DougalHelpDialog',
components: {
DougalServerStatus
},
data () {
return {
dialog: false,
email: "dougal-support@aaltronav.eu",
feed: btoa(encodeURIComponent("https://gitlab.com/wgp/dougal/software.atom?feed_token=XSPpvsYEny8YmH75Nz5W")),
serverStatus: null,
clientVersion: process.env.DOUGAL_FRONTEND_VERSION ?? "(unknown)",
serverVersion: null,
versionHistory: null,
releaseHistory: [],
releaseShown: null,
page: "support"
page: "support",
lastUpdate: 0,
updateInterval: 12000,
refreshTimer: null
};
},
computed: {
sinceUpdate () {
return this.lastUpdate
? (Date.now() - this.lastUpdate)
: +Infinity;
}
},
watch: {
dialog(newVal) {
if (newVal) {
this.startAutoRefresh();
} else {
this.stopAutoRefresh();
}
},
page(newVal) {
if (newVal === 'serverinfo' && this.dialog) {
this.getServerStatus(); // Immediate update when switching to serverinfo
this.startAutoRefresh();
} else {
this.stopAutoRefresh();
}
}
},
methods: {
async getServerVersion () {
if (!this.serverVersion) {
const version = await this.api(['/version', {}, null, {silent:true}]);
this.serverVersion = version?.tag ?? "(unknown)";
if (version) this.lastUpdate = Date.now();
}
if (!this.versionHistory) {
const history = await this.api(['/version/history?count=3', {}, null, {silent:true}]);
const history = await this.api(['/version/history?count=6', {}, null, {silent:true}]);
this.releaseHistory = history;
this.versionHistory = history?.[this.serverVersion.replace(/-.*$/, "")] ?? null;
}
},
async getServerStatus () {
const status = await this.api(['/diagnostics', {}, null, {silent: true}]);
if (status) {
this.serverStatus = status;
this.lastUpdate = Date.now();
}
},
startAutoRefresh() {
if (this.refreshTimer) return; // Prevent multiple timers
this.refreshTimer = setInterval(() => {
if (this.dialog && this.page === 'serverinfo') {
this.getServerStatus();
// Optionally refresh server version if needed
// this.getServerVersion();
}
}, this.updateInterval);
},
stopAutoRefresh() {
if (this.refreshTimer) {
clearInterval(this.refreshTimer);
this.refreshTimer = null;
}
},
...mapActions(["api"])
},
async mounted () {
this.getServerVersion();
this.getServerStatus();
},
async beforeUpdate () {
this.getServerVersion();
beforeDestroy() {
this.stopAutoRefresh(); // Clean up timer on component destruction
}
};

View File

@@ -0,0 +1,213 @@
<template>
<v-card max-width="800" max-height="600" class="mx-auto" style="overflow-y: auto;">
<v-card-title class="headline">
Server status {{ status.hostname }}
</v-card-title>
<v-card-text>
<v-expansion-panels accordion>
<!-- System Info -->
<v-expansion-panel>
<v-expansion-panel-header>System Info</v-expansion-panel-header>
<v-expansion-panel-content>
<v-row>
<v-col cols="6">
<strong>Uptime:</strong> {{ formatUptime(status.uptime) }}
</v-col>
<v-col cols="6">
<strong>Load:</strong> {{ status.loadavg[0].toFixed(2) }} / {{ status.loadavg[1].toFixed(2) }} / {{ status.loadavg[2].toFixed(2) }}
<v-progress-linear
:value="loadAvgPercent"
:color="getLoadAvgColor(status.loadavg[0])"
height="6"
rounded
></v-progress-linear>
<div class="text-caption">
1-min Load: {{ status.loadavg[0].toFixed(2) }} ({{ loadAvgPercent.toFixed(1) }}% of max)
</div>
</v-col>
</v-row>
</v-expansion-panel-content>
</v-expansion-panel>
<!-- Memory -->
<v-expansion-panel>
<v-expansion-panel-header>Memory</v-expansion-panel-header>
<v-expansion-panel-content>
<v-progress-linear
:value="memoryUsedPercent"
:color="getProgressColor(memoryUsedPercent)"
height="10"
rounded
></v-progress-linear>
<div class="text-caption mt-2">
Used: {{ formatBytes(status.memory.total - status.memory.free) }} / Total: {{ formatBytes(status.memory.total) }} ({{ memoryUsedPercent.toFixed(1) }}%)
</div>
</v-expansion-panel-content>
</v-expansion-panel>
<!-- CPUs -->
<v-expansion-panel>
<v-expansion-panel-header>CPUs ({{ status.cpus.length }} cores)</v-expansion-panel-header>
<v-expansion-panel-content>
<v-row dense>
<v-col v-for="(cpu, index) in status.cpus" :key="index" cols="12" sm="6">
<v-card outlined class="pa-2">
<div class="text-caption">Core {{ index + 1 }}: {{ cpu.model }} @ {{ cpu.speed }} MHz</div>
<v-progress-linear
:value="cpuUsagePercent(cpu)"
:color="getProgressColor(cpuUsagePercent(cpu))"
height="8"
rounded
></v-progress-linear>
<div class="text-caption">
Usage: {{ cpuUsagePercent(cpu).toFixed(1) }}% (Idle: {{ cpuIdlePercent(cpu).toFixed(1) }}%)
</div>
</v-card>
</v-col>
</v-row>
</v-expansion-panel-content>
</v-expansion-panel>
<!-- Network Interfaces -->
<v-expansion-panel>
<v-expansion-panel-header>Network Interfaces</v-expansion-panel-header>
<v-expansion-panel-content>
<v-list dense>
<v-list-item v-for="(iface, name) in status.networkInterfaces" :key="name">
<v-list-item-content>
<v-list-item-title>{{ name }}</v-list-item-title>
<v-list-item-subtitle v-for="(addr, idx) in iface" :key="idx">
{{ addr.family }}: {{ addr.address }} (Netmask: {{ addr.netmask }})
</v-list-item-subtitle>
</v-list-item-content>
</v-list-item>
</v-list>
</v-expansion-panel-content>
</v-expansion-panel>
<!-- Storage -->
<v-expansion-panel>
<v-expansion-panel-header>Storage</v-expansion-panel-header>
<v-expansion-panel-content>
<!-- Root -->
<div class="mb-4">
<strong>Root (/):</strong>
<v-progress-linear
:value="status.storage.root.usedPercent"
:color="getProgressColor(status.storage.root.usedPercent)"
height="10"
rounded
></v-progress-linear>
<div class="text-caption">
Used: {{ formatBytes(status.storage.root.used) }} / Total: {{ formatBytes(status.storage.root.total) }} ({{ status.storage.root.usedPercent.toFixed(1) }}%)
</div>
</div>
<!-- Data subfolders -->
<div>
<strong>Data:</strong>
<v-expansion-panels flat>
<v-expansion-panel v-for="(folder, name) in status.storage.data" :key="name">
<v-expansion-panel-header disable-icon-rotate>{{ name }}</v-expansion-panel-header>
<v-expansion-panel-content>
<v-progress-linear
:value="folder.usedPercent"
:color="getProgressColor(folder.usedPercent)"
height="10"
rounded
></v-progress-linear>
<div class="text-caption">
Used: {{ formatBytes(folder.used) }} / Total: {{ formatBytes(folder.total) }} ({{ folder.usedPercent.toFixed(1) }}%)
</div>
</v-expansion-panel-content>
</v-expansion-panel>
</v-expansion-panels>
</div>
</v-expansion-panel-content>
</v-expansion-panel>
<!-- Database -->
<v-expansion-panel>
<v-expansion-panel-header>Database</v-expansion-panel-header>
<v-expansion-panel-content>
<div class="mb-2">
<strong>Total Size:</strong> {{ formatBytes(status.database.size) }}
</div>
<v-list dense>
<v-list-item v-for="(project, name) in status.database.projects" :key="name">
<v-list-item-content>
<v-list-item-title>{{ name }}</v-list-item-title>
<v-progress-linear
:value="project.percent"
:color="getProgressColor(project.percent)"
height="8"
rounded
></v-progress-linear>
<v-list-item-subtitle>
Size: {{ formatBytes(project.size) }} ({{ project.percent.toFixed(2) }}%)
</v-list-item-subtitle>
</v-list-item-content>
</v-list-item>
</v-list>
</v-expansion-panel-content>
</v-expansion-panel>
</v-expansion-panels>
</v-card-text>
</v-card>
</template>
<script>
export default {
name: "DougalServerStatus",
// Read-only dashboard component: renders server health (CPU, memory,
// storage, database sizes) from a status snapshot supplied by the parent.
props: {
// Server status snapshot. The template reads status.memory, status.loadavg,
// status.cpus, status.storage (root + data subfolders) and status.database
// (size + per-project sizes). NOTE(review): exact schema assumed from
// template usage — confirm against the API that produces it.
status: {
type: Object,
required: true
}
},
computed: {
// Percentage of physical memory currently in use (0–100).
memoryUsedPercent() {
return ((this.status.memory.total - this.status.memory.free) / this.status.memory.total) * 100;
},
// 1-minute load average scaled to a 0–100 percentage, using 4x the core
// count as the "100%" ceiling, capped at 100.
loadAvgPercent() {
const maxLoad = this.status.cpus.length * 4; // Assume 4x cores as max for scaling
return Math.min((this.status.loadavg[0] / maxLoad) * 100, 100); // Cap at 100%
}
},
methods: {
// Map a usage percentage to a traffic-light colour for progress bars.
getProgressColor(value) {
if (value >= 80) return 'error'; // Red for 80–100%
if (value >= 60) return 'warning'; // Yellow for 60–80%
return 'success'; // Green for 0–60%
},
// Colour for the load-average bar, relative to the number of cores.
getLoadAvgColor(load) {
const coreCount = this.status.cpus.length;
if (load >= coreCount * 2) return 'error'; // Red for load ≥ 2x cores
if (load >= coreCount) return 'warning'; // Yellow for load ≥ 1x cores but < 2x
return 'success'; // Green for load < 1x cores
},
// Human-readable byte count, e.g. 1536 -> "1.5 KB" (binary, base 1024).
formatBytes(bytes) {
if (bytes === 0) return '0 Bytes';
const k = 1024;
const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
},
// Seconds of uptime formatted as "Nd Nh Nm".
formatUptime(seconds) {
const days = Math.floor(seconds / 86400);
seconds %= 86400;
const hours = Math.floor(seconds / 3600);
seconds %= 3600;
const minutes = Math.floor(seconds / 60);
return `${days}d ${hours}h ${minutes}m`;
},
// Busy percentage for one CPU from its cumulative time counters.
cpuUsagePercent(cpu) {
const total = cpu.times.user + cpu.times.nice + cpu.times.sys + cpu.times.idle + cpu.times.irq;
return ((total - cpu.times.idle) / total) * 100;
},
// Idle percentage for one CPU from its cumulative time counters.
cpuIdlePercent(cpu) {
const total = cpu.times.user + cpu.times.nice + cpu.times.sys + cpu.times.idle + cpu.times.irq;
return (cpu.times.idle / total) * 100;
}
}
};
</script>

View File

@@ -0,0 +1,47 @@
/**
 * Convert a duration to a number of milliseconds.
 *
 * Accepts either a duration object ({days, hours, minutes, seconds,
 * milliseconds}, all fields optional) or a plain number already expressed
 * in milliseconds.
 *
 * @param {Object|number} v Duration object or millisecond count.
 * @returns {number} Total milliseconds.
 */
function duration_to_ms(v) {
  if (v instanceof Object) {
    return (
      (v.days || 0) * 86400000 +
      (v.hours || 0) * 3600000 +
      (v.minutes || 0) * 60000 +
      (v.seconds || 0) * 1000 +
      (v.milliseconds || 0)
    );
  }
  // BUG FIX: the original returned a zero-duration *object* here, so any
  // non-object input (a millisecond number, null, undefined) broke the
  // arithmetic in add_durations()/normalise_duration(), which expect a
  // number. Return a numeric millisecond count instead.
  return Number(v) || 0;
}
/**
 * Split a millisecond count into a duration object
 * ({days, hours, minutes, seconds, milliseconds}).
 *
 * @param {number} v Number of milliseconds.
 * @returns {Object} Duration object with each field in its natural range.
 */
function ms_to_duration(v) {
  const MS_PER = { day: 86400000, hour: 3600000, minute: 60000, second: 1000 };
  let rest = v;
  const days = Math.floor(rest / MS_PER.day);
  rest %= MS_PER.day;
  const hours = Math.floor(rest / MS_PER.hour);
  rest %= MS_PER.hour;
  const minutes = Math.floor(rest / MS_PER.minute);
  rest %= MS_PER.minute;
  const seconds = Math.floor(rest / MS_PER.second);
  const milliseconds = rest % MS_PER.second;
  return { days, hours, minutes, seconds, milliseconds };
}
/**
 * Normalise a duration object so every field is within its natural range
 * (e.g. {minutes: 90} becomes {hours: 1, minutes: 30, ...}), by
 * round-tripping through a millisecond count.
 */
function normalise_duration (v) {
  const total_ms = duration_to_ms(v);
  return ms_to_duration(total_ms);
}
/**
 * Add two durations (objects or millisecond counts) and return the sum as
 * a normalised duration object.
 */
function add_durations(a, b) {
  const sum_ms = duration_to_ms(a) + duration_to_ms(b);
  return ms_to_duration(sum_ms);
}
export {
duration_to_ms,
ms_to_duration,
normalise_duration,
add_durations
}

View File

@@ -20,6 +20,9 @@ import ProjectSettings from '../views/ProjectSettings.vue'
import Users from '../views/Users.vue'
import DougalAppBarExtensionProject from '../components/app-bar-extension-project'
import DougalAppBarExtensionProjectList from '../components/app-bar-extension-project-list'
import GroupList from '../views/GroupList.vue'
import Group from '../views/Group.vue'
Vue.use(VueRouter)
@@ -196,7 +199,43 @@ Vue.use(VueRouter)
component: ProjectSettings
}
]
}
},
{
pathToRegexpOptions: { strict: true },
path: "/groups",
redirect: "/groups/"
},
{
pathToRegexpOptions: { strict: true },
path: "/groups/",
component: GroupList,
meta: {
breadcrumbs: [
{ text: "Groups", href: "/groups", disabled: true }
],
appBarExtension: {
// component: DougalAppBarExtensionProjectList
}
}
},
{
pathToRegexpOptions: { strict: true },
path: "/groups/:group",
redirect: "/groups/:group/"
},
{
pathToRegexpOptions: { strict: true },
path: "/groups/:group/",
name: "Group",
component: Group,
meta: {
breadcrumbs: [
{ text: "Groups", href: "/groups" },
],
},
children: [
]
},
]
const router = new VueRouter({

View File

@@ -1,5 +1,10 @@
async function getProject ({commit, dispatch}, projectId) {
if (projectId == null) {
console.log(`Skipping call to getProject${projectId})`);
return;
}
const init = {
headers: {
cache: "reload",

View File

@@ -1,3 +1,5 @@
import * as d3a from 'd3-array';
import { duration_to_ms, ms_to_duration, normalise_duration, add_durations } from '@/lib/durations';
/** Fetch projects from server
*/
@@ -8,6 +10,7 @@ async function refreshProjects ({commit, dispatch, state, rootState}) {
}
commit('setProjectsLoading');
const tstamp = new Date();
const pid = rootState.project.projectId;
const url = `/project`;
const init = {
@@ -17,10 +20,26 @@ async function refreshProjects ({commit, dispatch, state, rootState}) {
const res = await dispatch('api', [url, init, null, {silent:true}]);
if (res) {
for (let index in res) {
const project = res[index];
if (!project.pid) {
console.warn("Project has no Project ID!");
continue;
}
const url = `/project/${project.pid}/summary`;
const init = {};
const summary = await dispatch('api', [url, init, null, {silent:true}]);
if (summary) {
res[index] = {...project, ...summary};
}
}
commit('setProjects', res);
commit('setProjectsTimestamp');
commit('setProjectsTimestamp', tstamp);
}
commit('clearProjectsLoading');
dispatch('prepareGroups');
}
/** Return a subset of projects from state.projects
@@ -118,4 +137,83 @@ async function getProjects ({commit, dispatch, state}, [{pid, name, schema, grou
return {projects: filteredProjects, count};
}
export default { refreshProjects, getProjects };
/**
 * Vuex action: aggregate per-project production statistics into per-group
 * summaries and commit the result via the setGroups mutation.
 *
 * Only projects with production data (truthy prod_distance) contribute.
 * NOTE(review): this mutates project.prod_duration in place on objects held
 * in state.projects — confirm this is acceptable given the store freezes
 * the projects array (freeze is shallow, so it works, but it bypasses the
 * usual mutation discipline).
 */
async function prepareGroups ({commit, dispatch, state, rootState}) {
const groups = {};
for (const project of state.projects) {
if (!project.prod_distance) {
// This project has no production data (either not started yet
// or production data has not been imported) so we skip it.
continue;
}
// Normalise durations that arrived without a days field so later
// aggregation/display is uniform. NOTE(review): a duration whose days
// is exactly 0 is re-normalised on every call — idempotent, but the
// condition looks like it was meant to test for a *missing* field.
if (!project.prod_duration.days) {
project.prod_duration = normalise_duration(project.prod_duration);
}
for (const name of project.groups) {
if (!(name in groups)) {
// First time we see this group: create an empty accumulator.
groups[name] = {
group: name,
num_projects: 0,
lines: 0,
points: 0,
sequences: 0,
// Shots:
prime: 0,
other: 0,
ntba: 0,
prod_duration: {
days: 0,
hours: 0,
minutes: 0,
seconds: 0,
milliseconds: 0
},
prod_distance: 0,
shooting_rate: [],
projects: []
};
}
const group = groups[name];
group.num_projects++;
group.lines = Math.max(group.lines, project.lines); // In case preplots changed
group.points = Math.max(group.points, project.total); // Idem
group.sequences += project.seq_final;
group.prime += project.prime;
group.other += project.other;
//group.ntba += project.ntba;
group.prod_duration = add_durations(group.prod_duration, project.prod_duration);
group.prod_distance += project.prod_distance;
group.shooting_rate.push(project.shooting_rate);
group.projects.push(project);
}
}
// Second pass: collapse per-project shooting rates into mean / standard
// deviation (d3-array) and publish the flat list to the store.
const grouplist = [];
for (const group of Object.values(groups)) {
group.shooting_rate_mean = d3a.mean(group.shooting_rate);
group.shooting_rate_sd = d3a.deviation(group.shooting_rate);
delete group.shooting_rate;
grouplist.push(group);
}
commit('setGroups', grouplist);
}
/**
 * Vuex action: return the cached group summaries, computing them first
 * (refreshProjects ends by dispatching prepareGroups) when the cache is
 * empty.
 *
 * @returns {Array} The (possibly just refreshed) state.groups list.
 */
async function getGroups({commit, dispatch, state, rootState}) {
  const cacheEmpty = state.groups.length === 0;
  if (cacheEmpty) {
    await dispatch('refreshProjects');
  }
  return state.groups;
}
export default { refreshProjects, getProjects, prepareGroups, getGroups };

View File

@@ -3,7 +3,7 @@ function projects (state) {
return state.projects;
}
function projectGroups (state) {
/**
 * Vuex getter: sorted list of the unique group names used across all
 * projects in the store.
 */
function projectGroupNames (state) {
  const uniqueNames = new Set(state.projects.flatMap(project => project.groups));
  return Array.from(uniqueNames).sort();
}
@@ -15,4 +15,8 @@ function projectsLoading (state) {
return !!state.loading;
}
export default { projects, projectGroups, projectCount, projectsLoading };
/**
 * Vuex getter: the (frozen) list of aggregated project groups as computed
 * by the prepareGroups action.
 */
function groups ({ groups: groupList }) {
  return groupList;
}
export default { projects, projectGroupNames, projectCount, projectsLoading, groups };

View File

@@ -39,10 +39,15 @@ function abortProjectsLoading (state) {
state.loading = null;
}
/**
 * Vuex mutation: replace state.groups with the supplied list, frozen so
 * that Vue does not make the (potentially large) structure reactive.
 */
function setGroups (state, groups) {
  const frozen = Object.freeze(groups);
  state.groups = frozen;
}
export default {
setProjects,
setProjectsLoading,
clearProjectsLoading,
setProjectsTimestamp,
setProjectsETag
setProjectsETag,
setGroups
};

View File

@@ -1,5 +1,6 @@
const state = () => ({
projects: Object.freeze([]),
groups: Object.freeze([]),
loading: null,
timestamp: null,
etag: null,

View File

@@ -0,0 +1,307 @@
<template>
<v-container fluid fill-height class="ma-0 pa-0">
<v-overlay :value="loading && !comparisons.length" absolute>
<v-progress-circular
indeterminate
size="64"
></v-progress-circular>
</v-overlay>
<v-overlay :value="!loading && !groupFound" absolute opacity="0.8">
<v-row justify="center">
<v-alert
type="error"
>
Group not found
</v-alert>
</v-row>
<v-row justify="center">
<v-btn color="primary" @click="refreshProjects">Retry</v-btn>
</v-row>
</v-overlay>
<v-row no-gutters align="stretch" class="fill-height">
<v-col cols="12" v-if="groupFound">
<v-data-table class="ma-1"
:headers="projectHeaders"
:items="projects"
dense
>
<template v-slot:item.baseline="{item, value, index}">
<v-simple-checkbox v-if="index+1 < projects.length"
color="primary"
:value="baseline === item"
@input="setBaseline(item)"
></v-simple-checkbox>
</template>
<template v-slot:item.monitor="{item, value, index}">
<v-simple-checkbox v-if="index > 0 && !(index <= baselineIndex)"
color="primary"
:value="monitor === item"
@input="setMonitor(item)"
></v-simple-checkbox>
</template>
<template v-slot:item.pid="{item, value}">
<v-chip label small outlined>{{ value }}</v-chip>
</template>
<template v-slot:item.fsp="{item, value}">
<span title="First production shot">{{value.tstamp.substr(0, 10)}}</span>
</template>
<template v-slot:item.lsp="{item, value}">
<span title="Last production shot">{{value.tstamp.substr(0, 10)}}</span>
</template>
<template v-slot:item.prod_duration="{item, value}">
<span v-if="value.days > 2" :title="`${value.days} d ${value.hours} h ${value.minutes} m ${(value.seconds + value.milliseconds/1000).toFixed(3)} s`">
{{ value.days }} d
</span>
<span v-else>
{{ value.days }} d {{ value.hours }} h {{ value.minutes }} m {{ (value.seconds + value.milliseconds/1000).toFixed(1) }} s
</span>
</template>
<template v-slot:item.prod_distance="{item, value}">
{{ (value/1000).toFixed(1) }} km
</template>
<template v-slot:footer.prepend>
<v-btn v-if="comparison"
text
color="primary"
title="Back to summary"
@click="clearComparison"
>Back</v-btn>
</template>
</v-data-table>
<!-- BEGIN TEST -->
<dougal-group-comparison-summary v-if="comparison"
:baseline="baseline"
:monitor="monitor"
:comparison="comparison"
></dougal-group-comparison-summary>
<dougal-group-repeatability-summary v-else-if="comparisons.length"
:comparisons="comparisons"
:projects="projects"
@input="setComparison"
></dougal-group-repeatability-summary>
<!-- END TEST -->
</v-col>
<v-col cols="12" v-else>
<v-card>
<v-card-text>
Group does not exist.
</v-card-text>
</v-card>
</v-col>
</v-row>
</v-container>
</template>
<script>
import { mapActions, mapGetters } from 'vuex'
import AccessMixin from '@/mixins/access';
import DougalGroupRepeatabilitySummary from '@/components/group-repeatability-summary.vue';
import DougalGroupComparisonSummary from '@/components/group-comparison-summary';
export default {
name: 'Group',
// View for a single project group: lists the group's projects and lets the
// user pick a baseline + monitor pair to inspect a 4D comparison.
mixins: [
AccessMixin
],
components: {
DougalGroupRepeatabilitySummary,
DougalGroupComparisonSummary,
},
data () {
return {
// Columns for the per-project table, including the baseline/monitor
// checkbox columns rendered via slots in the template.
projectHeaders: [
{
value: "baseline",
text: "Baseline"
},
{
value: "monitor",
text: "Monitor"
},
{
value: "pid",
text: "ID"
},
{
value: "name",
text: "Name"
},
{
value: "fsp",
text: "Start"
},
{
value: "lsp",
text: "Finish"
},
{
value: "lines",
text: "Preplot lines"
},
{
value: "seq_final",
text: "Num. of sequences"
},
{
value: "prod_duration",
text: "Duration"
},
{
value: "prod_distance",
text: "Distance"
},
],
// Currently selected baseline/monitor project objects (or null).
baseline: null,
monitor: null,
// Comparison rows fetched from /comparison/group/:group; not cached
// server-side, refreshed via the 'comparisons' notification channel.
comparisons: []
}
},
computed: {
// Group name taken from the route parameter.
groupName () {
return this.$route.params.group;
},
// The matching group summary from the store, if any.
group () {
return this.groups.find( i => i.group === this.groupName );
},
// True while loading (benefit of the doubt) or once the group is found;
// drives the "Group not found" overlay.
groupFound () {
return !!(this.loading || this.group);
},
// Group's projects sorted by pid. NOTE(review): undefined when the group
// is missing — template guards with v-if="groupFound", but baselineIndex
// below would throw if evaluated in that state.
projects () {
return this.group?.projects.toSorted((a, b) => a.pid.localeCompare(b.pid));
},
baselineIndex () {
return this.projects.indexOf(this.baseline);
},
// Metadata of the comparison matching the selected baseline/monitor pair,
// or undefined when no pair is selected / no comparison exists.
comparison () {
return this.comparisons.find( row =>
row.baseline_pid == this.baseline?.pid && row.monitor_pid == this.monitor?.pid
)?.meta;
},
...mapGetters(["loading", "groups"])
},
methods: {
// Toggle the baseline selection; clears the monitor if it no longer lies
// after the new baseline in the sorted project list.
setBaseline (project) {
if (project === this.baseline) {
this.baseline = null;
} else {
this.baseline = project;
if (this.monitor) {
if (this.projects.indexOf(this.monitor) <= this.projects.indexOf(this.baseline)) {
this.monitor = null;
}
}
}
},
// Toggle the monitor selection.
setMonitor (project) {
if (project === this.monitor) {
this.monitor = null;
} else {
this.monitor = project;
}
},
// Clear both selections (back to the repeatability summary view).
clearComparison () {
this.baseline = null;
this.monitor = null;
},
// Select a baseline/monitor pair in one go. NOTE(review): assumes the
// child component emits both values with its input event — confirm in
// group-repeatability-summary.
setComparison (baseline, monitor) {
this.clearComparison();
this.setBaseline(baseline);
this.setMonitor(monitor);
},
// Fetch all comparisons for this group from the API.
async getComparisons () {
const url = `/comparison/group/${this.$route.params.group}`;
this.comparisons = await this.api([url]);
},
// TODO Should this go in a Vuex action rather?
// Refresh the group list, then (if the group exists) its comparisons.
async refreshComparisons () {
await this.getGroups();
if (this.groupFound) {
await this.getComparisons();
}
},
/*
async getComparison () {
if (this.baseline && this.monitor) {
const url = `/comparison/group/${this.$route.params.group}/baseline/${this.baseline.pid}/monitor/${this.monitor.pid}`;
const comparison = await this.api([url]);
if (comparison) {
this.comparison = comparison;
}
}
},
*/
// Notification callback: any change on the 'comparisons' table triggers a
// full refresh (payload content is ignored).
handleComparisons (context, {payload}) {
this.refreshComparisons();
},
// Subscribe/unsubscribe this view to 'comparisons' change notifications.
registerNotificationHandlers (action = "registerHandler") {
this.$store.dispatch(action, {
table: 'comparisons',
handler: this.handleComparisons
});
},
unregisterNotificationHandlers () {
return this.registerNotificationHandlers("unregisterHandler");
},
...mapActions(["api", "getGroups", "refreshProjects"])
},
async mounted () {
this.registerNotificationHandlers();
this.refreshComparisons()
},
beforeDestroy () {
this.unregisterNotificationHandlers();
}
}
</script>

View File

@@ -0,0 +1,396 @@
<template>
<v-container fluid>
<v-data-table
:headers="headers"
:items="displayItems"
item-key="group"
:options.sync="options"
:expanded.sync="expanded"
show-expand
:loading="loading"
>
<template v-slot:item.group="{item, value}">
<v-chip
label
small
:href="`./${value}`"
>{{ value }}</v-chip>
</template>
<template v-slot:item.shots_total="{item, value}">
<div>{{ item.prime + item.other }}</div>
<v-progress-linear
background-color="secondary"
color="primary"
:value="item.prime/(item.prime+item.other)*100"
></v-progress-linear>
</template>
<template v-slot:item.prime="{item, value}">
{{ value }}
({{ (value / (item.prime + item.other) * 100).toFixed(1) }}%)
</template>
<template v-slot:item.other="{item, value}">
{{ value }}
({{ (value / (item.prime + item.other) * 100).toFixed(1) }}%)
</template>
<template v-slot:item.prod_duration="{item, value}">
<span v-if="value.days > 2" :title="`${value.days} d ${value.hours} h ${value.minutes} m ${(value.seconds + value.milliseconds/1000).toFixed(3)} s`">
{{ value.days }} d
</span>
<span v-else>
{{ value.days }} d {{ value.hours }} h {{ value.minutes }} m {{ (value.seconds + value.milliseconds/1000).toFixed(1) }} s
</span>
</template>
<template v-slot:item.prod_distance="{item, value}">
{{ (value/1000).toFixed(1) }} km
</template>
<template v-slot:item.shooting_rate_mean="{item, value}">
{{ (value).toFixed(2) }} s ±{{ (item.shooting_rate_sd).toFixed(3) }} s
</template>
<template v-slot:item.shots_per_point="{item, value}">
<div>
{{ ((item.prime + item.other)/item.points).toFixed(1) }}
({{ ((((item.prime + item.other)/item.points) / item.num_projects)*100).toFixed(2) }}%)
</div>
<v-progress-linear
:value="((((item.prime + item.other)/item.points) / item.num_projects)*100)"
></v-progress-linear>
</template>
<template v-slot:expanded-item="{ headers, item }">
<td :colspan="headers.length">
<v-data-table class="ma-1"
:headers="projectHeaders"
:items="item.projects"
dense
hide-default-footer
>
<template v-slot:item.pid="{item, value}">
<a :href="`/projects/${value}`" title="Go to project">{{ value }}</a>
</template>
<template v-slot:item.fsp="{item, value}">
<span title="First production shot">{{value.tstamp.substr(0, 10)}}</span>
</template>
<template v-slot:item.lsp="{item, value}">
<span title="Last production shot">{{value.tstamp.substr(0, 10)}}</span>
</template>
<template v-slot:item.prod_duration="{item, value}">
<span v-if="value.days > 2" :title="`${value.days} d ${value.hours} h ${value.minutes} m ${(value.seconds + value.milliseconds/1000).toFixed(3)} s`">
{{ value.days }} d
</span>
<span v-else>
{{ value.days }} d {{ value.hours }} h {{ value.minutes }} m {{ (value.seconds + value.milliseconds/1000).toFixed(1) }} s
</span>
</template>
<template v-slot:item.prod_distance="{item, value}">
{{ (value/1000).toFixed(1) }} km
</template>
</v-data-table>
</td>
</template>
</v-data-table>
</v-container>
</template>
<style>
td p:last-of-type {
margin-bottom: 0;
}
</style>
<script>
import { mapActions, mapGetters } from 'vuex';
import AccessMixin from '@/mixins/access';
// FIXME send to lib/utils or so
/*
function duration_to_ms(v) {
if (v instanceof Object) {
return (
(v.days || 0) * 86400000 +
(v.hours || 0) * 3600000 +
(v.minutes || 0) * 60000 +
(v.seconds || 0) * 1000 +
(v.milliseconds || 0)
);
} else {
return {
days: 0,
hours: 0,
minutes: 0,
seconds: 0,
milliseconds: 0
}
}
}
function ms_to_duration(v) {
const days = Math.floor(v / 86400000);
v %= 86400000;
const hours = Math.floor(v / 3600000);
v %= 3600000;
const minutes = Math.floor(v / 60000);
v %= 60000;
const seconds = Math.floor(v / 1000);
const milliseconds = v % 1000;
return { days, hours, minutes, seconds, milliseconds };
}
function normalise_duration (v) {
return ms_to_duration(duration_to_ms(v));
}
function add_durations(a, b) {
return ms_to_duration(duration_to_ms(a) + duration_to_ms(b));
}
*/
/**
 * GroupList view: one row per project group, with aggregate production
 * statistics and an expandable per-project breakdown. Group aggregation
 * itself lives in the Vuex store (prepareGroups action); this component
 * only snapshots and displays it.
 */
export default {
  name: "GroupList",
  components: {
  },
  mixins: [
    AccessMixin
  ],
  data () {
    return {
      // Columns for the top-level group summary table.
      headers: [
        { value: "group", text: "Group name" },
        { value: "num_projects", text: "Number of campaigns" },
        { value: "lines", text: "Preplot lines" },
        { value: "points", text: "Preplot points" },
        { value: "sequences", text: "Total sequences" },
        { value: "shots_total", text: "Total shots" },
        { value: "prime", text: "Total prime" },
        { value: "other", text: "Total reshoot + infill" },
        // NTBA totals are not aggregated yet (see store prepareGroups):
        // { value: "ntba", text: "Total NTBA" },
        { value: "prod_duration", text: "Total duration" },
        { value: "prod_distance", text: "Total distance" },
        { value: "shooting_rate_mean", text: "Shooting rate (mean)" },
        { value: "shots_per_point", text: "Shots per point" },
      ],
      // Rows currently displayed (snapshot of the Vuex groups getter).
      items: [],
      // v-data-table expansion and sorting state.
      expanded: [],
      options: { sortBy: ["group"], sortDesc: [false] },
      // Columns for the per-group expanded project table.
      projectHeaders: [
        { value: "pid", text: "ID" },
        { value: "name", text: "Name" },
        { value: "fsp", text: "Start" },
        { value: "lsp", text: "Finish" },
        { value: "lines", text: "Preplot lines" },
        { value: "seq_final", text: "Num. of sequences" },
        { value: "prod_duration", text: "Duration" },
        { value: "prod_distance", text: "Distance" },
      ],
      // Context menu state. NOTE(review): no context menu appears in this
      // component's template yet; kept for planned functionality.
      contextMenuShow: false,
      contextMenuX: 0,
      contextMenuY: 0,
      contextMenuItem: null,
    }
  },
  computed: {
    // Only groups with actual production data are listed.
    displayItems () {
      return this.items.filter(i => i.prod_distance);
    },
    ...mapGetters(['loading', 'groups'])
  },
  methods: {
    // Snapshot the Vuex groups getter into the local table items.
    async list () {
      this.items = [...this.groups];
    },
    // Refresh projects (which recomputes groups in the store) and re-list.
    async load () {
      await this.refreshProjects();
      await this.list();
    },
    // Reload whenever a project-change notification arrives.
    registerNotificationHandlers () {
      this.$store.dispatch('registerHandler', {
        // BUG FIX: the table name contained a stray backtick ('project`'),
        // so the handler was registered under a non-existent table and
        // project-change notifications never triggered a reload.
        table: 'project',
        handler: (context, message) => {
          // NOTE(review): comparing payload.table against "public" looks
          // like it was meant to compare the *schema* — confirm against
          // the notification payload format.
          if (message.payload?.table == "public") {
            this.load();
          }
        }
      });
    },
    ...mapActions(["api", "showSnack", "refreshProjects"])
  },
  mounted () {
    this.registerNotificationHandlers();
    this.load();
  }
}
</script>

View File

@@ -325,6 +325,12 @@
@click="labelSearch=label"
>{{label}}</v-chip>
</span>
<v-icon v-if="entry.meta.auto || entry.meta.author"
x-small
left
color="primary"
:title="entry.meta.author?`Automatic event by ${entry.meta.author}`:'Automatic event'"
>mdi-robot</v-icon>
<dougal-event-edit-history v-if="entry.has_edits && $parent.writeaccess()"
:id="entry.id"
:disabled="eventsLoading"
@@ -737,6 +743,13 @@ export default {
if (event.id) {
const id = event.id;
delete event.id;
// If this is an edit, ensure that it is *either*
// a timestamp event or a sequence + point one.
if (event.sequence && event.point && event.tstamp) {
delete event.tstamp;
}
this.putEvent(id, event, callback); // No await
} else {
this.postEvent(event, callback); // No await

View File

@@ -27,6 +27,8 @@
<v-chip v-for="group in value"
label
small
:title="`View repeatability data for ${group}`"
:href="`/groups/${group}`"
>{{ group }}</v-chip>
</template>

View File

@@ -225,16 +225,28 @@ app.map({
'changes/:since': {
get: [ mw.auth.access.read, mw.event.changes ]
},
// TODO Rename -/:sequence → sequence/:sequence
// NOTE: old alias for /sequence/:sequence
'-/:sequence/': { // NOTE: We need to avoid conflict with the next endpoint ☹
get: [ mw.auth.access.read, mw.event.sequence.get ],
},
':id/': {
'sequence/:sequence/': {
get: [ mw.auth.access.read, mw.event.sequence.get ],
},
':id(\\d+)/': {
get: [ mw.auth.access.read, mw.event.get ],
put: [ mw.auth.access.write, mw.event.put ],
patch: [ mw.auth.access.write, mw.event.patch ],
delete: [mw.auth.access.write, mw.event.delete ]
},
'import': {
put: [ mw.auth.access.write, mw.event.import.csv, mw.event.import.put ],
post: [ mw.auth.access.write, mw.event.import.csv, mw.event.import.put ],
'/:filename': {
put: [ mw.auth.access.read, mw.event.import.csv, mw.event.import.put ],
post: [ mw.auth.access.write, mw.event.import.csv, mw.event.import.put ],
delete: [ mw.auth.access.write, mw.event.import.delete ]
},
},
},
/*
@@ -345,6 +357,26 @@ app.map({
delete: [ mw.auth.operations, mw.auth.access.write, mw.info.delete ]
}
},
/*
* 4D comparisons
*/
// FIXME no authentication yet!
'/comparison/group': {
get: [ mw.etag.noSave, mw.comparisons.groups.list ],
'/:group': {
get: [ mw.etag.noSave, mw.comparisons.groups.get ],
},
},
/*
* Other endpoints
*/
'/queue/outgoing/': {
'asaqc': {
get: [ mw.etag.noSave, mw.queue.asaqc.get ],

View File

@@ -0,0 +1,20 @@
const comparisons = require('../../../../lib/comparisons');
module.exports = async function (req, res, next) {
try {
// const data = await comparisons.groups()
// if (data?.[req.params.group]) {
const data = await comparisons.getGroup(req.params.group);
if (data) {
res.status(200).send(data);
} else {
res.status(404).send({message: "Group does not exist"});
}
return next();
} catch (err) {
next(err);
}
};

View File

@@ -0,0 +1,4 @@
// Route-handler index for the /comparison/group endpoints.
const list = require('./list');
const get = require('./get');
module.exports = { list, get };

View File

@@ -0,0 +1,18 @@
const comparisons = require('../../../../lib/comparisons');
module.exports = async function (req, res, next) {
try {
const data = await comparisons.groups()
if (data) {
res.status(200).send(data);
} else {
res.status(204).end();
}
return next();
} catch (err) {
next(err);
}
};

View File

@@ -0,0 +1,3 @@
// Comparisons middleware index.
const groups = require('./groups');
module.exports = { groups };

View File

@@ -66,8 +66,18 @@ const rels = [
function invalidateCache (data, cache) {
return new Promise((resolve, reject) => {
if (!data) {
ERROR("invalidateCache called with no data");
return;
}
if (!data.payload) {
ERROR("invalidateCache called without a payload; channel = %s", data.channel);
return;
}
const channel = data.channel;
const project = data.payload.pid ?? data.payload?.new?.pid ?? data.payload?.old?.pid;
const project = data.payload?.pid ?? data.payload?.new?.pid ?? data.payload?.old?.pid;
const operation = data.payload.operation;
const table = data.payload.table;
const fields = { channel, project, operation, table };

View File

@@ -0,0 +1,146 @@
const Busboy = require('busboy');
const { parse } = require('csv-parse/sync');
/**
 * Express middleware: parse an uploaded CSV of events into req.body.
 *
 * Two upload styles are accepted:
 *   1. Raw text/csv body with a :filename route parameter.
 *   2. multipart/form-data — only the FIRST text/csv file part is used;
 *      other parts and fields are drained and ignored.
 * Anything else falls through to next() untouched.
 *
 * The first CSV row is treated as a (case-insensitive) header row. Each
 * subsequent row becomes an event object with remarks, labels and either
 * a sequence+point pair or a timestamp. Rows without remarks are skipped.
 * Date/time columns carry forward row-to-row (lastDate/lastTime), falling
 * back to the current UTC date/time.
 *
 * NOTE(review): busboy has no 'error' handler here — a malformed multipart
 * stream may leave the request hanging; consider adding one.
 */
async function middleware(req, res, next) {
const contentType = req.headers['content-type'] || '';
let csvText = null;
let filename = null;
if (req.params.filename && contentType.startsWith('text/csv')) {
// Style 1: raw CSV body; filename comes from the route.
csvText = typeof req.body === 'string' ? req.body : req.body.toString('utf8');
filename = req.params.filename;
processCsv();
} else if (contentType.startsWith('multipart/form-data')) {
// Style 2: multipart upload; accumulate the first CSV file part.
const busboy = Busboy({ headers: req.headers });
let found = false;
busboy.on('file', (name, file, info) => {
if (found) {
file.resume();
return;
}
if (info.mimeType === 'text/csv') {
found = true;
filename = info.filename || 'unnamed.csv';
csvText = '';
file.setEncoding('utf8');
file.on('data', (data) => { csvText += data; });
file.on('end', () => {});
} else {
file.resume();
}
});
busboy.on('field', () => {}); // Ignore fields
busboy.on('finish', () => {
if (!found) {
return next();
}
processCsv();
});
req.pipe(busboy);
return;
} else {
return next();
}
// Parse csvText, build the events array, then continue the chain.
// Responds 400 directly on invalid/empty CSV (does not call next()).
function processCsv() {
let records;
try {
records = parse(csvText, {
relax_quotes: true,
quote: '"',
escape: '"',
skip_empty_lines: true,
trim: true
});
} catch (e) {
return res.status(400).json({ error: 'Invalid CSV' });
}
if (!records.length) {
return res.status(400).json({ error: 'Empty CSV' });
}
const headers = records[0].map(h => h.toLowerCase().trim());
const rows = records.slice(1);
// Carry-forward state for sparse date/time columns.
let lastDate = null;
let lastTime = null;
const currentDate = new Date().toISOString().slice(0, 10);
const currentTime = new Date().toISOString().slice(11, 19);
const events = [];
for (let row of rows) {
let object = { labels: [] };
// Map each recognised header (with aliases) onto the event fields.
for (let k = 0; k < headers.length; k++) {
let key = headers[k];
let val = row[k] ? row[k].trim() : '';
if (!key) continue;
if (['remarks', 'event', 'comment', 'comments', 'text'].includes(key)) {
object.remarks = val;
} else if (key === 'label') {
if (val) object.labels.push(val);
} else if (key === 'labels') {
// Multiple labels are semicolon-separated.
if (val) object.labels.push(...val.split(';').map(l => l.trim()).filter(l => l));
} else if (key === 'sequence' || key === 'seq') {
if (val) object.sequence = Number(val);
} else if (['point', 'shot', 'shotpoint'].includes(key)) {
if (val) object.point = Number(val);
} else if (key === 'date') {
object.date = val;
} else if (key === 'time') {
object.time = val;
} else if (key === 'timestamp') {
object.timestamp = val;
} else if (key === 'latitude') {
object.latitude = parseFloat(val);
} else if (key === 'longitude') {
object.longitude = parseFloat(val);
}
}
// An event without remarks carries no information; skip it.
if (!object.remarks) continue;
// Prefer sequence+point addressing; otherwise derive a timestamp.
let useSeqPoint = Number.isFinite(object.sequence) && Number.isFinite(object.point);
let tstamp = null;
if (!useSeqPoint) {
if (object.timestamp) {
tstamp = new Date(object.timestamp);
}
if (!tstamp || isNaN(tstamp.getTime())) {
// Build a timestamp from date/time columns, reusing the last seen
// values, then the current UTC date/time, as fallbacks.
let dateStr = object.date || lastDate || currentDate;
let timeStr = object.time || lastTime || currentTime;
if (timeStr.length === 5) timeStr += ':00'; // allow HH:MM
let full = `${dateStr}T${timeStr}.000Z`; // assumes UTC input
tstamp = new Date(full);
if (isNaN(tstamp.getTime())) continue;
}
if (object.date) lastDate = object.date;
if (object.time) lastTime = object.time;
}
// Tag the event with import provenance (source file + import time).
let event = {
remarks: object.remarks,
labels: object.labels,
meta: {
author: "*CSVImport*",
"*CSVImport*": {
filename,
tstamp: new Date().toISOString()
}
}
};
if (!isNaN(object.latitude) && !isNaN(object.longitude)) {
// GeoJSON order: [longitude, latitude].
event.meta.geometry = {
type: "Point",
coordinates: [object.longitude, object.latitude]
};
}
if (useSeqPoint) {
event.sequence = object.sequence;
event.point = object.point;
} else if (tstamp) {
event.tstamp = tstamp.toISOString();
} else {
continue;
}
events.push(event);
}
// Hand the parsed events to the next middleware (the import handler).
req.body = events;
next();
}
}
module.exports = middleware;

View File

@@ -0,0 +1,18 @@
const { event } = require('../../../../lib/db');
module.exports = async function (req, res, next) {
try {
if (req.params.project && req.params.filename) {
await event.unimport(req.params.project, req.params.filename, req.query);
res.status(204).end();
} else {
res.status(400).send({message: "Malformed request"});
}
next();
} catch (err) {
next(err);
}
};

View File

@@ -0,0 +1,6 @@
// Event CSV-import middleware index.
const csv = require('./csv');       // parses an uploaded CSV into req.body
const put = require('./put');       // persists the parsed events
const del = require('./delete');    // removes a previous import
module.exports = { csv, put, delete: del };

View File

@@ -0,0 +1,16 @@
const { event } = require('../../../../lib/db');
module.exports = async function (req, res, next) {
try {
const payload = req.body;
await event.import(req.params.project, payload, req.query);
res.status(200).send(payload);
next();
} catch (err) {
next(err);
}
};

View File

@@ -7,5 +7,6 @@ module.exports = {
put: require('./put'),
patch: require('./patch'),
delete: require('./delete'),
changes: require('./changes')
changes: require('./changes'),
import: require('./import'),
}

View File

@@ -23,4 +23,5 @@ module.exports = {
version: require('./version'),
admin: require('./admin'),
compress: require('./compress'),
comparisons: require('./comparisons'),
};

View File

@@ -16,7 +16,6 @@ module.exports = async function (req, res, next) {
if (json.length) {
const data = bundle(json, {type});
console.log("bundle", data);
res.status(200).send(Buffer.from(data));
} else {
res.status(404).send();

View File

@@ -1,4 +1,3 @@
const project = require('../../lib/db/project');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
class DetectProjectConfigurationChange {
@@ -10,7 +9,7 @@ class DetectProjectConfigurationChange {
// Grab project configurations.
// NOTE that this will run asynchronously
this.run({channel: "project"}, ctx);
//this.run({channel: "project"}, ctx);
}
async run (data, ctx) {
@@ -28,13 +27,13 @@ class DetectProjectConfigurationChange {
try {
DEBUG("Project configuration change detected")
const projects = await project.get();
project.organisations.setCache(projects);
const projects = await ctx.db.project.get();
ctx.db.project.organisations.setCache(projects);
const _ctx_data = {};
for (let pid of projects.map(i => i.pid)) {
DEBUG("Retrieving configuration for", pid);
const cfg = await project.configuration.get(pid);
const cfg = await ctx.db.project.configuration.get(pid);
if (cfg?.archived === true) {
DEBUG(pid, "is archived. Ignoring");
continue;

View File

@@ -1,5 +1,3 @@
const { schema2pid } = require('../../lib/db/connection');
const { event } = require('../../lib/db');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
class DetectSoftStart {
@@ -33,14 +31,19 @@ class DetectSoftStart {
const prev = this.prev?.payload?.new?.meta;
// DEBUG("%j", prev);
// DEBUG("%j", cur);
DEBUG("cur.num_guns: %d\ncur.num_active: %d\nprv.num_active: %d\ntest passed: %j", cur.num_guns, cur.num_active, prev.num_active, cur.num_active >= 1 && !prev.num_active && cur.num_active < cur.num_guns);
if (cur.lineStatus == "online" || prev.lineStatus == "online") {
DEBUG("lineStatus is online, assuming not in a soft start situation");
return;
}
DEBUG("cur.num_guns: %d\ncur.num_active: %d\nprv.num_active: %d\ncur.num_nofire: %d\nprev.num_nofire: %d", cur.num_guns, cur.num_active, prev.num_active, cur.num_nofire, prev.num_nofire);
if (cur.num_active >= 1 && !prev.num_active && cur.num_active < cur.num_guns) {
INFO("Soft start detected @", cur.tstamp);
// FIXME Shouldn't need to use schema2pid as pid already present in payload.
const projectId = await schema2pid(cur._schema ?? prev._schema);
const projectId = await ctx.schema2pid(cur._schema ?? prev._schema);
// TODO: Try and grab the corresponding comment from the configuration?
const payload = {
@@ -50,12 +53,16 @@ class DetectSoftStart {
meta: {auto: true, author: `*${this.constructor.name}*`}
};
DEBUG("Posting event", projectId, payload);
await event.post(projectId, payload);
if (ctx.dryRun) {
DEBUG(`DRY RUN: await ctx.db.event.post(${projectId}, ${payload});`);
} else {
await ctx.db.event.post(projectId, payload);
}
} else if (cur.num_active == cur.num_guns && prev.num_active < cur.num_active) {
} else if ((cur.num_active == cur.num_guns || (prev.num_nofire > 0 && cur.num_nofire == 0)) && prev.num_active < cur.num_active) {
INFO("Full volume detected @", cur.tstamp);
const projectId = await schema2pid(cur._schema ?? prev._schema);
const projectId = await ctx.schema2pid(cur._schema ?? prev._schema);
// TODO: Try and grab the corresponding comment from the configuration?
const payload = {
@@ -65,7 +72,11 @@ class DetectSoftStart {
meta: {auto: true, author: `*${this.constructor.name}*`}
};
DEBUG("Posting event", projectId, payload);
await event.post(projectId, payload);
if (ctx.dryRun) {
DEBUG(`DRY RUN: await ctx.db.event.post(${projectId}, ${payload});`);
} else {
await ctx.db.event.post(projectId, payload);
}
}
} catch (err) {

View File

@@ -1,5 +1,3 @@
const { schema2pid } = require('../../lib/db/connection');
const { event } = require('../../lib/db');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
class DetectSOLEOL {
@@ -43,7 +41,7 @@ class DetectSOLEOL {
// We must use schema2pid because the pid may not have been
// populated for this event.
const projectId = await schema2pid(cur._schema ?? prev._schema);
const projectId = await ctx.schema2pid(cur._schema ?? prev._schema);
const labels = ["FSP", "FGSP"];
const remarks = `SEQ ${cur._sequence}, SOL ${cur.lineName}, BSP: ${(cur.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(cur.waterDepth).toFixed(0)} m.`;
const payload = {
@@ -55,24 +53,32 @@ class DetectSOLEOL {
meta: {auto: true, author: `*${this.constructor.name}*`}
}
INFO("Posting event", projectId, payload);
await event.post(projectId, payload);
if (ctx.dryRun) {
DEBUG(`DRY RUN: await ctx.db.event.post(${projectId}, ${payload});`);
} else {
await ctx.db.event.post(projectId, payload);
}
} else if (prev.lineName == cur.lineName && prev._sequence == cur._sequence &&
prev.lineStatus == "online" && cur.lineStatus != "online" && sequence) {
INFO("Transition to OFFLINE detected");
const projectId = await schema2pid(prev._schema ?? cur._schema);
const projectId = await ctx.schema2pid(prev._schema ?? cur._schema);
const labels = ["LSP", "LGSP"];
const remarks = `SEQ ${cur._sequence}, EOL ${cur.lineName}, BSP: ${(cur.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(cur.waterDepth).toFixed(0)} m.`;
const remarks = `SEQ ${prev._sequence}, EOL ${prev.lineName}, BSP: ${(prev.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(prev.waterDepth).toFixed(0)} m.`;
const payload = {
type: "sequence",
sequence,
point: cur._point,
point: prev._point,
remarks,
labels,
meta: {auto: true, author: `*${this.constructor.name}*`}
}
INFO("Posting event", projectId, payload);
await event.post(projectId, payload);
if (ctx.dryRun) {
DEBUG(`DRY RUN: await ctx.db.event.post(${projectId}, ${payload});`);
} else {
await ctx.db.event.post(projectId, payload);
}
}
} catch (err) {

View File

@@ -8,37 +8,6 @@ const Handlers = [
require('./detect-fdsp')
];
function init (ctx) {
const instances = Handlers.map(Handler => new Handler(ctx));
function prepare (data, ctx) {
const promises = [];
for (let instance of instances) {
const promise = new Promise(async (resolve, reject) => {
try {
DEBUG("Run", instance.author);
const result = await instance.run(data, ctx);
DEBUG("%s result: %O", instance.author, result);
resolve(result);
} catch (err) {
ERROR("%s error:\n%O", instance.author, err);
reject(err);
}
});
promises.push(promise);
}
return promises;
}
function despatch (data, ctx) {
return Promise.allSettled(prepare(data, ctx));
}
return { instances, prepare, despatch };
}
module.exports = {
Handlers,
init
};

View File

@@ -1,6 +1,3 @@
const { event, project } = require('../../lib/db');
const { withinValidity } = require('../../lib/utils/ranges');
const unique = require('../../lib/utils/unique');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
class ReportLineChangeTime {
@@ -44,7 +41,7 @@ class ReportLineChangeTime {
async function getLineChangeTime (data, forward = false) {
if (forward) {
const ospEvents = await event.list(projectId, {label: "FGSP"});
const ospEvents = await ctx.db.event.list(projectId, {label: "FGSP"});
// DEBUG("ospEvents", ospEvents);
const osp = ospEvents.filter(i => i.tstamp > data.tstamp).pop();
DEBUG("fsp", osp);
@@ -55,7 +52,7 @@ class ReportLineChangeTime {
return { lineChangeTime: osp.tstamp - data.tstamp, osp };
}
} else {
const ospEvents = await event.list(projectId, {label: "LGSP"});
const ospEvents = await ctx.db.event.list(projectId, {label: "LGSP"});
// DEBUG("ospEvents", ospEvents);
const osp = ospEvents.filter(i => i.tstamp < data.tstamp).shift();
DEBUG("lsp", osp);
@@ -96,16 +93,20 @@ class ReportLineChangeTime {
const opts = {jpq};
if (Array.isArray(seq)) {
opts.sequences = unique(seq).filter(i => !!i);
opts.sequences = ctx.unique(seq).filter(i => !!i);
} else {
opts.sequence = seq;
}
const staleEvents = await event.list(projectId, opts);
const staleEvents = await ctx.db.event.list(projectId, opts);
DEBUG(staleEvents.length ?? 0, "events to delete");
for (let staleEvent of staleEvents) {
DEBUG(`Deleting event id ${staleEvent.id} (seq = ${staleEvent.sequence}, point = ${staleEvent.point})`);
await event.del(projectId, staleEvent.id);
if (ctx.dryRun) {
DEBUG(`await ctx.db.event.del(${projectId}, ${staleEvent.id});`);
} else {
await ctx.db.event.del(projectId, staleEvent.id);
}
}
}
}
@@ -180,7 +181,11 @@ class ReportLineChangeTime {
const maybePostEvent = async (projectId, payload) => {
DEBUG("Posting event", projectId, payload);
await event.post(projectId, payload);
if (ctx.dryRun) {
DEBUG(`await ctx.db.event.post(${projectId}, ${payload});`);
} else {
await ctx.db.event.post(projectId, payload);
}
}
@@ -192,7 +197,7 @@ class ReportLineChangeTime {
const data = n;
DEBUG("INSERT seen: will add lct events related to ", data.id);
if (withinValidity(data.validity)) {
if (ctx.withinValidity(data.validity)) {
DEBUG("Event within validity period", data.validity, new Date());
data.tstamp = new Date(data.tstamp);

View File

@@ -1,29 +1,101 @@
const nodeAsync = require('async'); // npm install async
const { listen } = require('../lib/db/notify');
const db = require('../lib/db'); // Adjust paths; include all needed DB utils
const { schema2pid } = require('../lib/db/connection');
const unique = require('../lib/utils/unique'); // If needed by handlers
const withinValidity = require('../lib/utils/ranges').withinValidity; // If needed
const { ALERT, ERROR, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
// List of handler classes (add more as needed)
const handlerClasses = require('./handlers').Handlers;
// Channels to listen to (hardcoded for simplicity; could scan handlers for mentions)
const channels = require('../lib/db/channels');
const handlers = require('./handlers');
const { ActionsQueue } = require('../lib/queue');
const { ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
function start () {
// Queue config: Process one at a time for order; max retries=3
const eventQueue = nodeAsync.queue(async (task, callback) => {
const { data, ctx } = task;
DEBUG(`Processing event on channel ${data.channel} with timestamp ${data._received ?? 'unknown'}`);
const queue = new ActionsQueue();
const ctx = {}; // Context object
for (const handler of ctx.handlers) {
try {
await handler.run(data, ctx);
} catch (err) {
ERROR(`Error in handler ${handler.constructor.name}:`, err);
// Retry logic: Could add task.retries++, re-enqueue if < max
}
}
const { prepare, despatch } = handlers.init(ctx);
if (typeof callback === 'function') {
// async v3.2.6+ does not use callsbacks with AsyncFunctions, but anyway
callback();
}
}, 1); // Concurrency=1 for strict order
listen(channels, function (data) {
DEBUG("Incoming data", data);
eventQueue.error((err, task) => {
ALERT(`Queue error processing task:`, err, task);
});
// We don't bother awaiting
queue.enqueue(() => despatch(data, ctx));
DEBUG("Queue size", queue.length());
// Main setup function (call from server init)
async function setupEventHandlers(projectsConfig) {
// Shared context
const ctx = {
dryRun: Boolean(process.env.DOUGAL_HANDLERS_DRY_RUN) ?? false, // If true, don't commit changes
projects: { configuration: projectsConfig }, // From user config
handlers: handlerClasses.map(Cls => new Cls()), // Instances
// DB utils (add more as needed)
db,
schema2pid,
unique,
withinValidity
// Add other utils, e.g., ctx.logger = DEBUG;
};
// Optional: Replay recent events on startup to rebuild state
// await replayRecentEvents(ctx);
// Setup listener
const subscriber = await listen(channels, (rawData) => {
const data = {
...rawData,
enqueuedAt: new Date() // For monitoring
};
eventQueue.push({ data, ctx });
});
INFO("Events manager started");
DEBUG('Event handler system initialized with channels:', channels);
if (ctx.dryRun) {
DEBUG('DRY RUNNING');
}
// Return for cleanup if needed
return {
close: () => {
subscriber.events.removeAllListeners();
subscriber.close();
eventQueue.kill();
}
};
}
module.exports = { start }
// Optional: Replay last N events to rebuild handler state (e.g., this.prev)
// async function replayRecentEvents(ctx) {
// try {
// // Example: Fetch last 10 realtime events, sorted by tstamp
// const recentRealtime = await event.listAllProjects({ channel: 'realtime', limit: 10, sort: 'tstamp DESC' });
// // Assume event.listAllProjects is a custom DB method; implement if needed
//
// // Enqueue in original order (reverse sort)
// recentRealtime.reverse().forEach((evt) => {
// const data = { channel: 'realtime', payload: { new: evt } };
// eventQueue.push({ data, ctx });
// });
//
// // Similarly for 'event' channel if needed
// DEBUG('Replayed recent events for state rebuild');
// } catch (err) {
// ERROR('Error replaying events:', err);
// }
// }
if (require.main === module) {
start();
}
module.exports = { setupEventHandlers };

View File

@@ -2,18 +2,37 @@
const { ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
async function getProjectConfigurations (opts = {}) {
const includeArchived = {includeArchived: false, ...opts};
let projectConfigurations = {};
try {
const db = require('./lib/db');
const pids = (await db.project.get())
.filter(i => includeArchived || !i.archived)
.map(i => i.pid);
for (const pid of pids) {
DEBUG(`Reading project configuration for ${pid}`);
const cfg = await db.project.configuration.get(pid);
projectConfigurations[pid] = cfg;
}
} catch (err) {
ERROR("Failed to get project configurations");
ERROR(err);
}
return projectConfigurations;
}
async function main () {
// Check that we're running against the correct database version
const version = require('./lib/version');
INFO("Running version", await version.describe());
version.compatible()
.then( (versions) => {
.then( async (versions) => {
try {
const api = require('./api');
const ws = require('./ws');
const periodicTasks = require('./periodic-tasks').init();
const { fork } = require('child_process');
const { setupEventHandlers } = require('./events');
const port = process.env.HTTP_PORT || 3000;
const host = process.env.HTTP_HOST || "127.0.0.1";
@@ -25,33 +44,31 @@ async function main () {
periodicTasks.start();
const eventManagerPath = [__dirname, "events"].join("/");
const eventManager = fork(eventManagerPath, /*{ stdio: 'ignore' }*/);
const projectConfigurations = await getProjectConfigurations();
const handlerSystem = await setupEventHandlers(projectConfigurations);
process.on("SIGINT", async () => {
DEBUG("Interrupted (SIGINT)");
eventManager.kill()
handlerSystem.close();
await periodicTasks.cleanup();
process.exit(0);
})
process.on("SIGHUP", async () => {
DEBUG("Stopping (SIGHUP)");
eventManager.kill()
handlerSystem.close();
await periodicTasks.cleanup();
process.exit(0);
})
process.on('beforeExit', async () => {
DEBUG("Preparing to exit");
eventManager.kill()
handlerSystem.close();
await periodicTasks.cleanup();
});
process.on('exit', async () => {
DEBUG("Exiting");
// eventManager.kill()
// periodicTasks.cleanup();
});
} catch (err) {
ERROR(err);

View File

@@ -8,8 +8,6 @@ function bundle (json, opts = {}) {
const deltas = [];
const values = [];
// console.log("JSON LENGTH", json.length);
// console.log("OPTS", geometries, payload);
if (type == 0) {
/* Preplot information sail line points
@@ -40,7 +38,7 @@ function bundle (json, opts = {}) {
return encode.sequential(json, el => el.sailline, el => el.point, deltas, values, type)
} if (type == 1) {
} else if (type == 1) {
/* Preplot information source line points
*
* elem 0: Float32Array Longitude
@@ -74,7 +72,6 @@ function bundle (json, opts = {}) {
type: Uint16Array
});
console.log("JSON", json[0]);
return encode.sequential(json, el => el.line, el => el.point, deltas, values, type)
} else if (type == 2) {
@@ -222,9 +219,6 @@ function bundle (json, opts = {}) {
type: Uint8Array
});
console.log("DELTAS", deltas);
console.log("VALUES", values);
return encode.sequential(json, el => el.sequence, el => el.point, deltas, values, type)
} else if (type == 3) {
/* Final positions and raw vs final errors:
@@ -279,6 +273,88 @@ function bundle (json, opts = {}) {
});
return encode.sequential(json, el => el.sequence, el => el.point, deltas, values, type)
} else if (type == 0xa) {
/* 4D comparison data:
*
* elem0: i differences
* elem1: j differences
*
* Note that line/point may not be unique.
*
*/
/*
deltas.push({
key: el => el.baseTStamp,
baseType: BigUint64Array,
incrType: Int32Array
});
deltas.push({
key: el => el.monTStamp,
baseType: BigUint64Array,
incrType: Int32Array
})
*/
values.push({
key: el => el[2],
type: Float32Array
});
values.push({
key: el => el[3],
type: Float32Array
});
/*
values.push({
key: el => el.baseSeq,
type: Uint16Array
});
values.push({
key: el => el.monSeq,
type: Uint16Array
});
*/
return encode.sequential(json, el => el[0], el => el[1], deltas, values, type)
} else if (type == 0xc) {
/* 4D comparison data (reduced sample)
*
* Input is comparison records, i.e.:
* [ [ line, point, δi, δj ], … ]
*
* elem0: line
* elem1: point
* elem2: δi
* elem3: δj
*
* Note that the chunk's `i` and `j` values are not used
*/
values.push({
key: el => el[0],
type: Uint16Array
});
values.push({
key: el => el[1],
type: Uint16Array
});
values.push({
key: el => el[2],
type: Float32Array
});
values.push({
key: el => el[3],
type: Float32Array
});
return encode.sequential(json, el => 0, el => 0, deltas, values, type)
}
}

View File

@@ -0,0 +1,578 @@
const d3a = require('d3-array');
const { DougalBinaryBundle } = require('@dougal/binary');
const { pool, setSurvey } = require('../db/connection');
const db = require('../db');
const { bundle } = require('../binary/bundle');
const setops = require('../utils/setops');
const { ijRMS, combinations, computeSample } = require('./utils');
const { computePCA } = require('./pca');
const { ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
/**
 * Fetch the final-shot i/j errors for one project.
 *
 * Connects to the project's survey schema and pulls, for every final
 * shot, the ij_error of its geometry.
 *
 * @param {string} pid - project identifier
 * @returns {Promise<Array<Array<number>>|undefined>} rows of
 *   [line, point, i_error, j_error] ordered by line then point, or
 *   undefined if the query failed (error is logged).
 */
async function fetchErrors (pid) {
  const client = await setSurvey(pid);
  try {
    const text = `
      SELECT
        fs.line, fs.point,
        ij_error(fs.line::double precision, fs.point::double precision, fs.geometry)::json AS errorfinal
      FROM
        final_shots fs
      ORDER BY fs.line, fs.point;
    `;
    const result = await client.query(text);
    return result.rows.map( ({ line, point, errorfinal }) =>
      [line, point, errorfinal.coordinates[0], errorfinal.coordinates[1]]
    );
  } catch (err) {
    console.error(err);
  } finally {
    // Always return the client to the pool, even on failure.
    client.release();
  }
}
/**
 * Build a map of group name → member project records.
 *
 * @returns {Promise<Object<string, Array<Object>>>} e.g.
 *   { "groupA": [projectRecord, …], … }; falsy group names are dropped.
 */
async function groups () {
  const projects = await db.project.get();
  // Collect every distinct, truthy group name across all projects.
  // NOTE: Set.prototype.add() takes a single value, so the previous
  // acc.add(...cur.groups) silently dropped all but each project's
  // first group.
  const names = new Set();
  for (const project of projects) {
    for (const name of project.groups ?? []) {
      names.add(name);
    }
  }
  return Object.fromEntries(
    [...names]
      .filter( name => !!name )
      .map( name => [name, projects.filter( p => p.groups?.includes(name) )] )
  );
}
/*
async function compare (baselineProjectID, monitorProjectID) {
console.log("Getting baseline", baselineProjectID);
const baselineData = await db.sequence.get(baselineProjectID);
console.log("Getting monitor", monitorProjectID);
const monitorData = await db.sequence.get(monitorProjectID);
console.log("Comparing");
const comparison = comparisonGeometricDifferences(baselineData, monitorData);
return comparison;
}
*/
/**
 * Compute per-point geometric differences between two error data sets.
 *
 * Both inputs are arrays of [line, point, i, j] records (as produced
 * by fetchErrors). Every monitor record sharing a baseline record's
 * line/point is matched, and the difference (monitor − baseline) of the
 * i/j components is emitted — so a line/point pair may appear more than
 * once in the result.
 *
 * @param {Array<Array<number>>} baseline - [[line, point, i, j], …]
 * @param {Array<Array<number>>} monitor - [[line, point, i, j], …]
 * @returns {Array<Array<number>>} [[line, point, εi, εj], …] in baseline order
 * @throws {Error} if either input is missing or empty
 */
function geometric_differences (baseline, monitor) {
  if (!baseline || !baseline.length) {
    throw new Error("No baseline data");
  }
  if (!monitor || !monitor.length) {
    throw new Error("No monitor data");
  }
  // Index monitor points by line/point once, instead of scanning the
  // whole monitor array for every baseline point (O(n+m) vs O(n·m)).
  const monitorIndex = new Map();
  for (const mp of monitor) {
    const key = `${mp[0]}:${mp[1]}`;
    const bucket = monitorIndex.get(key);
    if (bucket) {
      bucket.push(mp);
    } else {
      monitorIndex.set(key, [mp]);
    }
  }
  const comparison = []; // An array of [line, point, εi, εj]; line + point may be repeated
  for (const bp of baseline) {
    const monitor_points = monitorIndex.get(`${bp[0]}:${bp[1]}`);
    if (!monitor_points) {
      // No match for this baseline point; skip silently.
      continue;
    }
    for (const mp of monitor_points) {
      const εi = mp[2] - bp[2], εj = mp[3] - bp[3];
      comparison.push([bp[0], bp[1], εi, εj]);
    }
  }
  return comparison;
}
/**
 * Fetch the errors of two projects and compute their geometric
 * differences.
 *
 * @param {string} baselineProjectID
 * @param {string} monitorProjectID
 * @param {Object} [infoObj] - if given, is populated in place with
 *   counts about the comparison (compared, lengths, unique lengths,
 *   common point count).
 * @returns {Promise<Array<Array<number>>>} [[line, point, εi, εj], …]
 */
async function compare (baselineProjectID, monitorProjectID, infoObj) {
  console.log("Getting baseline", baselineProjectID);
  const errorsBase = await fetchErrors(baselineProjectID);
  console.log("Getting monitor", monitorProjectID);
  const errorsMon = await fetchErrors(monitorProjectID);
  console.log("Comparing");
  const result = geometric_differences(errorsBase, errorsMon);
  if (infoObj instanceof Object) {
    // Strip to [line, point] pairs for the set-based statistics.
    const ijBase = errorsBase.map( r => r.slice(0, 2) );
    const ijMon = errorsMon.map( r => r.slice(0, 2) );
    Object.assign(infoObj, {
      compared: result.length,
      baselineLength: errorsBase.length,
      monitorLength: errorsMon.length,
      baselineUniqueLength: setops.unique(ijBase).length,
      monitorUniqueLength: setops.unique(ijMon).length,
      common: setops.intersection(ijBase, ijMon).length
    });
  }
  return result;
}
/**
 * Store a geometric-difference comparison between two projects.
 *
 * If `bundle` is not supplied, the comparison is computed from scratch
 * via compare(). An existing row for the same (type, baseline, monitor)
 * triple is overwritten (upsert).
 *
 * NOTE: the `bundle` parameter shadows the bundle() encoder imported
 * from ../binary/bundle; only asBundle() is used here so this is
 * harmless, but beware when editing.
 *
 * @param {string} baselineProjectID
 * @param {string} monitorProjectID
 * @param {DougalBinaryBundle} [bundle] - precomputed comparison bundle
 * @param {Object} [meta] - extra metadata merged into the stored meta
 * @returns {Promise<number|undefined>} affected row count, or undefined
 *   when nothing was stored (no matching points, empty bundle, DB error)
 * @throws {Error} if `bundle` is truthy but not a DougalBinaryBundle
 */
async function save (baselineProjectID, monitorProjectID, bundle, meta) {
  const info = {};
  if (!bundle) {
    const comparison = await compare(baselineProjectID, monitorProjectID, info);
    if (comparison.length) {
      bundle = asBundle(comparison);
    } else {
      console.warn(`No matching points between ${baselineProjectID} and ${monitorProjectID}`);
      return;
    }
  } else if (!(bundle instanceof DougalBinaryBundle)) {
    // FIX: previous message read "must of null or of type …"
    throw new Error("Illegal data: `bundle` must be null or of type DougalBinaryBundle");
  }
  if (!bundle.byteLength) {
    console.warn(`Empty comparison results between ${baselineProjectID} and ${monitorProjectID}. Refusing to store`);
    return;
  }
  // Stored meta: timestamp + computation info + bundle statistics,
  // overridable by the caller-supplied meta.
  meta = {tstamp: (new Date()), ...info, ...stats(bundle), ...meta};
  console.log("Storing in database");
  const client = await pool.connect();
  try {
    const text = `
      INSERT INTO comparisons.comparisons
        (type, baseline_pid, monitor_pid, data, meta)
      VALUES ('geometric_difference', $1, $2, $3, $4)
      ON CONFLICT (type, baseline_pid, monitor_pid)
      DO UPDATE SET
        data = EXCLUDED.data,
        meta = EXCLUDED.meta;
    `;
    const values = [ baselineProjectID, monitorProjectID, Buffer.from(bundle), meta ];
    const res = await client.query(text, values);
    return res.rowCount;
  } catch (err) {
    console.error(err);
  } finally {
    client.release();
  }
}
// Deprecated stub: comparison samples are no longer persisted.
// Kept as a no-op so existing call sites (e.g. saveGroup) keep working;
// slated for removal along with the 'geometric_difference_sample' rows.
async function saveSample (baselineProjectID, monitorProjectID, opts = {}) {
  DEBUG("Not bothering to save samples. This feature will be removed.");
}
/*
async function saveSample (baselineProjectID, monitorProjectID, opts = {}) {
let sample = opts.sample;
let populationStats = opts.populationStats;
let sampleStats = opts.sampleStats;
if (!sample?.length) {
const sampleSize = opts.sampleSize ?? 2000;
const record = await get(baselineProjectID, monitorProjectID);
let data;
if (record) {
data = record.data;
} else {
console.log("Full data not found in database");
data = asBundle(await compare(baselineProjectID, monitorProjectID));
}
sample = computeSample(data, opts);
if (!populationStats) {
populationStats = stats(data);
}
}
const bundle = asBundle(sample, {type: 0x0c});
if (!sampleStats) {
sampleStats = stats(bundle);
}
meta = {tstamp: (new Date()), populationStats, sampleStats, ...(opts.meta??{})};
const client = await pool.connect();
try {
const text = `
INSERT INTO comparisons.comparisons
(type, baseline_pid, monitor_pid, data, meta)
VALUES ('geometric_difference_sample', $1, $2, $3, $4)
ON CONFLICT (type, baseline_pid, monitor_pid)
DO UPDATE SET
data = EXCLUDED.data,
meta = EXCLUDED.meta;
`;
const values = [ baselineProjectID, monitorProjectID, Buffer.from(bundle), meta ];
const res = await client.query(text, values);
return res.rowCount;
} catch (err) {
console.error(err);
} finally {
client.release();
}
}
*/
/**
 * Fetch one stored comparison between two projects.
 *
 * @param {string} baselineProjectID
 * @param {string} monitorProjectID
 * @param {string} [type='geometric_difference'] - comparison type
 * @returns {Promise<{data: DougalBinaryBundle, meta: Object}|undefined>}
 *   undefined when no row exists or the query failed (both are logged).
 */
async function get (baselineProjectID, monitorProjectID, type = 'geometric_difference') {
  const client = await pool.connect();
  try {
    const text = `
      SELECT data, meta
      FROM comparisons.comparisons
      WHERE type = $3 AND baseline_pid = $1 AND monitor_pid = $2;
    `;
    const result = await client.query(text, [ baselineProjectID, monitorProjectID, type ]);
    if (!result.rows.length) {
      console.log("Comparison not found in database");
      return;
    }
    const { data, meta } = result.rows[0];
    // Rehydrate the raw bytes into a DougalBinaryBundle.
    return { data: DougalBinaryBundle.clone(data), meta };
  } catch (err) {
    console.error(err);
  } finally {
    client.release();
  }
}
// Convenience wrapper around get() for the (legacy) reduced-sample
// comparison type.
async function getSample (baselineProjectID, monitorProjectID) {
  return get(baselineProjectID, monitorProjectID, 'geometric_difference_sample');
}
/**
 * Delete both the full comparison and its (legacy) sample between two
 * projects.
 *
 * @param {string} baselineProjectID
 * @param {string} monitorProjectID
 * @returns {Promise<number|undefined>} rows removed, undefined on error
 */
async function remove (baselineProjectID, monitorProjectID) {
  const client = await pool.connect();
  try {
    const text = `
      DELETE
      FROM comparisons.comparisons
      WHERE
        (type = 'geometric_difference' OR type = 'geometric_difference_sample')
        AND baseline_pid = $1
        AND monitor_pid = $2;
    `;
    const result = await client.query(text, [ baselineProjectID, monitorProjectID ]);
    return result.rowCount;
  } catch (err) {
    console.error(err);
  } finally {
    client.release();
  }
}
/**
 * Compute summary statistics for a comparison.
 *
 * Accepts either a DougalBinaryBundle (udv 0xa or 0xc) or a plain
 * array, in which case it is first encoded via asBundle() and the
 * function recurses. Returns undefined when the udv cannot be read;
 * throws on an unrecognised udv.
 *
 * @returns {Object|undefined} { length, μ: [i, j], σ: [i, j], …PCA }
 */
function stats (comparison) {
  let i, j, δi, δj;
  if (comparison instanceof DougalBinaryBundle) {
    console.log("Computing stats");
    // The user-defined value of the first chunk identifies the record layout.
    const udv = comparison.chunks()[0]?.udv;
    if (!udv) {
      console.error("Could not determine udv from first chunk");
      console.log(comparison.chunks());
      return;
    }
    let records;
    if (udv == 0xa) {
      records = comparison.records;
      // Transpose the records into four column arrays [i, j, δi, δj]
      [ i, j, δi, δj ] = Array.from({ length: 4 }, (_, i) => records.map(row => row[i]));
    } else if (udv == 0xc) {
      records = comparison.records;
      // 0xc records carry six columns; the first two (presumably the
      // encoder's sequential keys, always 0 for this type — TODO confirm)
      // are discarded.
      // NOTE(review): computePCA() below reads columns 2-3 as the
      // deviations, which for 0xc records appear to be line/point
      // rather than δi/δj — confirm.
      let _;
      [ _, _, i, j, δi, δj ] = Array.from({ length: 6 }, (_, i) => records.map(row => row[i]));
    } else {
      throw new Error(`Unrecognised DougalBinaryBundle User Defined Value: ${udv}`);
    }
    return {
      length: records.length,
      μ: [ d3a.mean(δi), d3a.mean(δj) ],           // mean deviation per axis
      σ: [ d3a.deviation(δi), d3a.deviation(δj) ], // std deviation per axis
      //rms: ijRMS(δi, δj),
      ...computePCA(records)
    }
  } else if (Array.isArray(comparison)) {
    if (Array.isArray(comparison[0])) {
      // Array of arrays: reduced-sample record layout
      return stats(asBundle(comparison, {type: 0xc}));
    } else {
      // Assume object
      return stats(asBundle(comparison));
    }
  }
}
/** Compare two projects' errorfinal quantities.
*
* Assumes that the preplots are the same.
* It is not a terribly efficient way of doing it, but considering
* that this is, by and large only going to be done once every few
* hours for an active prospect, and never for inactive ones, I
* think and hope we can live with that.
*
* `baseline` and `monitor` are the result of calling
* db.sequence.get(projectId) on each of the respective
* projects.
*/
/*
function comparisonGeometricDifferences (baseline, monitor) {
if (!baseline || !baseline.length) {
throw new Error("No baseline data");
}
if (!monitor || !monitor.length) {
throw new Error("No monitor data");
}
const comparison = []; // An array of { line, point, εi, εj, δts }; line + point may be repeated
for (const bp of baseline) {
if (!bp.errorfinal) {
console.log(`No final data for baseline point L${bp.line} S${bp.sequence} P${bp.point}`);
continue;
}
const monitor_points = monitor.filter( mp => mp.line === bp.line && mp.point === bp.point );
for (const mp of monitor_points) {
if (!mp.errorfinal) {
console.log(`No final data for monitor point L${mp.line} S${mp.sequence} P${mp.point}`);
continue;
}
const line = bp.line;
const point = bp.point;
const baseSeq = bp.sequence;
const monSeq = mp.sequence;
const baseTStamp = bp.tstamp;
const monTStamp = mp.tstamp;
const δi = bp.errorfinal.coordinates[0] - mp.errorfinal.coordinates[0];
const δj = bp.errorfinal.coordinates[1] - mp.errorfinal.coordinates[1];
const obj = {line, point, baseSeq, monSeq, baseTStamp, monTStamp, δi, δj};
comparison.push(obj);
// console.log(obj);
}
}
return comparison.sort(sortFn);
}
function sortComparison (comparison) {
return comparison.sort( (a, b) => {
if (a.line == b.line) {
if (a.point == b.point) {
return a.baseTStamp - b.baseTStamp;
} else {
return a.point - b.point;
}
} else {
return a.line - b.line;
}
})
}
*/
// Comparator for comparison records: order by line, then point, then
// baseline timestamp.
function sortFn (a, b) {
  if (a.line != b.line) return a.line - b.line;
  if (a.point != b.point) return a.point - b.point;
  return a.baseTStamp - b.baseTStamp;
}
// Encode a comparison (array of records or objects) as a
// DougalBinaryBundle. Defaults to bundle type 0x0a (full 4D comparison
// data); pass {type: 0xc} for reduced-sample [line, point, δi, δj] records.
// NOTE(review): a caller-supplied opts without `type` replaces the
// default object entirely — confirm all callers pass a type.
function asBundle (comparison, opts = {type: 0x0a}) {
  return DougalBinaryBundle.clone(bundle(comparison, opts));
}
/**
 * Unpack a comparison bundle into plain JSON records.
 *
 * Each record's leading element (presumably the chunk key added by the
 * encoder — TODO confirm) is dropped in place.
 *
 * @param {DougalBinaryBundle|*} bundle - a bundle, or anything
 *   DougalBinaryBundle.clone() can rehydrate
 * @returns {Array<Array>} the remaining record columns
 */
function fromBundle (bundle) {
  const bb = bundle instanceof DougalBinaryBundle
    ? bundle
    : DougalBinaryBundle.clone(bundle);
  return Array.from(bb, record => {
    record.shift();
    return record;
  });
}
/**
 * Compute and store pairwise comparisons for a project group.
 *
 * `group` may be:
 *   - null/undefined: process every known group;
 *   - a string: a group name, resolved via groups();
 *   - an array of project records ({pid, …}).
 *
 * Existing comparisons are skipped unless opts.overwrite is true.
 *
 * @param {null|string|Array<Object>} group
 * @param {Object} [opts]
 * @param {boolean} [opts.overwrite] - recompute existing comparisons
 * @param {Object} [opts.sampleOpts] - passed through to saveSample()
 */
async function saveGroup (group, opts = {}) {
  if (group == null) {
    // Save everything: recurse once per group, propagating opts
    // (previously the options were dropped on recursion).
    const g = await groups();
    for (const members of Object.values(g)) {
      await saveGroup(members, opts);
    }
    return;
  }
  if (typeof group === "string") {
    // This is a group name: resolve it to its member project records.
    // (Previously this read `groups[g]` — indexing the groups *function*
    // with the groups object — which always yielded undefined, so
    // named-group lookups never worked.)
    const g = await groups();
    group = g[group];
  }
  if (Array.isArray(group)) {
    const pids = group.map( i => i.pid ).sort();
    for (const [ baselineProjectID, monitorProjectID ] of combinations(pids, 2)) {
      try {
        if (!opts.overwrite) {
          const exists = await get(baselineProjectID, monitorProjectID);
          if (exists) {
            DEBUG("Not overwriting existing comparison between %s and %s. Skipping", baselineProjectID, monitorProjectID);
            continue;
          }
        }
        const isSaved = await save(baselineProjectID, monitorProjectID);
        if (isSaved) {
          await saveSample(baselineProjectID, monitorProjectID, opts.sampleOpts);
        } else {
          // Nothing stored (e.g. no matching points): drop any stale row.
          await remove(baselineProjectID, monitorProjectID);
        }
        DEBUG("Saved comparison between %s and %s", baselineProjectID, monitorProjectID);
      } catch (err) {
        console.error(err);
        ERROR("Error saving comparison between %s and %s", baselineProjectID, monitorProjectID);
      }
    }
  }
}
/*
async function getGroup (groupName, opts = {}) {
const group = (await groups())?.[groupName]?.map( i => i.pid)?.sort();
if (!group?.length) return;
const client = await pool.connect();
try {
const text = `
-- SQL query goes here
`;
const values = combinations(group, 2);
const res = await client.query(text, values);
if (!res.rows.length) {
console.log("Comparison not found in database");
return;
}
if (opts.returnData) {
return res.rows.map( row => ({
data: DougalBinaryBundle.clone(row.data),
meta: row.meta
});
} else {
return res.rows.map( row => row.meta );
}
} catch (err) {
console.error(err);
} finally {
client.release();
}
}
*/
/**
 * Fetch all stored pairwise comparisons for a named project group.
 *
 * @param {string} groupName
 * @param {Object} [opts]
 * @param {boolean} [opts.returnData] - also return the binary bundles
 * @returns {Promise<Array<Object>|undefined>} rows of
 *   { baseline_pid, monitor_pid, meta[, data] }, or undefined when the
 *   group is unknown, has fewer than two projects, or nothing is stored.
 */
async function getGroup (groupName, opts = {}) {
  const group = (await groups())?.[groupName]?.map( i => i.pid)?.sort();
  // A single project yields no pairs; bail out before building an
  // empty VALUES list (which would be a SQL syntax error).
  if (!group || group.length < 2) return;
  const pairs = combinations(group, 2);
  if (!pairs.length) return;
  const client = await pool.connect();
  try {
    const flatValues = pairs.flat();
    // Build ($1,$2),($3,$4),… placeholders for the pair list.
    const placeholders = [];
    for (let i = 0; i < pairs.length; i++) {
      placeholders.push(`($${i * 2 + 1}, $${i * 2 + 2})`);
    }
    const inClause = placeholders.join(',');
    const selectFields = opts.returnData ? 'data, meta' : 'meta';
    const text = `
      SELECT baseline_pid, monitor_pid, ${selectFields}
      FROM comparisons.comparisons
      WHERE type = 'geometric_difference'
      AND (baseline_pid, monitor_pid) IN (VALUES ${inClause})
      ORDER BY baseline_pid, monitor_pid
    `;
    DEBUG(text);
    DEBUG("%o", flatValues);
    const res = await client.query(text, flatValues);
    if (!res.rows.length) {
      console.log("Comparison not found in database");
      return;
    }
    if (opts.returnData) {
      // Rehydrate the raw bytes into DougalBinaryBundle objects.
      return res.rows.map( row => ({
        ...row,
        data: DougalBinaryBundle.clone(row.data),
      }));
    } else {
      return res.rows;
    }
  } catch (err) {
    console.error(err);
  } finally {
    client.release();
  }
}
// Public interface of the geometric-differences comparison module.
module.exports = {
  groups,
  fetchErrors,
  compare,
  computeSample,
  get,
  save,
  getSample,
  saveSample,
  saveGroup,
  getGroup,
  remove,
  stats,
  // comparisonGeometricDifferences,
  asBundle,
  fromBundle
};

View File

@@ -0,0 +1,4 @@
// Aggregate the comparison sub-modules (currently only
// geometric-differences) under a single namespace.
module.exports = {
  ...require('./geometric-differences')
}

View File

@@ -0,0 +1,83 @@
const math = require('mathjs');
/**
 * Compute PCA (eigenvectors and eigenvalues) for deviation data to assess geometric repeatability.
 *
 * Centres the (i, j) deviations, builds their 2x2 covariance matrix and
 * eigen-decomposes it; axes are reported sorted by descending variance.
 *
 * @param {Array<Array<number>>} deviations - Array of [point, line, i_deviation, j_deviation]
 *   (only columns 2 and 3 are read)
 * @returns {Object} - { eigenvalues, eigenvectors, rms, anisotropy, primaryDirection }
 */
function computePCA(deviations) {
  // Extract i_deviation and j_deviation
  const deviationMatrix = deviations.map(row => [row[2], row[3]]);
  // Convert to mathjs matrix
  const D = math.matrix(deviationMatrix);
  // Compute mean for centering (1 x 2 matrix)
  const mean = math.mean(D, 0);
  // Manually repeat-mean to match D's shape (n x 2)
  const n = deviationMatrix.length;
  const meanArr = mean.toArray();
  const meanRepeated = math.matrix(
    Array(n).fill().map(() => [meanArr[0], meanArr[1]])
  );
  // Center the data
  const centered = math.subtract(D, meanRepeated);
  // Compute covariance matrix: (1/(n-1)) * (D_centered^T * D_centered)
  const covMatrix = math.multiply(
    math.multiply(1 / (n - 1), math.transpose(centered)),
    centered
  );
  // Perform eigen decomposition
  const result = math.eigs(covMatrix);
  let eigenvalues = result.values;
  const evObjs = result.eigenvectors;
  // Convert eigenvalues to array if it's a matrix
  eigenvalues = Array.isArray(eigenvalues) ? eigenvalues : eigenvalues.toArray();
  // Create pairs and convert vector to array if necessary
  const pairs = eigenvalues.map((val, i) => {
    let vec = evObjs[i].vector;
    if (vec.toArray) vec = vec.toArray();
    return { val, vec };
  });
  // Sort by descending eigenvalues
  pairs.sort((a, b) => b.val - a.val);
  // Sorted eigenvalues
  const sortedEigenvalues = pairs.map(p => p.val);
  // Build eigenvector matrix: rows as components, columns as eigenvectors
  const dimension = pairs[0].vec.length; // e.g., 2
  const evecRows = [];
  for (let comp = 0; comp < dimension; comp++) {
    evecRows.push(pairs.map(p => p.vec[comp]));
  }
  const sortedEigenvectors = math.matrix(evecRows);
  // Compute RMS errors along principal axes (clamped at 0 to guard
  // against tiny negative eigenvalues from numerical noise)
  const rms = sortedEigenvalues.map(val => Math.sqrt(Math.max(val, 0)));
  // Compute anisotropy (ratio of major to minor axis variance)
  // NOTE(review): the `|| 1` fallback avoids division by zero, but it
  // makes the ratio equal the raw major variance when the minor
  // variance is 0 — confirm this is the intended degenerate behaviour.
  const anisotropy = sortedEigenvalues[0] / (sortedEigenvalues[1] || 1); // Avoid division by zero
  // Primary direction (angle in degrees of major eigenvector)
  const primaryVector = sortedEigenvectors.subset(math.index([0, 1], 0)).toArray();
  const primaryDirection = Math.atan2(primaryVector[1], primaryVector[0]) * 180 / Math.PI;
  return {
    eigenvalues: sortedEigenvalues,
    eigenvectors: sortedEigenvectors.toArray(),
    rms: rms, // RMS errors along major/minor axes
    anisotropy: anisotropy, // Ratio of variances
    primaryDirection: primaryDirection // Angle of major axis (degrees)
  };
}
module.exports = { computePCA };

View File

@@ -0,0 +1,310 @@
const d3 = require('d3-array');
// Root mean square (RMS) of 2-D position deviations:
//   sqrt( (1/n) * Σ(δi² + δj²) )
// i.e. the RMS of the Euclidean distances. Deviations are assumed to be
// already centred (mean deviation ~0); adjust here if normalisation by
// std dev or range is ever needed. Returns 0 for empty input; warns and
// uses the shorter length if the two arrays differ in size.
function ijRMS(δi, δj) {
  if (!δi.length || !δj.length) return 0;
  if (δi.length != δj.length) {
    console.warn(`δi and δj have different lengths!`);
  }
  const count = Math.min(δi.length, δj.length);
  let total = 0;
  for (let k = 0; k < count; k++) {
    total += δi[k] ** 2 + δj[k] ** 2;
  }
  return Math.sqrt(total / count);
}
/**
 * Performs stratified sampling on an array of [line, point, δi, δj] data points.
 * Groups by line and samples proportionally to preserve shape and spread.
 *
 * Superseded by stratifiedSample(), which additionally stratifies within
 * each line by δi quantiles; kept exported for comparison/debugging.
 *
 * @param {Array<Array<number>>} data - Input data: [[line, point, δi, δj], ...]
 * @param {number} sampleSize - Target number of samples (e.g., 2000)
 * @returns {Array<Array<number>>} Sampled data in same format
 */
function old_stratifiedSample(data, sampleSize) {
  if (!Array.isArray(data) || data.length === 0) return [];
  if (!Number.isInteger(sampleSize) || sampleSize <= 0) {
    throw new Error('sampleSize must be a positive integer');
  }
  // Group data by line (first element)
  const grouped = d3.group(data, d => d[0]);
  const totalSize = data.length;
  const sampled = [];
  // Ensure sampleSize doesn't exceed data size
  const effectiveSampleSize = Math.min(sampleSize, totalSize);
  // Iterate over each line group
  for (const [line, group] of grouped) {
    // Calculate proportional sample size for this group.
    // Math.max(1, …) guarantees every line is represented at least once,
    // so the total can overshoot slightly; corrected below.
    const groupSize = group.length;
    const groupSampleSize = Math.max(1, Math.round((groupSize / totalSize) * effectiveSampleSize));
    // Shuffle a copy (so the group is not mutated) and take the first N
    const shuffled = d3.shuffle([...group]);
    sampled.push(...shuffled.slice(0, groupSampleSize));
  }
  // If sampled size is slightly off due to rounding, adjust
  if (sampled.length > effectiveSampleSize) {
    return d3.shuffle(sampled).slice(0, effectiveSampleSize);
  } else if (sampled.length < effectiveSampleSize) {
    // Pad with random samples from entire dataset if needed.
    // NOTE(review): `includes` makes this top-up O(n·m) — acceptable for
    // expected sizes, revisit for very large data sets.
    const remaining = effectiveSampleSize - sampled.length;
    const additional = d3.shuffle(data.filter(d => !sampled.includes(d))).slice(0, remaining);
    sampled.push(...additional);
  }
  return sampled;
}
/**
 * Performs stratified sampling on an array of [line, point, δi, δj] data points.
 * Stratifies by line and δi quantiles to preserve shape and spread, with outlier control.
 *
 * @param {Array<Array<number>>} data - Input data: [[line, point, δi, δj], ...]
 * @param {number} sampleSize - Target number of samples (e.g., 2000)
 * @param {number} [binsPerLine=10] - Number of δi quantile bins per line
 * @returns {Array<Array<number>>} Sampled data in same format
 * @throws {Error} When sampleSize or binsPerLine is not a positive integer.
 */
function stratifiedSample(data, sampleSize, binsPerLine = 10) {
  if (!Array.isArray(data) || data.length === 0) return [];
  if (!Number.isInteger(sampleSize) || sampleSize <= 0) {
    throw new Error('sampleSize must be a positive integer');
  }
  if (!Number.isInteger(binsPerLine) || binsPerLine <= 0) {
    throw new Error('binsPerLine must be a positive integer');
  }
  const totalSize = data.length;
  const effectiveSampleSize = Math.min(sampleSize, totalSize);
  const sampled = [];
  // Group by line
  const groupedByLine = d3.group(data, d => d[0]);
  // Population stats, used below to validate the sample
  const populationStats = computeStats(data);
  // Relative difference with a guarded denominator. The deviations are
  // typically centred (μ ≈ 0), so a raw division by μ would blow up —
  // and with a negative reference value the original `> tolerance` test
  // could never fire (fixed).
  const relDiff = (sample, reference) =>
    Math.abs(sample - reference) / (Math.abs(reference) || 1);
  // Iterate over each line
  for (const [line, group] of groupedByLine) {
    const groupSize = group.length;
    const lineSampleSize = Math.max(1, Math.round((groupSize / totalSize) * effectiveSampleSize));
    // Create quantile-based bins for δi
    const δiValues = group.map(d => d[2]).sort(d3.ascending);
    const quantiles = d3.range(0, binsPerLine + 1).map(i => d3.quantile(δiValues, i / binsPerLine));
    const binnedData = group.map(d => {
      const δi = d[2];
      let binIndex = 0;
      for (let i = 0; i < binsPerLine; i++) {
        // Bins are half-open [qᵢ, qᵢ₊₁) except the last, which is closed.
        // Fixed: previously the maximum δi failed every `< qᵢ₊₁` test and
        // was silently mis-filed into bin 0.
        const withinUpper = δi < quantiles[i + 1] ||
          (i === binsPerLine - 1 && δi <= quantiles[i + 1]);
        if (δi >= quantiles[i] && withinUpper) {
          binIndex = i;
          break;
        }
      }
      return { data: d, bin: binIndex };
    });
    const groupedByBin = d3.group(binnedData, d => d.bin);
    // Allocate samples across bins, inversely weighted by density to control outliers
    const binSampleSizes = new Map();
    let remainingLineSamples = lineSampleSize;
    const binCounts = Array(binsPerLine).fill(0);
    for (const [bin, binGroup] of groupedByBin) {
      binCounts[bin] = binGroup.length;
    }
    const maxBinCount = d3.max(binCounts);
    for (const [bin, binGroup] of groupedByBin) {
      const binSize = binGroup.length;
      // Inverse weighting: smaller bins (outliers) get fewer samples
      const weight = binSize > 0 ? Math.max(0.1, 1 - (binSize / maxBinCount) * 0.5) : 1;
      const binSampleSize = Math.max(1, Math.round(lineSampleSize * (binSize / groupSize) * weight));
      binSampleSizes.set(bin, Math.min(binSampleSize, binSize));
      remainingLineSamples -= binSampleSizes.get(bin);
    }
    // Distribute remaining samples round-robin over the non-empty bins
    if (remainingLineSamples > 0) {
      const nonEmptyBins = Array.from(groupedByBin.keys());
      for (let i = 0; i < remainingLineSamples && nonEmptyBins.length > 0; i++) {
        const bin = nonEmptyBins[i % nonEmptyBins.length];
        binSampleSizes.set(bin, binSampleSizes.get(bin) + 1);
      }
    }
    // Sample from each bin
    for (const [bin, binGroup] of groupedByBin) {
      const samples = d3.shuffle([...binGroup]).slice(0, binSampleSizes.get(bin)).map(s => s.data);
      sampled.push(...samples);
    }
  }
  // Adjust sample size (rounding may over- or undershoot the target)
  let finalSample = sampled;
  if (sampled.length > effectiveSampleSize) {
    finalSample = d3.shuffle(sampled).slice(0, effectiveSampleSize);
  } else if (sampled.length < effectiveSampleSize) {
    const remaining = effectiveSampleSize - sampled.length;
    const additional = d3.shuffle(data.filter(d => !sampled.includes(d))).slice(0, remaining);
    finalSample = [...sampled, ...additional];
  }
  // Validate and adjust if stats are off
  const sampleStats = computeStats(finalSample);
  const statTolerance = { μ: 0.1, σ: 0.2 }; // Allowable relative deviation
  const needsAdjustment =
    relDiff(sampleStats.μ[0], populationStats.μ[0]) > statTolerance.μ ||
    relDiff(sampleStats.μ[1], populationStats.μ[1]) > statTolerance.μ ||
    relDiff(sampleStats.σ[0], populationStats.σ[0]) > statTolerance.σ ||
    relDiff(sampleStats.σ[1], populationStats.σ[1]) > statTolerance.σ;
  if (needsAdjustment) {
    // Add points from underrepresented regions.
    // NOTE(review): d3.histogram was renamed d3.bin and the alias removed
    // in d3-array 3.x — confirm the installed d3-array version.
    const δiSample = finalSample.map(d => d[2]);
    const δiPopulation = data.map(d => d[2]);
    const quantiles = d3.range(0, binsPerLine + 1).map(i => d3.quantile(δiPopulation, i / binsPerLine));
    const sampleBins = d3.histogram().domain(d3.extent(δiPopulation)).thresholds(quantiles)(δiSample);
    const populationBins = d3.histogram().domain(d3.extent(δiPopulation)).thresholds(quantiles)(δiPopulation);
    const underSampledBins = sampleBins
      .map((b, i) => ({ bin: i, diff: populationBins[i].length / totalSize - b.length / finalSample.length }))
      .filter(b => b.diff > 0.1); // Significant under-sampling
    if (underSampledBins.length > 0) {
      const additionalSamples = [];
      for (const { bin } of underSampledBins) {
        const binData = data.filter(d => d[2] >= quantiles[bin] && d[2] < quantiles[bin + 1] && !finalSample.includes(d));
        const needed = Math.round((underSampledBins[0].diff * effectiveSampleSize) / 2);
        additionalSamples.push(...d3.shuffle(binData).slice(0, needed));
      }
      finalSample = d3.shuffle([...finalSample, ...additionalSamples]).slice(0, effectiveSampleSize);
    }
  }
  return finalSample;
}
// Keep every `decimationCount`-th row (indices 0, k, 2k, …) of `data`.
// A step of 0 yields an empty result (index % 0 is NaN), matching the
// historical behaviour.
function decimate (data, decimationCount = 20) {
  const kept = [];
  for (let index = 0; index < data.length; index++) {
    if (index % decimationCount === 0) {
      kept.push(data[index]);
    }
  }
  return kept;
}
/**
 * Draw a representative sample from a comparison data set.
 *
 * @param {Object} data - Must expose `records`: [[line, point, δi, δj], ...]
 * @param {Object} [opts] - Sampling options.
 * @param {boolean|number} [opts.decimate] - `true`: decimate down to roughly
 *   `opts.sampleSize` (or the default) rows; a positive number: use it as
 *   the decimation step directly.
 * @param {number} [opts.sampleSize] - Target sample size for stratified sampling.
 * @returns {Array<Array<number>>} The sampled rows.
 */
function computeSample (data, opts = {}) {
  const DEFAULT_SAMPLE_SIZE = 2000;
  if (opts.decimate === true) {
    // Derive a decimation step from the requested (or default) target size.
    const target = opts.sampleSize > 0 ? opts.sampleSize : DEFAULT_SAMPLE_SIZE;
    // Guard: a step below 1 (target larger than the data) would produce
    // `index % 0` (NaN) in decimate() and silently drop every row.
    const step = Math.max(1, Math.floor(data.records.length / target));
    return decimate(data.records, step);
  }
  if (opts.decimate > 0) {
    return decimate(data.records, opts.decimate);
  }
  // Fixed: this branch referenced the undeclared `opt.sampleSize`, which
  // threw a ReferenceError whenever opts.sampleSize was set.
  return stratifiedSample(data.records, opts.sampleSize || DEFAULT_SAMPLE_SIZE);
}
// Summary statistics of a [line, point, δi, δj] data set: row count,
// per-axis means (μ) and standard deviations (σ), and the combined RMS
// of the deviation magnitudes.
function computeStats(data) {
  const iDeviations = data.map(row => row[2]);
  const jDeviations = data.map(row => row[3]);
  const meanSquare = d3.mean(data, row => row[2] ** 2 + row[3] ** 2);
  return {
    l: data.length,
    μ: [d3.mean(iDeviations), d3.mean(jDeviations)],
    σ: [d3.deviation(iDeviations), d3.deviation(jDeviations)],
    rms: Math.sqrt(meanSquare)
  };
}
// Re-centre the deviations about their mean so that μ ≈ [0, 0];
// line and point identifiers are passed through unchanged.
function centre (data) {
  const { μ } = computeStats(data);
  return data.map(([line, point, δi, δj]) => [line, point, δi - μ[0], δj - μ[1]]);
}
/**
 * Rows whose deviation exceeds `sd` standard deviations from the mean on
 * either axis.
 *
 * Fixed: the test now uses the absolute distance from the mean —
 * previously only positive deviations were flagged, so negative outliers
 * were silently treated as inliers.
 *
 * @param {Array<Array<number>>} data - [[line, point, δi, δj], ...]
 * @param {number} [sd=1.96] - Threshold in standard deviations
 *                             (1.96 ≈ two-sided 95%).
 * @returns {Array<Array<number>>} The outlying rows.
 */
function outliers (data, sd=1.96) {
  const stats = computeStats(data);
  function fn ([l, p, i, j]) {
    return Math.abs(i - stats.μ[0]) > stats.σ[0]*sd ||
           Math.abs(j - stats.μ[1]) > stats.σ[1]*sd;
  }
  return data.filter(fn)
}
/**
 * Rows within `sd` standard deviations of the mean on both axes — the
 * complement of outliers().
 *
 * Fixed: uses the absolute distance from the mean, mirroring the fix in
 * outliers(); previously arbitrarily negative deviations always counted
 * as inliers.
 *
 * @param {Array<Array<number>>} data - [[line, point, δi, δj], ...]
 * @param {number} [sd=1.96] - Threshold in standard deviations
 *                             (1.96 ≈ two-sided 95%).
 * @returns {Array<Array<number>>} The inlying rows.
 */
function inliers (data, sd=1.96) {
  const stats = computeStats(data);
  function fn ([l, p, i, j]) {
    return Math.abs(i - stats.μ[0]) <= stats.σ[0]*sd &&
           Math.abs(j - stats.μ[1]) <= stats.σ[1]*sd;
  }
  return data.filter(fn)
}
// Element-wise numeric difference (b - a) of two structurally similar
// objects or arrays. Recurses into nested objects; keys whose values do
// not both coerce to numbers are dropped from the result.
function difference (a, b) {
  const result = Array.isArray(a) ? [] : {};
  for (const key in a) {
    const left = a[key];
    const right = b[key]
    if (left instanceof Object && right instanceof Object) {
      result[key] = difference(left, right);
    } else if (!Number.isNaN(Number(left)) && !Number.isNaN(Number(right))) {
      result[key] = right - left;
    }
  }
  return result;
}
// All n-element combinations of array `a`, in lexicographic index order.
// combinations([1,2,3], 2) → [[1,2],[1,3],[2,3]]; n = 0 yields [[]].
function combinations (a, n) {
  const out = [];
  const pick = (startIndex, chosen) => {
    if (chosen.length === n) {
      out.push(chosen.slice());
      return;
    }
    for (let idx = startIndex; idx < a.length; idx++) {
      chosen.push(a[idx]);
      pick(idx + 1, chosen);
      chosen.pop();
    }
  };
  pick(0, []);
  return out;
}
// Public API: statistics, sampling, centring and diff helpers for
// [line, point, δi, δj] comparison data.
module.exports = {
  combinations,
  centre,
  ijRMS,
  computeStats,
  computeSample,
  stratifiedSample,
  old_stratifiedSample,
  decimate,
  difference,
  outliers,
  inliers
}

View File

@@ -10,5 +10,6 @@ module.exports = [
"planned_lines",
"raw_lines", "raw_shots",
"final_lines", "final_shots", "info",
"queue_items"
"queue_items",
"comparisons",
];

View File

@@ -0,0 +1,105 @@
const { DEBUG, ERROR } = require('DOUGAL_ROOT/debug')(__filename);
const { setSurvey, transaction } = require('../connection');
/** Remove a previous import from the database.
 *
 * ATTENTION!
 *
 * This will not just mark the events as deleted but actually
 * remove them.
 *
 * Deletion is keyed on the importing author's recorded filename: rows
 * match when meta->(meta->>'author')->>'filename' equals `filename`.
 *
 * @param {*} projectId - Survey/project identifier passed to setSurvey().
 * @param {string} filename - Source filename whose imported events are removed.
 * @param {Object} [opts] - Optional settings.
 * @param {Object} [opts.client] - Existing DB client to reuse; when given,
 *                                 the caller keeps ownership and releases it.
 */
async function bulk_unimport (projectId, filename, opts = {}) {
  // Reuse the caller's client when provided (so this can run inside the
  // caller's transaction); otherwise acquire our own.
  const client = opts.client ?? await setSurvey(projectId);
  try {
    const text = `
      DELETE
      FROM event_log
      WHERE meta ? 'author'
      AND meta->(meta->>'author')->>'filename' = $1;
    `;
    const values = [ filename ];
    DEBUG("Removing all event data imported from filename '%s'", filename);
    await client.query(text, values);
  } catch (err) {
    err.origin = __filename;
    throw err;
  } finally {
    // Only release a client we acquired ourselves.
    if (client !== opts.client) client.release();
  }
  return;
}
/**
 * Bulk-import event rows from a single source file, replacing any rows
 * previously imported from that file. Delete + insert run in one
 * transaction on the same client.
 *
 * @param {*} projectId - Survey/project identifier passed to setSurvey().
 * @param {Array<Object>} payload - Events: {tstamp, sequence, point, remarks, labels, meta}.
 * @param {Object} [opts] - Optional settings.
 * @param {Object} [opts.client] - Existing DB client to reuse; when given,
 *                                 the caller keeps ownership and releases it.
 * @returns {Promise<Array>} ids of the inserted rows; [] when payload is empty.
 */
async function bulk_import (projectId, payload, opts = {}) {
  const client = opts.client ?? await setSurvey(projectId);
  let inTransaction = false;
  try {
    if (!payload.length) {
      DEBUG("Called with no rows to be imported. Returning");
      return [];
    }
    // All rows of one payload come from the same file; read it off the
    // first row's author metadata.
    const filename = payload[0].meta[payload[0].meta.author].filename;
    // Delete previous data from this file inside the same transaction
    // as the insert (replace semantics).
    await transaction.begin(client);
    inTransaction = true;
    await bulk_unimport(projectId, filename, {client});
    // Column-wise arrays for a single multi-row INSERT via UNNEST
    const tstamps = [];
    const sequences = [];
    const points = [];
    const remarks = [];
    const labels = [];
    const metas = [];
    for (const event of payload) {
      tstamps.push(event.tstamp ? new Date(event.tstamp) : null);
      sequences.push(Number.isInteger(event.sequence) ? event.sequence : null);
      points.push(Number.isInteger(event.point) ? event.point : null);
      remarks.push(event.remarks || '');
      // NOTE(review): Postgres array literals escape embedded double quotes
      // with a backslash, not by doubling — confirm `""` round-trips as
      // intended for labels containing quotes.
      labels.push(Array.isArray(event.labels) && event.labels.length
        ? `{${event.labels.map(l => `"${l.replace(/"/g, '""')}"`).join(',')}}`
        : '{}'
      );
      metas.push(event.meta ? JSON.stringify(event.meta) : '{}');
    }
    const text = `
      INSERT INTO event_log (tstamp, sequence, point, remarks, labels, meta)
      SELECT
      UNNEST($1::TIMESTAMP[]) AS tstamp,
      UNNEST($2::INTEGER[]) AS sequence,
      UNNEST($3::INTEGER[]) AS point,
      replace_placeholders(UNNEST($4::TEXT[]), UNNEST($1::TIMESTAMP[]), UNNEST($2::INTEGER[]), UNNEST($3::INTEGER[])) AS remarks,
      UNNEST($5::TEXT[])::TEXT[] AS labels,
      UNNEST($6::JSONB[]) AS meta
      RETURNING id;
    `;
    const values = [ tstamps, sequences, points, remarks, labels, metas ];
    DEBUG("Importing %d rows from filename '%s'", payload.length, filename);
    const res = await client.query(text, values);
    // Fixed: the commit was not awaited, so commit failures were silently
    // dropped and the function could return before the data was durable.
    await transaction.commit(client);
    inTransaction = false;
    return res.rows.map(row => row.id);
  } catch (err) {
    // Fixed: roll back an open transaction so the client is not released
    // back to the pool mid-transaction.
    // Assumes transaction.rollback exists alongside begin/commit — TODO confirm.
    if (inTransaction) {
      try { await transaction.rollback(client); } catch (_) { /* best effort */ }
    }
    err.origin = __filename;
    throw err;
  } finally {
    // Only release a client we acquired ourselves.
    if (client !== opts.client) client.release();
  }
}
// Exposed as events.import() / events.unimport(); the implementation names
// avoid the reserved word `import`.
module.exports = { import: bulk_import, unimport: bulk_unimport };

View File

@@ -6,5 +6,7 @@ module.exports = {
put: require('./put'),
patch: require('./patch'),
del: require('./delete'),
changes: require('./changes')
changes: require('./changes'),
import: require('./import').import,
unimport: require('./import').unimport,
}

View File

@@ -0,0 +1,37 @@
const { DEBUG, ERROR } = require('DOUGAL_ROOT/debug')(__filename);
const { setSurvey, transaction } = require('../connection');
/** Remove a previous import from the database.
 *
 * ATTENTION!
 *
 * This will not just mark the events as deleted but actually
 * remove them.
 *
 * @param {*} projectId - Survey/project identifier passed to setSurvey().
 * @param {string} filename - Source filename whose imported events are removed.
 * @param {Object} [opts] - Currently unused; kept for interface symmetry
 *                          with the bulk import/unimport implementation.
 */
async function unimport (projectId, filename, opts = {}) {
  const client = await setSurvey(projectId);
  try {
    // Events imported from a file record the importing author under
    // meta, and the source filename under meta[author].filename.
    // Fixed: the inner author lookup used `->` (returns JSONB), which is
    // not a valid key operand for the outer `->`; `->>` (returns TEXT)
    // matches the sibling bulk unimport implementation.
    const text = `
      DELETE
      FROM event_log
      WHERE meta ? 'author'
      AND meta->(meta->>'author')->>'filename' = $1;
    `;
    const values = [ filename ];
    DEBUG("Removing all event data imported from filename '%s'", filename);
    await client.query(text, values);
  } catch (err) {
    err.origin = __filename;
    throw err;
  } finally {
    client.release();
  }
  return;
}
// Fixed: previously exported the undefined identifier `post`, which threw
// a ReferenceError as soon as this module was required.
module.exports = unimport;

View File

@@ -1,52 +0,0 @@
const Queue = require('./queue');
// Inspired by:
// https://stackoverflow.com/questions/53540348/js-async-await-tasks-queue#53540586
/**
 * A queue that serialises asynchronous actions: each enqueued action runs
 * only after the previous one has settled.
 */
class ActionsQueue extends Queue {
  constructor (items = []) {
    super(items);
    // True while an action is executing; acts as a re-entrancy guard so
    // only one action runs at a time.
    this.pending = false;
  }
  /**
   * Queue an action (an async function receiving this queue) and kick the
   * worker. The returned promise settles with the action's own result.
   *
   * @param {Function} action - Called as action(queue) when its turn comes.
   * @returns {Promise<*>} Resolves/rejects with the action's outcome.
   */
  enqueue (action) {
    return new Promise ((resolve, reject) => {
      super.enqueue({ action, resolve, reject });
      this.dequeue();
    });
  }
  /**
   * Run the next queued action unless one is already running.
   * Returns false when busy or when the queue is empty.
   */
  async dequeue () {
    if (this.pending) {
      return false;
    }
    const item = super.dequeue();
    if (!item) {
      return false;
    }
    try {
      // The flag is cleared before resolve/reject so a continuation that
      // enqueues more work is not blocked by a stale guard.
      this.pending = true;
      const result = await item.action(this);
      this.pending = false;
      item.resolve(result);
    } catch (err) {
      this.pending = false;
      item.reject(err);
    } finally {
      // Chain to the next queued action; intentionally not awaited.
      this.dequeue();
    }
  }
}
module.exports = ActionsQueue;

View File

@@ -1,6 +0,0 @@
// Barrel module collecting the queue implementations.
module.exports = {
  Queue: require('./queue'),
  ActionsQueue: require('./actions-queue')
};

View File

@@ -1,22 +0,0 @@
/**
 * Minimal FIFO queue backed by a plain array.
 *
 * Note: the array passed to the constructor is kept by reference, so the
 * caller and the queue share storage.
 */
class Queue {
  // items: initial queue contents; the front of the queue is index 0.
  constructor (items = []) {
    this.items = items;
  }
  // Append an item at the back of the queue.
  enqueue (item) {
    this.items.push(item);
  }
  // Remove and return the front item (undefined when empty).
  dequeue () {
    return this.items.shift();
  }
  // Current number of queued items.
  length () {
    return this.items.length;
  }
}
module.exports = Queue;

View File

@@ -1,52 +1,110 @@
// TODO Append location to PATH
const path = require('path');
const fs = require('fs');
const {Builder, By, Key, until} = require('selenium-webdriver');
const firefox = require('selenium-webdriver/firefox');
const { Builder, By, Key, until } = require('selenium-webdriver');
const firefox = require('selenium-webdriver/firefox');
const { execSync } = require('child_process');
const geckodriverPath = path.resolve(__dirname, "geckodriver");
// We launch a browser instance and then start an activity timer.
// We shut down the browser after a period of inactivity, to
// save memory.
// State to prevent race conditions
let driver = null;
let timer = null;
let isShuttingDown = false;
function resetTimer () {
clearTimeout(timer);
timer = setTimeout(shutdown, 120000); // Yup, hardcoded to two minutes. For now anyway
// Verify GeckoDriver exists
if (!fs.existsSync(geckodriverPath)) {
throw new Error(`GeckoDriver not found at ${geckodriverPath}`);
}
async function launch () {
function resetTimer() {
clearTimeout(timer);
timer = setTimeout(shutdown, 120000); // 2 minutes inactivity timeout
}
async function launch() {
if (isShuttingDown) {
console.log("Shutdown in progress, waiting...");
await new Promise(resolve => setTimeout(resolve, 1000));
return launch(); // Retry after delay
}
resetTimer();
if (!driver) {
console.log("Launching Firefox");
const options = new firefox.Options();
// Explicitly set headless mode and optimize for server
options.addArguments('--headless', '--no-sandbox', '--disable-gpu');
// Limit content processes to reduce resource usage
options.setPreference('dom.ipc.processCount', 1);
const service = new firefox.ServiceBuilder(geckodriverPath);
driver = await new Builder()
.forBrowser('firefox')
.setFirefoxService(new firefox.ServiceBuilder(geckodriverPath))
.setFirefoxOptions(options.headless())
.setFirefoxService(service)
.setFirefoxOptions(options)
.build();
}
}
async function shutdown () {
if (driver) {
async function shutdown() {
if (driver && !isShuttingDown) {
isShuttingDown = true;
console.log("Shutting down Firefox");
// This is an attempt at avoiding a race condition if someone
// makes a call and resets the timer while the shutdown is in
// progress.
const d = driver;
driver = null;
await d.quit();
try {
const d = driver;
driver = null;
await d.quit();
// Explicitly stop the service
const service = d.service;
if (service) {
service.stop();
}
console.log("Firefox shutdown complete");
} catch (error) {
console.error("Error during shutdown:", error);
// Forcefully kill lingering processes (Linux/Unix)
try {
execSync('pkill -u $USER firefox || true');
execSync('pkill -u $USER geckodriver || true');
console.log("Terminated lingering Firefox/GeckoDriver processes");
} catch (killError) {
console.error("Error killing processes:", killError);
}
} finally {
isShuttingDown = false;
}
}
}
async function url2pdf (url) {
async function url2pdf(url) {
await launch();
await driver.get(url);
return await driver.printPage({width: 21.0, height: 29.7});
try {
console.log(`Navigating to ${url}`);
await driver.get(url);
// Add delay to stabilize Marionette communication
await driver.sleep(3000);
const pdf = await driver.printPage({ width: 21.0, height: 29.7 });
resetTimer(); // Reset timer after successful operation
return pdf;
} catch (error) {
console.error("Error in url2pdf:", error);
await shutdown(); // Force shutdown on error
throw error;
}
}
// Periodically clean up orphaned processes (every 5 minutes)
setInterval(() => {
try {
const firefoxCount = execSync('pgrep -c firefox || echo 0').toString().trim();
if (parseInt(firefoxCount) > 0 && !driver) {
console.log(`Found ${firefoxCount} orphaned Firefox processes, cleaning up...`);
execSync('pkill -u $USER firefox || true');
execSync('pkill -u $USER geckodriver || true');
console.log("Cleaned up orphaned processes");
}
} catch (error) {
console.error("Error checking orphaned processes:", error);
}
}, 300000);
module.exports = { url2pdf };

View File

@@ -0,0 +1,52 @@
// Value-based de-duplication: items are compared by their JSON
// serialisation (key order matters for objects). Returns parsed clones,
// not the original references.
function unique(arr) {
  const serialised = arr.map(item => JSON.stringify(item));
  const deduped = Array.from(new Set(serialised));
  return deduped.map(text => JSON.parse(text));
}
// Items appearing more than once (by JSON serialisation), each reported a
// single time, in order of first repetition. Returns parsed clones.
function duplicates(arr) {
  const seen = new Set();
  const repeated = new Set();
  for (const item of arr) {
    const key = JSON.stringify(item);
    (seen.has(key) ? repeated : seen).add(key);
  }
  return Array.from(repeated, text => JSON.parse(text));
}
// Union of two arrays with value-based (JSON) de-duplication; arr1's
// items come first, then arr2's unseen items. Returns parsed clones.
function union(arr1, arr2) {
  const combined = [...arr1, ...arr2].map(item => JSON.stringify(item));
  return Array.from(new Set(combined), text => JSON.parse(text));
}
// Items of arr1 also present (by JSON value) in arr2. Keeps arr1's order,
// duplicates, and original references.
function intersection(arr1, arr2) {
  const inSecond = new Set(arr2.map(item => JSON.stringify(item)));
  const kept = [];
  for (const item of arr1) {
    if (inSecond.has(JSON.stringify(item))) kept.push(item);
  }
  return kept;
}
// Items of arr1 not present (by JSON value) in arr2. Keeps arr1's order,
// duplicates, and original references.
function difference(arr1, arr2) {
  const exclude = new Set(arr2.map(item => JSON.stringify(item)));
  const kept = [];
  for (const item of arr1) {
    if (!exclude.has(JSON.stringify(item))) kept.push(item);
  }
  return kept;
}
// Items in exactly one of the two arrays (by JSON value): arr1-only items
// first, then arr2-only items. Keeps original references.
function symmetricDifference(arr1, arr2) {
  const first = new Set(arr1.map(item => JSON.stringify(item)));
  const second = new Set(arr2.map(item => JSON.stringify(item)));
  const onlyInFirst = arr1.filter(item => !second.has(JSON.stringify(item)));
  const onlyInSecond = arr2.filter(item => !first.has(JSON.stringify(item)));
  return [...onlyInFirst, ...onlyInSecond];
}
// Set-style operations over arrays of JSON-serialisable values.
module.exports = {
  unique,
  duplicates,
  union,
  intersection,
  difference,
  symmetricDifference
}

View File

@@ -29,7 +29,9 @@
"@dougal/binary": "file:../../modules/@dougal/binary",
"@dougal/organisations": "file:../../modules/@dougal/organisations",
"@dougal/user": "file:../../modules/@dougal/user",
"async": "^3.2.6",
"body-parser": "gitlab:aaltronav/contrib/expressjs/body-parser",
"busboy": "^1.6.0",
"compression": "^1.8.1",
"cookie-parser": "^1.4.5",
"csv": "^6.3.3",
@@ -42,6 +44,7 @@
"jsonwebtoken": "^9.0.2",
"leaflet-headless": "git+https://git@gitlab.com/aaltronav/contrib/leaflet-headless.git#devel",
"marked": "^4.0.12",
"mathjs": "^14.6.0",
"node-fetch": "^2.6.1",
"nunjucks": "^3.2.3",
"path-to-regexp": "^6.2.1",

150
package-lock.json generated
View File

@@ -1678,17 +1678,6 @@
"dev": true,
"license": "MIT"
},
"lib/www/client/source/node_modules/@babel/runtime": {
"version": "7.23.2",
"dev": true,
"license": "MIT",
"dependencies": {
"regenerator-runtime": "^0.14.0"
},
"engines": {
"node": ">=6.9.0"
}
},
"lib/www/client/source/node_modules/@babel/template": {
"version": "7.27.2",
"dev": true,
@@ -7524,11 +7513,6 @@
"node": ">=4"
}
},
"lib/www/client/source/node_modules/regenerator-runtime": {
"version": "0.14.0",
"dev": true,
"license": "MIT"
},
"lib/www/client/source/node_modules/regenerator-transform": {
"version": "0.15.2",
"dev": true,
@@ -9359,7 +9343,9 @@
"@dougal/binary": "file:../../modules/@dougal/binary",
"@dougal/organisations": "file:../../modules/@dougal/organisations",
"@dougal/user": "file:../../modules/@dougal/user",
"async": "^3.2.6",
"body-parser": "gitlab:aaltronav/contrib/expressjs/body-parser",
"busboy": "^1.6.0",
"compression": "^1.8.1",
"cookie-parser": "^1.4.5",
"csv": "^6.3.3",
@@ -9372,6 +9358,7 @@
"jsonwebtoken": "^9.0.2",
"leaflet-headless": "git+https://git@gitlab.com/aaltronav/contrib/leaflet-headless.git#devel",
"marked": "^4.0.12",
"mathjs": "^14.6.0",
"node-fetch": "^2.6.1",
"nunjucks": "^3.2.3",
"path-to-regexp": "^6.2.1",
@@ -10525,17 +10512,6 @@
"node": ">=6.0.0"
}
},
"lib/www/server/node_modules/redoc-cli/node_modules/@babel/runtime": {
"version": "7.16.7",
"dev": true,
"license": "MIT",
"dependencies": {
"regenerator-runtime": "^0.13.4"
},
"engines": {
"node": ">=6.9.0"
}
},
"lib/www/server/node_modules/redoc-cli/node_modules/@babel/template": {
"version": "7.12.13",
"dev": true,
@@ -12530,11 +12506,6 @@
"url": "https://github.com/Mermade/oas-kit?sponsor=1"
}
},
"lib/www/server/node_modules/redoc-cli/node_modules/regenerator-runtime": {
"version": "0.13.9",
"dev": true,
"license": "MIT"
},
"lib/www/server/node_modules/redoc-cli/node_modules/require-directory": {
"version": "2.1.1",
"dev": true,
@@ -13295,6 +13266,15 @@
"node": ">=0.4"
}
},
"node_modules/@babel/runtime": {
"version": "7.28.3",
"resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.3.tgz",
"integrity": "sha512-9uIQ10o0WGdpP6GDhXcdOJPJuDgFtIDtN/9+ArJQ2NAfAmiuhTQdzkaTGR33v43GYS2UrSA0eX2pPPHoFVvpxA==",
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@deck.gl/aggregation-layers": {
"version": "9.1.13",
"resolved": "https://registry.npmjs.org/@deck.gl/aggregation-layers/-/aggregation-layers-9.1.13.tgz",
@@ -14170,6 +14150,11 @@
"node": ">=0.8"
}
},
"node_modules/async": {
"version": "3.2.6",
"resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz",
"integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA=="
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
@@ -14274,6 +14259,17 @@
"node": ">=0.10.0"
}
},
"node_modules/busboy": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz",
"integrity": "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==",
"dependencies": {
"streamsearch": "^1.1.0"
},
"engines": {
"node": ">=10.16.0"
}
},
"node_modules/bytes": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
@@ -14371,6 +14367,19 @@
"node": ">= 10"
}
},
"node_modules/complex.js": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/complex.js/-/complex.js-2.4.2.tgz",
"integrity": "sha512-qtx7HRhPGSCBtGiST4/WGHuW+zeaND/6Ld+db6PbrulIB1i2Ev/2UPiqcmpQNPSyfBKraC0EOvOKCB5dGZKt3g==",
"license": "MIT",
"engines": {
"node": "*"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/rawify"
}
},
"node_modules/compressible": {
"version": "2.0.18",
"resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz",
@@ -15071,6 +15080,12 @@
"node": ">= 0.4"
}
},
"node_modules/escape-latex": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/escape-latex/-/escape-latex-1.2.0.tgz",
"integrity": "sha512-nV5aVWW1K0wEiUIEdZ4erkGGH8mDxGyxSeqPzRNtWP7ataw+/olFObw7hujFWlVjNsaDFw5VZ5NzVSIqRgfTiw==",
"license": "MIT"
},
"node_modules/escodegen": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz",
@@ -15187,6 +15202,19 @@
"node": ">= 6"
}
},
"node_modules/fraction.js": {
"version": "5.3.1",
"resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.1.tgz",
"integrity": "sha512-PhqCuhSKIGbbkJ+cojHv47eEWClU71FIOhiUsYdZYTwhIzCeIN8rXeEjserTvPat5JLJChumn8chHz64WkZgTw==",
"license": "MIT",
"engines": {
"node": "*"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/rawify"
}
},
"node_modules/fs-minipass": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz",
@@ -15617,6 +15645,12 @@
"resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
"integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g=="
},
"node_modules/javascript-natural-sort": {
"version": "0.7.1",
"resolved": "https://registry.npmjs.org/javascript-natural-sort/-/javascript-natural-sort-0.7.1.tgz",
"integrity": "sha512-nO6jcEfZWQXDhOiBtG2KvKyEptz7RVbpGP4vTD2hLBdmNQSsCiicO2Ioinv6UI4y9ukqnBpy+XZ9H6uLNgJTlw==",
"license": "MIT"
},
"node_modules/jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
@@ -15830,6 +15864,29 @@
"node": ">= 0.4"
}
},
"node_modules/mathjs": {
"version": "14.6.0",
"resolved": "https://registry.npmjs.org/mathjs/-/mathjs-14.6.0.tgz",
"integrity": "sha512-5vI2BLB5GKQmiSK9BH6hVkZ+GgqpdnOgEfmHl7mqVmdQObLynr63KueyYYLCQMzj66q69mV2XZZGQqqxeftQbA==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime": "^7.26.10",
"complex.js": "^2.2.5",
"decimal.js": "^10.4.3",
"escape-latex": "^1.2.0",
"fraction.js": "^5.2.1",
"javascript-natural-sort": "^0.7.1",
"seedrandom": "^3.0.5",
"tiny-emitter": "^2.1.0",
"typed-function": "^4.2.1"
},
"bin": {
"mathjs": "bin/cli.js"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/md5": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz",
@@ -16325,6 +16382,12 @@
"node": ">=10"
}
},
"node_modules/seedrandom": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-3.0.5.tgz",
"integrity": "sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==",
"license": "MIT"
},
"node_modules/semver": {
"version": "7.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
@@ -16504,6 +16567,14 @@
"node": ">= 0.8"
}
},
"node_modules/streamsearch": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz",
"integrity": "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==",
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/string_decoder": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
@@ -16580,6 +16651,12 @@
"texture-compressor": "bin/texture-compressor.js"
}
},
"node_modules/tiny-emitter": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/tiny-emitter/-/tiny-emitter-2.1.0.tgz",
"integrity": "sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q==",
"license": "MIT"
},
"node_modules/toidentifier": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
@@ -16641,6 +16718,15 @@
"node": ">= 0.6"
}
},
"node_modules/typed-function": {
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/typed-function/-/typed-function-4.2.1.tgz",
"integrity": "sha512-EGjWssW7Tsk4DGfE+5yluuljS1OGYWiI1J6e8puZz9nTMM51Oug8CD5Zo4gWMsOhq5BI+1bF+rWTm4Vbj3ivRA==",
"license": "MIT",
"engines": {
"node": ">= 18"
}
},
"node_modules/undici-types": {
"version": "7.8.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.8.0.tgz",