Mirror of https://gitlab.com/wgp/dougal/software.git (synced 2025-12-06 13:17:08 +00:00)

Compare commits: 66 commits, 271-qc-res ... 281-modify
Commits in this comparison (66):

851369a0b4, 5065d62443, 2d1e1e9532, 051049581a, da5ae18b0b, ac9353c101,
c4c5c44bf1, d3659ebf02, 6b5070e634, 09ff96ceee, f231acf109, e576e1662c,
6a21ddd1cd, c1e35b2459, eee2a96029, 6f5e5a4d20, 9e73cb7e00, d7ab4eec7c,
cdd96a4bc7, 39a21766b6, 0e33c18b5c, 7f411ac7dd, ed1da11c9d, 66ec28dd83,
b928d96774, 73335f9c1e, 7b6b81dbc5, 2e11c574c2, d07565807c, 6eccbf215a,
8abc05f04e, 8f587467f9, 3d7a91c7ff, 3fd408074c, f71cbd8f51, 915df8ac16,
d5ecb08a2d, 9388cd4861, 180590b411, 4ec37539bf, 8755fe01b6, 0bfe54e0c2,
29bc689b84, 65682febc7, d408665d62, 64fceb0a01, ab58e578c9, 0e58b8fa5b,
99ac082f00, 4d3fddc051, 42456439a9, ee0c0e7308, 998c272bf8, daddd1f0e8,
17f20535cb, 0829ea3ea1, 2069d9c3d7, 8a2d526c50, 8ad96d6f73, 947faf8c05,
a948556455, 835384b730, c5b93794f4, 056cd32f0e, 49bb413110, aa3379e1c6
.gitignore (vendored, 1 line changed)

@@ -12,3 +12,4 @@ etc/surveys/*.yaml
!etc/surveys/_*.yaml
etc/ssl/*
etc/config.yaml
var/*
@@ -11,11 +11,9 @@ from datastore import Datastore

if __name__ == '__main__':

    print("Reading configuration")
    surveys = configuration.surveys()

    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()

    print("Reading surveys")
    for survey in surveys:
@@ -115,7 +115,10 @@ if __name__ == '__main__':

            process(layer_name, layer, realprefix)

        else:
        elif os.path.isdir(realprefix):

            if not "globs" in layer:
                layer["globs"] = [ "**/*.geojson" ]

            for globspec in layer["globs"]:
                for physical_filepath in pathlib.Path(realprefix).glob(globspec):
etc/db/upgrades/upgrade30-v0.4.3-large-notification-payloads.sql (164 lines, new file)

@@ -0,0 +1,164 @@
-- Support notification payloads larger than Postgres' NOTIFY limit.
--
-- New schema version: 0.4.3
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This creates a new table where large notification payloads are stored
-- temporarily and from which they might be recalled by the notification
-- listeners. It also creates a purge_notifications() procedure used to
-- clean up old notifications from the notifications log and finally,
-- modifies notify() to support these changes. When a large payload is
-- encountered, the payload is stored in the notify_payloads table and
-- a trimmed down version containing a notification_id is sent to listeners
-- instead. Listeners can then query notify_payloads to retrieve the full
-- payloads. It is the application layer's responsibility to delete old
-- notifications.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_schema () AS $outer$
BEGIN

  RAISE NOTICE 'Updating public schema';
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO public');

  CREATE TABLE IF NOT EXISTS public.notify_payloads (
    id SERIAL,
    tstamp timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP,
    payload text NOT NULL DEFAULT '',
    PRIMARY KEY (id)
  );

  CREATE INDEX IF NOT EXISTS notify_payload_tstamp ON notify_payloads (tstamp);

  CREATE OR REPLACE FUNCTION public.notify() RETURNS trigger
  LANGUAGE plpgsql
  AS $$
  DECLARE
    channel text := TG_ARGV[0];
    pid text;
    payload text;
    notification text;
    payload_id integer;
  BEGIN

    SELECT projects.pid INTO pid FROM projects WHERE schema = TG_TABLE_SCHEMA;

    payload := json_build_object(
      'tstamp', CURRENT_TIMESTAMP,
      'operation', TG_OP,
      'schema', TG_TABLE_SCHEMA,
      'table', TG_TABLE_NAME,
      'old', row_to_json(OLD),
      'new', row_to_json(NEW),
      'pid', pid
    )::text;

    IF octet_length(payload) < 1000 THEN
      PERFORM pg_notify(channel, payload);
    ELSE
      -- We need to find another solution
      -- FIXME Consider storing the payload in a temporary memory table,
      -- referenced by some form of autogenerated ID. Then send the ID
      -- as the payload and then it's up to the user to fetch the original
      -- payload if interested. This needs a mechanism to expire older payloads
      -- in the interest of conserving memory.

      INSERT INTO notify_payloads (payload) VALUES (payload) RETURNING id INTO payload_id;

      notification := json_build_object(
        'tstamp', CURRENT_TIMESTAMP,
        'operation', TG_OP,
        'schema', TG_TABLE_SCHEMA,
        'table', TG_TABLE_NAME,
        'pid', pid,
        'payload_id', payload_id
      )::text;

      PERFORM pg_notify(channel, notification);
      RAISE INFO 'Payload over limit';
    END IF;
    RETURN NULL;
  END;
  $$;

  CREATE PROCEDURE public.purge_notifications (age_seconds numeric DEFAULT 120) AS $$
    DELETE FROM notify_payloads WHERE EXTRACT(epoch FROM CURRENT_TIMESTAMP - tstamp) > age_seconds;
  $$ LANGUAGE sql;

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.3' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.2' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
      HINT='Ensure all previous patches have been applied.';
  END IF;

  -- This upgrade modified the `public` schema only, not individual
  -- project schemas.
  CALL pg_temp.upgrade_schema();

END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_schema ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.3"}')
  ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.4.3"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
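Not part of the change set, but for orientation: a minimal sketch of how a notification listener might resolve an oversized payload with node-postgres. The channel name below is an assumption; the table and procedure names come from the upgrade script above.

```js
// Sketch only; the channel name "events" is hypothetical.
const { Client } = require('pg');

async function listenWithLargePayloads () {
  const client = new Client();           // connection settings from the environment
  await client.connect();
  await client.query('LISTEN events');   // hypothetical channel name

  client.on('notification', async (msg) => {
    const data = JSON.parse(msg.payload);

    // Oversized payloads arrive as a stub carrying only payload_id.
    if (data.payload_id) {
      const res = await client.query(
        'SELECT payload FROM public.notify_payloads WHERE id = $1',
        [data.payload_id]
      );
      if (res.rows[0]) Object.assign(data, JSON.parse(res.rows[0].payload));
    }

    // ... handle the (now complete) notification ...

    // Housekeeping: drop stored payloads older than the default 120 seconds.
    await client.query('CALL public.purge_notifications()');
  });
}
```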
@@ -0,0 +1,104 @@
-- Add event_log_changes function
--
-- New schema version: 0.4.4
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This adds a function event_log_changes which returns the subset of
-- events from event_log_full which have been modified on or after a
-- given timestamp.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION event_log_changes(ts0 timestamptz)
  RETURNS SETOF event_log_full
  LANGUAGE sql
  AS $$
    SELECT *
    FROM event_log_full
    WHERE lower(validity) > ts0 OR upper(validity) IS NOT NULL AND upper(validity) > ts0
    ORDER BY lower(validity);
  $$;

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.4' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.3' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
      HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.4"}')
  ON CONFLICT (key) DO UPDATE
  SET value = public.info.value || '{"db_schema": "0.4.4"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
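Again only as a sketch (not part of the diff): once this upgrade is applied, a caller whose search path includes the survey schema can ask for everything modified since a given timestamp.

```js
// Assumes a node-postgres pool whose search_path includes the survey schema.
const { Pool } = require('pg');
const pool = new Pool();

async function eventChangesSince (ts0) {
  const res = await pool.query('SELECT * FROM event_log_changes($1)', [ts0]);
  return res.rows;  // rows from event_log_full modified on or after ts0
}

// eventChangesSince('2025-01-01T00:00:00Z').then(rows => console.log(rows.length));
```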
@@ -44,7 +44,7 @@
  <template v-slot:activator="{ on, attrs }">
    <v-text-field
      v-model="tsDate"
      :disabled="!!(sequence || point || entrySequence || entryPoint)"
      :disabled="!!(entrySequence || entryPoint)"
      label="Date"
      suffix="UTC"
      prepend-icon="mdi-calendar"
@@ -64,7 +64,7 @@
  <v-col>
    <v-text-field
      v-model="tsTime"
      :disabled="!!(sequence || point || entrySequence || entryPoint)"
      :disabled="!!(entrySequence || entryPoint)"
      label="Time"
      suffix="UTC"
      prepend-icon="mdi-clock-outline"
@@ -256,6 +256,15 @@
  >
    Cancel
  </v-btn>
  <v-btn v-if="!id && (entrySequence || entryPoint)"
    color="info"
    text
    title="Enter an event by time"
    @click="timed"
  >
    <v-icon left small>mdi-clock-outline</v-icon>
    Timed
  </v-btn>
  <v-spacer></v-spacer>
  <v-btn
    :disabled="!canSave"
@@ -632,6 +641,14 @@ export default {
    }
  },

  timed () {
    const tstamp = (new Date()).toISOString();
    this.entrySequence = null;
    this.entryPoint = null;
    this.tsDate = tstamp.substr(0, 10);
    this.tsTime = tstamp.substr(11, 8);
  },

  close () {
    this.entryLabels = this.selectedLabels.map(this.labelToItem)
    this.$emit("input", false);
@@ -181,6 +181,9 @@ app.map({
  post: [ mw.auth.access.write, mw.event.post ],
  put: [ mw.auth.access.write, mw.event.put ],
  delete: [ mw.auth.access.write, mw.event.delete ],
  'changes/:since': {
    get: [ mw.event.changes ]
  },
  // TODO Rename -/:sequence → sequence/:sequence
  '-/:sequence/': { // NOTE: We need to avoid conflict with the next endpoint ☹
    get: [ mw.event.sequence.get ],
@@ -33,7 +33,7 @@ function saveResponse (res) {
  const cache = getCache(res);
  const req = res.req;
  console.log(`Saving ETag: ${req.method} ${req.url} → ${etag}`);
  const headers = res.getHeaders();
  const headers = structuredClone(res.getHeaders());
  delete headers["set-cookie"];
  cache[req.url] = {etag, headers};
}
@@ -43,15 +43,26 @@ const rels = [
    matches: [ ],
    callback (url, data) {
      if (data.payload?.table == "info") {
        const pid = data.payload?.pid;
        const key = (data.payload?.new ?? data.payload?.old)?.key;

        const rx = /^\/project\/([^\/]+)\/info\/([^\/?]+)[\/?]?/;
        const match = url.match(rx);
        if (match) {
          if (match[1] == data.payload.pid) {
          if (match[1] == pid) {
            if (match[2] == data.payload?.old?.key || match[2] == data.payload?.new?.key) {
              return true;
            }
          }
        }

        if (key == "plan") {
          const rx = /^\/project\/([^\/]+)\/plan[\/?]?/;
          const match = url.match(rx);
          if (match) {
            return match[1] == pid;
          }
        }
      }
      return false;
    }
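To illustrate the revised callback (the values below are hypothetical): a notification on the info table now invalidates both the matching info URL and, when the changed key is "plan", the project's plan endpoint.

```js
// Hypothetical notification data, for illustration only.
const data = { payload: { table: 'info', pid: 'SURVEY-01', new: { key: 'plan' } } };

callback('/project/SURVEY-01/info/plan', data);  // → true (info key matches)
callback('/project/SURVEY-01/plan?x=1', data);   // → true (key == "plan")
callback('/project/OTHER/info/plan', data);      // → false (different project)
```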
lib/www/server/api/middleware/event/changes.js (14 lines, new file)

@@ -0,0 +1,14 @@

const { event } = require('../../../lib/db');

const json = async function (req, res, next) {
  try {
    const response = await event.changes(req.params.project, req.params.since, req.query);
    res.status(200).send(response);
    next();
  } catch (err) {
    next(err);
  }
};

module.exports = json;
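The middleware above backs the new changes/:since route; the exact URL prefix depends on how app.map mounts the event endpoints, so the path in this sketch is an assumption.

```js
// Client-side sketch; the /api/project/<pid>/event prefix is assumed.
const since = '2025-01-01T00:00:00Z';
const res = await fetch(`/api/project/SURVEY-01/event/changes/${encodeURIComponent(since)}`);
const changes = await res.json();  // output of event.changes(project, since, query)
```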
@@ -6,5 +6,6 @@ module.exports = {
  post: require('./post'),
  put: require('./put'),
  patch: require('./patch'),
  delete: require('./delete')
  delete: require('./delete'),
  changes: require('./changes')
}
@@ -1,9 +1,14 @@

const { plan } = require('../../../../lib/db');
const { plan, info } = require('../../../../lib/db');

const json = async function (req, res, next) {
  try {
    const response = await plan.list(req.params.project, req.query);
    const sequences = await plan.list(req.params.project, req.query) ?? [];
    const remarks = await info.get(req.params.project, "plan/remarks", req.query, req.user.role) ?? null;
    const response = {
      remarks,
      sequences
    };
    res.status(200).send(response);
    next();
  } catch (err) {
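With this change the plan endpoint no longer returns a bare array of sequences; the "plan/remarks" info entry now rides along. A client-side sketch (URL assumed):

```js
const res = await fetch('/api/project/SURVEY-01/plan');   // path assumed
const { remarks, sequences } = await res.json();
console.log(sequences.length, 'planned sequences;', remarks ?? 'no remarks');
```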
@@ -9,105 +9,16 @@ const { ALERT, ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
 * the last shot and first shot of the previous and current dates, respectively.
 */
class DetectFDSP {
  /* Data may come much faster than we can process it, so we put it
   * in a queue and process it at our own pace.
   *
   * The run() method fills the queue with the necessary data and then
   * calls processQueue().
   *
   * The processQueue() method looks at the first two elements in
   * the queue and processes them if they are not already being taken
   * care of by a previous processQueue() call – this will happen when
   * data is coming in faster than it can be processed.
   *
   * If the processQueue() call is the first to see the two bottommost
   * two elements, it will process them and, when finished, it will set
   * the `isPending` flag of the bottommost element to `false`, thus
   * letting the next call know that it has work to do.
   *
   * If the queue was empty, run() will set the `isPending` flag of its
   * first element to a falsy value, thus bootstrapping the process.
   */
  static MAX_QUEUE_SIZE = 125000;

  queue = [];
  author = `*${this.constructor.name}*`;
  prev = null;

  async processQueue () {
    DEBUG("Queue length", this.queue.length)
    while (this.queue.length > 1) {

      if (this.queue[0].isPending) {
        setImmediate(() => this.processQueue());
        return;
      }

      const prev = this.queue.shift();
      const cur = this.queue[0];

      const sequence = Number(cur._sequence);

      try {

        if (prev.lineName == cur.lineName && prev._sequence == cur._sequence &&
            prev.lineStatus == "online" && cur.lineStatus == "online" && sequence) {

          // DEBUG("Previous", prev);
          // DEBUG("Current", cur);

          if (prev.time.substr(0, 10) != cur.time.substr(0, 10)) {
            // Possible a date change, but could also be a missing timestamp
            // or something else.

            const ts0 = new Date(prev.time)
            const ts1 = new Date(cur.time);

            if (!isNaN(ts0) && !isNaN(ts1) && ts0.getUTCDay() != ts1.getUTCDay()) {
              INFO("Sequence shot across midnight UTC detected", cur._sequence, cur.lineName);

              const ldsp = {
                sequence: prev._sequence,
                point: prev._point,
                remarks: "Last shotpoint of the day",
                labels: ["LDSP", "Prod"],
                meta: {auto: true, insertedBy: this.constructor.name}
              };

              const fdsp = {
                sequence: cur._sequence,
                point: cur._point,
                remarks: "First shotpoint of the day",
                labels: ["FDSP", "Prod"],
                meta: {auto: true, insertedBy: this.constructor.name}
              };

              INFO("LDSP", ldsp);
              INFO("FDSP", fdsp);

              const projectId = await schema2pid(prev._schema);

              if (projectId) {
                await event.post(projectId, ldsp);
                await event.post(projectId, fdsp);
              } else {
                ERROR("projectId not found for", prev._schema);
              }
            } else {
              WARNING("False positive on these timestamps", prev.time, cur.time);
              WARNING("No events were created");
            }
          }
        }
        // Processing of this shot has already been completed.
        // The queue can now move forward.
      } catch (err) {
        ERROR(err);
      } finally {
        cur.isPending = false;
      }
    }
  constructor () {
    DEBUG(`${this.author} instantiated`);
  }

  async run (data) {
  async run (data, ctx) {

    if (!data || data.channel !== "realtime") {
      return;
    }
@@ -116,27 +27,70 @@ class DetectFDSP {
      return;
    }

    const meta = data.payload.new.meta;

    if (this.queue.length < DetectFDSP.MAX_QUEUE_SIZE) {

      const event = {
        isPending: this.queue.length,
        _schema: meta._schema,
        time: meta.time,
        lineStatus: meta.lineStatus,
        _sequence: meta._sequence,
        _point: meta._point,
        lineName: meta.lineName
      };
      this.queue.push(event);
      // DEBUG("EVENT", event);

    } else {
      ALERT("Queue full at", this.queue.length);
    if (!this.prev) {
      DEBUG("Initialising `prev`");
      this.prev = data;
      return;
    }

    this.processQueue();
    try {
      DEBUG("Running");
      const cur = data;
      const sequence = Number(cur._sequence);

      if (this.prev.lineName == cur.lineName && this.prev._sequence == cur._sequence &&
          this.prev.lineStatus == "online" && cur.lineStatus == "online" && sequence) {

        if (this.prev.time.substr(0, 10) != cur.time.substr(0, 10)) {
          // Possibly a date change, but could also be a missing timestamp
          // or something else.

          const ts0 = new Date(this.prev.time)
          const ts1 = new Date(cur.time);

          if (!isNaN(ts0) && !isNaN(ts1) && ts0.getUTCDay() != ts1.getUTCDay()) {
            INFO("Sequence shot across midnight UTC detected", cur._sequence, cur.lineName);

            const ldsp = {
              sequence: this.prev._sequence,
              point: this.prev._point,
              remarks: "Last shotpoint of the day",
              labels: ["LDSP", "Prod"],
              meta: {auto: true, author: `*${this.constructor.name}*`}
            };

            const fdsp = {
              sequence: cur._sequence,
              point: cur._point,
              remarks: "First shotpoint of the day",
              labels: ["FDSP", "Prod"],
              meta: {auto: true, author: `*${this.constructor.name}*`}
            };

            INFO("LDSP", ldsp);
            INFO("FDSP", fdsp);

            const projectId = await schema2pid(this.prev._schema);

            if (projectId) {
              await event.post(projectId, ldsp);
              await event.post(projectId, fdsp);
            } else {
              ERROR("projectId not found for", this.prev._schema);
            }
          } else {
            WARNING("False positive on these timestamps", this.prev.time, cur.time);
            WARNING("No events were created");
          }
        }

      }
    } catch (err) {
      DEBUG(`${this.author} error`, err);
      throw err;
    } finally {
      this.prev = data;
    }
  }
}
@@ -0,0 +1,60 @@
const project = require('../../lib/db/project');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

class DetectProjectConfigurationChange {

  author = `*${this.constructor.name}*`;

  constructor (ctx) {
    DEBUG(`${this.author} instantiated`);

    // Grab project configurations.
    // NOTE that this will run asynchronously
    this.run({channel: "project"}, ctx);
  }

  async run (data, ctx) {

    if (!data || data.channel !== "project") {
      return;
    }

    // Project notifications, as of this writing, most likely
    // do not carry payloads as those exceed the notification
    // size limit.
    // For our purposes, we do not care as we just re-read all
    // the configurations for all non-archived projects.

    try {
      DEBUG("Project configuration change detected")

      const projects = await project.get();

      const _ctx_data = {};
      for (let pid of projects.map(i => i.pid)) {
        DEBUG("Retrieving configuration for", pid);
        const cfg = await project.configuration.get(pid);
        if (cfg?.archived === true) {
          DEBUG(pid, "is archived. Ignoring");
          continue;
        }

        DEBUG("Saving configuration for", pid);
        _ctx_data[pid] = cfg;
      }

      if (! ("projects" in ctx)) {
        ctx.projects = {};
      }

      ctx.projects.configuration = _ctx_data;
      DEBUG("Committed project configuration to ctx.projects.configuration");

    } catch (err) {
      DEBUG(`${this.author} error`, err);
      throw err;
    }
  }
}

module.exports = DetectProjectConfigurationChange;
lib/www/server/events/handlers/detect-soft-start.js (80 lines, new file)

@@ -0,0 +1,80 @@
const { schema2pid } = require('../../lib/db/connection');
const { event } = require('../../lib/db');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

class DetectSoftStart {

  author = `*${this.constructor.name}*`;
  prev = null;

  constructor () {
    DEBUG(`${this.author} instantiated`);
  }

  async run (data, ctx) {

    if (!data || data.channel !== "realtime") {
      return;
    }

    if (!(data.payload && data.payload.new && data.payload.new.meta)) {
      return;
    }

    if (!this.prev) {
      DEBUG("Initialising `prev`");
      this.prev = data;
      return;
    }

    try {
      DEBUG("Running");
      const cur = data?.payload?.new?.meta;
      const prev = this.prev?.payload?.new?.meta;
      // DEBUG("%j", prev);
      // DEBUG("%j", cur);
      DEBUG("cur.num_guns: %d\ncur.num_active: %d\nprv.num_active: %d\ntest passed: %j", cur.num_guns, cur.num_active, prev.num_active, cur.num_active >= 1 && !prev.num_active && cur.num_active < cur.num_guns);

      if (cur.num_active >= 1 && !prev.num_active && cur.num_active < cur.num_guns) {
        INFO("Soft start detected @", cur.tstamp);

        // FIXME Shouldn't need to use schema2pid as pid already present in payload.
        const projectId = await schema2pid(cur._schema ?? prev._schema);

        // TODO: Try and grab the corresponding comment from the configuration?
        const payload = {
          tstamp: cur.tstamp,
          remarks: "Soft start",
          labels: [ "Daily", "Guns", "Prod" ],
          meta: {auto: true, author: `*${this.constructor.name}*`}
        };
        DEBUG("Posting event", projectId, payload);
        await event.post(projectId, payload);

      } else if (cur.num_active == cur.num_guns && prev.num_active < cur.num_active) {
        INFO("Full volume detected @", cur.tstamp);

        const projectId = await schema2pid(cur._schema ?? prev._schema);

        // TODO: Try and grab the corresponding comment from the configuration?
        const payload = {
          tstamp: cur.tstamp,
          remarks: "Full volume",
          labels: [ "Daily", "Guns", "Prod" ],
          meta: {auto: true, author: `*${this.constructor.name}*`}
        };
        DEBUG("Posting event", projectId, payload);
        await event.post(projectId, payload);
      }

    } catch (err) {
      DEBUG(`${this.author} error`, err);
      throw err;
    } finally {
      this.prev = data;
    }
  }
}

module.exports = DetectSoftStart;
@@ -1,114 +1,17 @@
const { schema2pid } = require('../../lib/db/connection');
const { event } = require('../../lib/db');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

class DetectSOLEOL {
  /* Data may come much faster than we can process it, so we put it
   * in a queue and process it at our own pace.
   *
   * The run() method fills the queue with the necessary data and then
   * calls processQueue().
   *
   * The processQueue() method looks takes the first two elements in
   * the queue and processes them if they are not already being taken
   * care of by a previous processQueue() call – this will happen when
   * data is coming in faster than it can be processed.
   *
   * If the processQueue() call is the first to see the two bottommost
   * two elements, it will process them and, when finished, it will set
   * the `isPending` flag of the bottommost element to `false`, thus
   * letting the next call know that it has work to do.
   *
   * If the queue was empty, run() will set the `isPending` flag of its
   * first element to a falsy value, thus bootstrapping the process.
   */
  static MAX_QUEUE_SIZE = 125000;

  queue = [];
  author = `*${this.constructor.name}*`;
  prev = null;

  async processQueue () {
    while (this.queue.length > 1) {
      if (this.queue[0].isPending) {
        setImmediate(() => this.processQueue());
        return;
      }

      const prev = this.queue.shift();
      const cur = this.queue[0];

      const sequence = Number(cur._sequence);

      try {

        if (prev.lineName == cur.lineName && prev._sequence == cur._sequence &&
            prev.lineStatus != "online" && cur.lineStatus == "online" && sequence) {
          // console.log("TRANSITION TO ONLINE", prev, cur);

          // Check if there are already FSP, FGSP events for this sequence
          const projectId = await schema2pid(cur._schema);
          const sequenceEvents = await event.list(projectId, {sequence});

          const labels = ["FSP", "FGSP"].filter(l => !sequenceEvents.find(i => i.labels.includes(l)));

          if (labels.includes("FSP")) {
            // At this point labels contains either FSP only or FSP + FGSP,
            // depending on whether a FGSP event has already been entered.

            const remarks = `SEQ ${cur._sequence}, SOL ${cur.lineName}, BSP: ${(cur.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(cur.waterDepth).toFixed(0)} m.`;
            const payload = {
              type: "sequence",
              sequence,
              point: cur._point,
              remarks,
              labels
            }

            // console.log(projectId, payload);
            await event.post(projectId, payload);
          } else {
            // A first shot point has been already entered in the log,
            // so we have nothing to do here.
          }
        } else if (prev.lineStatus == "online" && cur.lineStatus != "online") {
          // console.log("TRANSITION TO OFFLINE", prev, cur);

          // Check if there are already LSP, LGSP events for this sequence
          const projectId = await schema2pid(prev._schema);
          const sequenceEvents = await event.list(projectId, {sequence});

          const labels = ["LSP", "LGSP"].filter(l => !sequenceEvents.find(i => i.labels.includes(l)));

          if (labels.includes("LSP")) {
            // At this point labels contains either LSP only or LSP + LGSP,
            // depending on whether a LGSP event has already been entered.

            const remarks = `SEQ ${prev._sequence}, EOL ${prev.lineName}, BSP: ${(prev.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(prev.waterDepth).toFixed(0)} m.`;
            const payload = {
              type: "sequence",
              sequence,
              point: prev._point,
              remarks,
              labels
            }

            // console.log(projectId, payload);
            await event.post(projectId, payload);
          } else {
            // A first shot point has been already entered in the log,
            // so we have nothing to do here.
          }
        }
        // Processing of this shot has already been completed.
        // The queue can now move forward.
      } catch (err) {
        console.error("DetectSOLEOL Error")
        console.log(err);
      } finally {
        cur.isPending = false;
      }
    }
  constructor () {
    DEBUG(`${this.author} instantiated`);
  }

  async run (data) {
  async run (data, ctx) {
    if (!data || data.channel !== "realtime") {
      return;
    }
@@ -117,30 +20,69 @@ class DetectSOLEOL {
      return;
    }

    const meta = data.payload.new.meta;

    if (this.queue.length < DetectSOLEOL.MAX_QUEUE_SIZE) {

      this.queue.push({
        isPending: this.queue.length,
        _schema: meta._schema,
        time: meta.time,
        shot: meta.shot,
        lineStatus: meta.lineStatus,
        _sequence: meta._sequence,
        _point: meta._point,
        lineName: meta.lineName,
        speed: meta.speed,
        waterDepth: meta.waterDepth
      });

    } else {
      // FIXME Change to alert
      console.error("DetectSOLEOL queue full at", this.queue.length);
    if (!this.prev) {
      DEBUG("Initialising `prev`");
      this.prev = data;
      return;
    }

    this.processQueue();
    try {
      DEBUG("Running");
      // DEBUG("%j", data);
      const cur = data?.payload?.new?.meta;
      const prev = this.prev?.payload?.new?.meta;
      const sequence = Number(cur._sequence);

      // DEBUG("%j", prev);
      // DEBUG("%j", cur);
      DEBUG("prv.lineName: %s\ncur.lineName: %s\nprv._sequence: %s\ncur._sequence: %s\nprv.lineStatus: %s\ncur.lineStatus: %s", prev.lineName, cur.lineName, prev._sequence, cur._sequence, prev.lineStatus, cur.lineStatus);

      if (prev.lineName == cur.lineName && prev._sequence == cur._sequence &&
          prev.lineStatus != "online" && cur.lineStatus == "online" && sequence) {
        INFO("Transition to ONLINE detected");

        // We must use schema2pid because the pid may not have been
        // populated for this event.
        const projectId = await schema2pid(cur._schema ?? prev._schema);
        const labels = ["FSP", "FGSP"];
        const remarks = `SEQ ${cur._sequence}, SOL ${cur.lineName}, BSP: ${(cur.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(cur.waterDepth).toFixed(0)} m.`;
        const payload = {
          type: "sequence",
          sequence,
          point: cur._point,
          remarks,
          labels,
          meta: {auto: true, author: `*${this.constructor.name}*`}
        }
        INFO("Posting event", projectId, payload);
        await event.post(projectId, payload);
      } else if (prev.lineName == cur.lineName && prev._sequence == cur._sequence &&
                 prev.lineStatus == "online" && cur.lineStatus != "online" && sequence) {
        INFO("Transition to OFFLINE detected");

        const projectId = await schema2pid(prev._schema ?? cur._schema);
        const labels = ["LSP", "LGSP"];
        const remarks = `SEQ ${cur._sequence}, EOL ${cur.lineName}, BSP: ${(cur.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(cur.waterDepth).toFixed(0)} m.`;
        const payload = {
          type: "sequence",
          sequence,
          point: cur._point,
          remarks,
          labels,
          meta: {auto: true, author: `*${this.constructor.name}*`}
        }
        INFO("Posting event", projectId, payload);
        await event.post(projectId, payload);
      }

    } catch (err) {
      DEBUG(`${this.author} error`, err);
      throw err;
    } finally {
      this.prev = data;
    }
  }

}

module.exports = DetectSOLEOL;
@@ -1,13 +1,44 @@
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

const Handlers = [
  require('./detect-project-configuration-change'),
  require('./detect-soleol'),
  require('./detect-soft-start'),
  require('./report-line-change-time'),
  require('./detect-fdsp')
];

function init () {
  return Handlers.map(Handler => new Handler());
function init (ctx) {

  const instances = Handlers.map(Handler => new Handler(ctx));

  function prepare (data, ctx) {
    const promises = [];
    for (let instance of instances) {
      const promise = new Promise(async (resolve, reject) => {
        try {
          DEBUG("Run", instance.author);
          const result = await instance.run(data, ctx);
          DEBUG("%s result: %O", instance.author, result);
          resolve(result);
        } catch (err) {
          ERROR("%s error:\n%O", instance.author, err);
          reject(err);
        }
      });
      promises.push(promise);
    }
    return promises;
  }

  function despatch (data, ctx) {
    return Promise.allSettled(prepare(data, ctx));
  }

  return { instances, prepare, despatch };
}

module.exports = {
  Handlers,
  init
}
};
lib/www/server/events/handlers/report-line-change-time.js (231 lines, new file)

@@ -0,0 +1,231 @@
const { event, project } = require('../../lib/db');
const { withinValidity } = require('../../lib/utils/ranges');
const unique = require('../../lib/utils/unique');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

class ReportLineChangeTime {

  author = `*${this.constructor.name}*`;

  constructor () {
    DEBUG(`${this.author} instantiated`);
  }

  async run (data, ctx) {

    if (!data || data.channel !== "event") {
      return;
    }

    const n = data.payload.new;
    const o = data.payload.old;

    if (!(n?.labels) && !(o?.labels)) {
      return;
    }

    if (!n?.labels?.includes("FGSP") && !o?.labels?.includes("FGSP") &&
        !n?.labels?.includes("LGSP") && !o?.labels?.includes("LGSP")) {
      return;
    }

    try {
      DEBUG("Running");
      const cur = data;
      const projectId = cur?.payload?.pid;
      const forward = (cur?.payload?.old?.labels?.includes("LGSP") || cur?.payload?.new?.labels?.includes("LGSP"));
      DEBUG("%j", cur);

      if (!projectId) {
        throw {message: "No projectID found in event", cur};
        return;
      }

      async function getLineChangeTime (data, forward = false) {
        if (forward) {
          const ospEvents = await event.list(projectId, {label: "FGSP"});
          // DEBUG("ospEvents", ospEvents);
          const osp = ospEvents.filter(i => i.tstamp > data.tstamp).pop();
          DEBUG("fsp", osp);
          // DEBUG("data", data);

          if (osp) {
            DEBUG("lineChangeTime", osp.tstamp - data.tstamp);
            return { lineChangeTime: osp.tstamp - data.tstamp, osp };
          }
        } else {
          const ospEvents = await event.list(projectId, {label: "LGSP"});
          // DEBUG("ospEvents", ospEvents);
          const osp = ospEvents.filter(i => i.tstamp < data.tstamp).shift();
          DEBUG("lsp", osp);
          // DEBUG("data", data);

          if (osp) {
            DEBUG("lineChangeTime", data.tstamp - osp.tstamp);
            return { lineChangeTime: data.tstamp - osp.tstamp, osp };
          }
        }
      }

      function parseInterval (dt) {
        const daySeconds = (dt/1000) % 86400;
        const d = Math.floor((dt/1000) / 86400);
        const dateObject = new Date(null);
        dateObject.setSeconds(daySeconds);
        const [ h, m, s ] = dateObject.toISOString().slice(11, 19).split(":").map(Number);
        return {d, h, m, s};
      }

      function formatInterval (i) {
        let str = "";
        for (let [k, v] of Object.entries(i)) {
          if (v) {
            str += " " + v + " " + k;
          }
        }
        return str.trim();
      }

      const deleteStaleEvents = async (seq) => {
        if (seq) {
          DEBUG("Will delete lct events related to sequence(s)", seq);

          const jpq = `$."${this.author}"`;

          const opts = {jpq};

          if (Array.isArray(seq)) {
            opts.sequences = unique(seq).filter(i => !!i);
          } else {
            opts.sequence = seq;
          }

          const staleEvents = await event.list(projectId, opts);
          DEBUG(staleEvents.length ?? 0, "events to delete");
          for (let staleEvent of staleEvents) {
            DEBUG(`Deleting event id ${staleEvent.id} (seq = ${staleEvent.sequence}, point = ${staleEvent.point})`);
            await event.del(projectId, staleEvent.id);
          }
        }
      }

      const createLineChangeTimeEvents = async (lineChangeTime, data, osp) => {

        const events = [];
        const cfg = ctx?.projects?.configuration?.[projectId] ?? {};
        const nlcd = cfg?.production?.nominalLineChangeDuration * 60*1000; // m → ms
        DEBUG("nlcd", nlcd);
        if (nlcd && lineChangeTime > nlcd) {
          const excess = lineChangeTime-nlcd;
          const excessString = formatInterval(parseInterval(excess));
          DEBUG("excess", excess, excessString);

          // ref: The later of the two events
          const ref = forward ? osp : data;
          const payload = {
            // tstamp: new Date(ref.tstamp-1),
            sequence: ref.sequence,
            point: ref.point,
            remarks: `_Nominal line change duration exceeded by ${excessString}_`,
            labels: [ "Nav", "Prod" ],
            meta: {
              auto: true,
              author: this.author,
              [this.author]: {
                parents: [
                  data.id,
                  osp.id
                ],
                type: "excess",
                value: excess
              }
            }
          }

          events.push(payload);
          DEBUG("Created line change duration exceeded event", projectId, payload);
        }

        const lctString = formatInterval(parseInterval(lineChangeTime));

        // ref: The later of the two events
        const ref = forward ? osp : data;
        const payload = {
          // tstamp: new Date(ref.tstamp-1),
          sequence: ref.sequence,
          point: ref.point,
          remarks: `Line change time: ${lctString}`,
          labels: [ "Nav", "Prod" ],
          meta: {
            auto: true,
            author: this.author,
            [this.author]: {
              parents: [
                data.id,
                osp.id
              ],
              type: "lineChangeTime",
              value: lineChangeTime
            }
          }
        };

        events.push(payload);
        DEBUG("Created line change duration event", projectId, payload);

        return events;
      }

      const maybePostEvent = async (projectId, payload) => {
        DEBUG("Posting event", projectId, payload);
        await event.post(projectId, payload);
      }

      await deleteStaleEvents([cur.old?.sequence, cur.new?.sequence]);

      if (cur?.payload?.operation == "INSERT") {
        // NOTE: UPDATE on the event_log view translates to one UPDATE plus one INSERT
        // on event_log_full, so we don't need to worry about UPDATE here.
        const data = n;
        DEBUG("INSERT seen: will add lct events related to ", data.id);

        if (withinValidity(data.validity)) {
          DEBUG("Event within validity period", data.validity, new Date());

          data.tstamp = new Date(data.tstamp);
          const { lineChangeTime, osp } = await getLineChangeTime(data, forward);

          if (lineChangeTime) {

            const events = await createLineChangeTimeEvents(lineChangeTime, data, osp);

            if (events?.length) {
              DEBUG("Deleting other events for sequence", events[0].sequence);
              await deleteStaleEvents(events[0].sequence);
            }

            for (let payload of events) {
              await maybePostEvent(projectId, payload);
            }
          }
        } else {
          DEBUG("Event outside of validity range", data.validity, "lct events not inserted");
        }

      }

    } catch (err) {
      ERROR(`${this.author} error`, err);
      throw err;
    }

  }
}

module.exports = ReportLineChangeTime;
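A worked example of the two interval helpers defined inside run() above, parseInterval() and formatInterval():

```js
// 1 day, 2 hours, 3 minutes and 4 seconds, expressed in milliseconds:
const dt = (1 * 86400 + 2 * 3600 + 3 * 60 + 4) * 1000;  // 93 784 000 ms
parseInterval(dt);                  // → { d: 1, h: 2, m: 3, s: 4 }
formatInterval(parseInterval(dt));  // → "1 d 2 h 3 m 4 s"
```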
@@ -1,23 +1,25 @@
const { listen } = require('../ws/db');
const { listen } = require('../lib/db/notify');
const channels = require('../lib/db/channels');
const handlers = require('./handlers').init();
const handlers = require('./handlers');
const { ActionsQueue } = require('../lib/queue');
const { ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

function start () {
  listen(channels, async function (data) {

  const queue = new ActionsQueue();
  const ctx = {}; // Context object

  const { prepare, despatch } = handlers.init(ctx);

  listen(channels, function (data) {
    DEBUG("Incoming data", data);
    for (const handler of handlers) {
      // NOTE: We are intentionally passing the same instance
      // of the data to every handler. This means that earlier
      // handlers could, in principle, modify the data to be
      // consumed by latter ones, provided that they are
      // synchronous (as otherwise, the completion order is
      // undefined).
      await handler.run(data);
    }

    // We don't bother awaiting
    queue.enqueue(() => despatch(data, ctx));
    DEBUG("Queue size", queue.length());
  });

  INFO("Events manager started.", handlers.length, "active handlers");
  INFO("Events manager started");
}

module.exports = { start }
@@ -8,23 +8,55 @@ async function main () {
  INFO("Running version", await version.describe());
  version.compatible()
    .then( (versions) => {
      const api = require('./api');
      const ws = require('./ws');
      try {
        const api = require('./api');
        const ws = require('./ws');
        const periodicTasks = require('./periodic-tasks').init();

        const { fork } = require('child_process');
        const { fork } = require('child_process');

        const port = process.env.HTTP_PORT || 3000;
        const host = process.env.HTTP_HOST || "127.0.0.1";
        const path = process.env.HTTP_PATH ?? "/api";
        const server = api.start(port, host, path);
        ws.start(server);
        const port = process.env.HTTP_PORT || 3000;
        const host = process.env.HTTP_HOST || "127.0.0.1";
        const path = process.env.HTTP_PATH ?? "/api";
        const server = api.start(port, host, path);
        ws.start(server);

        const eventManagerPath = [__dirname, "events"].join("/");
        const eventManager = fork(eventManagerPath, /*{ stdio: 'ignore' }*/);
        INFO("Versions:", versions);

        INFO("Versions:", versions);
        periodicTasks.start();

        process.on('exit', () => eventManager.kill());
        const eventManagerPath = [__dirname, "events"].join("/");
        const eventManager = fork(eventManagerPath, /*{ stdio: 'ignore' }*/);

        process.on("SIGINT", async () => {
          DEBUG("Interrupted (SIGINT)");
          eventManager.kill()
          await periodicTasks.cleanup();
          process.exit(0);
        })

        process.on("SIGHUP", async () => {
          DEBUG("Stopping (SIGHUP)");
          eventManager.kill()
          await periodicTasks.cleanup();
          process.exit(0);
        })

        process.on('beforeExit', async () => {
          DEBUG("Preparing to exit");
          eventManager.kill()
          await periodicTasks.cleanup();
        });

        process.on('exit', async () => {
          DEBUG("Exiting");
          // eventManager.kill()
          // periodicTasks.cleanup();
        });
      } catch (err) {
        ERROR(err);
        process.exit(2);
      }
    })
    .catch( ({current, wanted, component}) => {
      console.error(`Fatal error: incompatible ${component} version ${current} (wanted: ${wanted})`);
lib/www/server/lib/db/event/changes.js (61 lines, new file)

@@ -0,0 +1,61 @@
const { setSurvey } = require('../connection');
const { replaceMarkers } = require('../../utils');

function parseValidity (row) {
  if (row.validity) {
    const rx = /^(.)("([\d :.+-]+)")?,("([\d :.+-]+)")?([\]\)])$/;
    const m = row.validity.match(rx);
    row.validity = [ m[1], m[3], m[5], m[6] ];
  }
  return row;
}

function transform (row) {
  if (row.validity[2]) {
    return {
      uid: row.uid,
      id: row.id,
      is_deleted: true
    }
  } else {
    row.is_deleted = false;
    row.has_edits = row.id != row.uid;
    row.modified_on = row.validity[1];
    delete row.uid;
    delete row.validity;
    return row;
  }
}

function unique (rows) {
  const o = {};
  rows.forEach(row => o[row.id] = row);
  return Object.values(o);
}

/**
 * Get the event change history from a given epoch (ts0),
 * for all events.
 */
async function changes (projectId, ts0, opts = {}) {

  if (!projectId || !ts0) {
    throw {status: 400, message: "Invalid request" };
    return;
  }

  const client = await setSurvey(projectId);

  const text = `
    SELECT *
    FROM event_log_changes($1);
  `;

  const res = await client.query(text, [ts0]);
  client.release();
  return opts.unique
    ? unique(res.rows.map(i => transform(replaceMarkers(parseValidity(i)))))
    : res.rows.map(i => transform(replaceMarkers(parseValidity(i))));
}

module.exports = changes;
@@ -5,5 +5,6 @@ module.exports = {
  post: require('./post'),
  put: require('./put'),
  patch: require('./patch'),
  del: require('./delete')
  del: require('./delete'),
  changes: require('./changes')
}
@@ -10,25 +10,34 @@ async function list (projectId, opts = {}) {
  const offset = Math.abs((opts.page-1)*opts.itemsPerPage) || 0;
  const limit = Math.abs(Number(opts.itemsPerPage)) || null;

  const filter = opts.sequence
    ? String(opts.sequence).includes(";")
      ? [ "sequence = ANY ( $1 )", [ opts.sequence.split(";") ] ]
      : [ "sequence = $1", [ opts.sequence ] ]
    : opts.date0
      ? opts.date1
        ? [ "date(tstamp) BETWEEN SYMMETRIC $1 AND $2", [ opts.date0, opts.date1 ] ]
        : [ "date(tstamp) = $1", [ opts.date0 ] ]
      : [ "true = true", [] ];
  const sequence = opts.sequence && Number(opts.sequence) || null;
  const sequences = opts.sequences && (Array.isArray(opts.sequences)
    ? opts.sequences.map(Number)
    : opts.sequences.split(/[^0-9]+/).map(Number)) || null;
  const date0 = opts.date0 ?? null;
  const date1 = opts.date1 ?? null;
  const jpq = opts.jpq || null; // jpq: JSONPath Query
  const label = opts.label ?? null;

  const text = `
    SELECT *
    FROM event_log e
    WHERE
      ${filter[0]}
    ORDER BY ${sortKey} ${sortDir};
      ($1::numeric IS NULL OR sequence = $1) AND
      ($2::numeric[] IS NULL OR sequence = ANY( $2 )) AND
      ($3::timestamptz IS NULL OR date(tstamp) = $3) AND
      ($3::timestamptz IS NULL OR
        (($4::timestamptz IS NULL AND date(tstamp) = $3) OR
          date(tstamp) BETWEEN SYMMETRIC $3 AND $4)) AND
      ($5::jsonpath IS NULL OR jsonb_path_exists(meta::jsonb, $5::jsonpath)) AND
      ($6::text IS NULL OR $6 = ANY(labels))
    ORDER BY ${sortKey} ${sortDir}
    LIMIT ${limit};
  `;

  const res = await client.query(text, filter[1]);
  const values = [ sequence, sequences, date0, date1, jpq, label ];

  const res = await client.query(text, values);
  client.release();
  return res.rows.map(i => replaceMarkers(i));
}
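The rewritten query combines all filters, each of which is skipped when its parameter is null, so callers can mix them freely. A sketch of typical calls (the require path is an assumption):

```js
const { event } = require('../../lib/db');  // path assumed

async function examples () {
  // All FGSP-labelled events for a project:
  const fgsp = await event.list('SURVEY-01', { label: 'FGSP' });

  // Events for a set of sequences, limited to those whose meta matches a JSONPath query:
  const mine = await event.list('SURVEY-01', {
    sequences: [101, 102],
    jpq: '$."*ReportLineChangeTime*"'
  });

  return { fgsp, mine };
}
```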
@@ -9,10 +9,10 @@ async function post (projectId, payload, opts = {}) {

  const text = `
    INSERT
    INTO event_log (tstamp, sequence, point, remarks, labels)
    VALUES ($1, $2, $3, replace_placeholders($4, $1, $2, $3), $5);
    INTO event_log (tstamp, sequence, point, remarks, labels, meta)
    VALUES ($1, $2, $3, replace_placeholders($4, $1, $2, $3), $5, $6);
  `;
  const values = [ p.tstamp, p.sequence, p.point, p.remarks, p.labels ];
  const values = [ p.tstamp, p.sequence, p.point, p.remarks, p.labels, p.meta ];

  DEBUG("Inserting new values: %O", values);
  await client.query(text, values);
@@ -1,21 +1,43 @@
// FIXME This code is in painful need of refactoring

const { DEBUG } = require("DOUGAL_ROOT/debug")(__filename);
const { setSurvey, transaction, pool } = require('../connection');
const { listen } = require('../notify');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

let last_tstamp = 0;

let project_configs, listener;

async function getAllProjectConfigs () {
  const client = await pool.connect();

  const res0 = await client.query("SELECT schema FROM projects;");
  const text = res0.rows.map(r => {
    return `SELECT '${r.schema}' AS schema, data FROM ${r.schema}.file_data WHERE (data->>'archived')::boolean IS NOT true AND data->>'id' IS NOT NULL`;
  }).join("\nUNION ALL ");
  async function getFromDatabase () {
    DEBUG("Getting project configurations");
    const client = await pool.connect();

  const res1 = await client.query(text);
  client.release();
  return res1.rows.map(r => Object.assign(r.data, {schema: r.schema}));
    try {
      const text = `
        SELECT schema, meta AS data
        FROM projects
        WHERE (meta->>'archived')::boolean IS NOT true;
      `;
      const res = await client.query(text);
      project_configs = res.rows;
      DEBUG("Have configurations for projects", project_configs.map(i => i.data.id));
    } catch (err) {
      ERROR(err);
    } finally {
      client.release();
    }
    return project_configs;
  }

  if (project_configs) {
    return project_configs;
  } else {
    listener = await listen(["project"], getFromDatabase);
    DEBUG("Added project configuration change listener");
    return await getFromDatabase();
  }
}

async function getNearestPreplot (candidates) {
@@ -74,9 +96,9 @@ async function getNearestOfflinePreplot (candidates) {
    if ("latitude" in candidates[0] && "longitude" in candidates[0]) {
      text = `
        SELECT
          '${c._schema}' AS _schema,
          '${c.schema}' AS schema,
          ST_Distance(ST_Transform(ST_SetSRID(ST_MakePoint($1, $2), 4326), ST_SRID(geometry)), geometry) AS distance
        FROM ${c._schema}.preplot_points
        FROM ${c.schema}.preplot_points
        ORDER BY distance ASC
        LIMIT 1;
      `;
@@ -84,9 +106,9 @@ async function getNearestOfflinePreplot (candidates) {
    } else if ("easting" in candidates[0] && "northing" in candidates[0]) {
      text = `
        SELECT
          '${c._schema}' AS _schema,
          '${c.schema}' AS schema,
          ST_Distance(ST_SetSRID(ST_MakePoint($1, $2), ST_SRID(geometry)), geometry) AS distance
        FROM ${c._schema}.preplot_points
        FROM ${c.schema}.preplot_points
        ORDER BY distance ASC
        LIMIT 1;
      `;
@@ -102,13 +124,13 @@ async function getNearestOfflinePreplot (candidates) {
  const results = [];
  for (const qry of queries) {
    const res = await client.query(qry.text, qry.values);
    if (res.rows[0] && res.rows[0]._schema) {
    if (res.rows[0] && res.rows[0].schema) {
      results.push(res.rows[0]);
    }
  }
  client.release();
  const _schema = results.sort( (a, b) => a.distance - b.distance).shift()?._schema;
  return candidates.find(c => c._schema == _schema);
  const schema = results.sort( (a, b) => a.distance - b.distance).shift()?.schema;
  return candidates.find(c => c.schema == schema);
}

async function saveOnline (dataset, opts = {}) {
@@ -141,14 +163,14 @@ async function saveOnline (dataset, opts = {}) {
        await client.query(`
          INSERT INTO raw_shots
            (sequence, line, point, objref, tstamp, geometry, hash)
          VALUES ($1, $2, $3, $4, $5, ST_SetSRID(ST_MakePoint($6, $7), (SELECT (data->>'epsg')::integer AS epsg FROM file_data WHERE data ? 'id')), '*online*')
          VALUES ($1, $2, $3, $4, $5, ST_SetSRID(ST_MakePoint($6, $7), (select (project_configuration()->>'epsg')::integer as epsg)), '*online*')
          ON CONFLICT DO NOTHING;
        `, [item.sequence, item.line, item.point, 0, item.tstamp, item.easting, item.northing]);
      } else if (item.latitude && item.longitude) {
        await client.query(`
          INSERT INTO raw_shots
            (sequence, line, point, objref, tstamp, geometry, hash)
          VALUES ($1, $2, $3, $4, $5, ST_Transform(ST_SetSRID(ST_MakePoint($6, $7), 4326), (SELECT (data->>'epsg')::integer AS epsg FROM file_data WHERE data ? 'id')), '*online*')
          VALUES ($1, $2, $3, $4, $5, ST_Transform(ST_SetSRID(ST_MakePoint($6, $7), 4326), (select (project_configuration()->>'epsg')::integer as epsg)), '*online*')
          ON CONFLICT DO NOTHING;
        `, [item.sequence, item.line, item.point, 0, item.tstamp, item.longitude, item.latitude]);
      } else {
@@ -158,8 +180,8 @@ async function saveOnline (dataset, opts = {}) {
    }
    await transaction.commit(client);
  } catch (error) {
    console.error("ONLINE DATA INSERT ERROR");
    console.error(error);
    ERROR("ONLINE DATA INSERT ERROR");
    ERROR(error);
    await transaction.rollback(client);
  } finally {
    client.release();
@@ -186,7 +208,7 @@ async function saveOffline (navData, opts = {}) {
|
||||
} else if (schema && hasEastNorth) {
|
||||
const text = `
|
||||
INSERT INTO real_time_inputs (tstamp, geometry, meta)
|
||||
VALUES ($1, ST_Transform(ST_SetSRID(ST_MakePoint($2, $3), (SELECT (data->>'epsg')::integer AS epsg FROM ${schema}.file_data)), 4326), $4);
|
||||
VALUES ($1, ST_Transform(ST_SetSRID(ST_MakePoint($2, $3), (select (project_configuration()->>'epsg')::integer as epsg), 4326), $4);
|
||||
`;
|
||||
|
||||
const values = [navData.tstamp, navData.longitude, navData.latitude, navData.payload];
|
||||
@@ -215,6 +237,37 @@ async function saveOffline (navData, opts = {}) {
|
||||
client.release();
|
||||
}
|
||||
|
||||
async function getCandidates (navData) {
|
||||
|
||||
const configs = await getAllProjectConfigs();
|
||||
|
||||
// We just get the bits of interest: pattern and schema
|
||||
const candidates = configs.map(c => {
|
||||
if (!c?.data?.online?.line || c?.archived === true) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const p = c.data.online.line.pattern; // For short
|
||||
|
||||
const rx = new RegExp(p.regex, p.flags);
|
||||
const matches = navData.lineName.match(rx);
|
||||
|
||||
if (!matches || ((matches.length+1) < p.captures.length)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
matches.shift(); // Get rid of the full matched text
|
||||
const obj = Object.assign({}, navData, {schema: c.schema});
|
||||
p.captures.forEach( (k, i) => {
|
||||
obj[k] = matches[i];
|
||||
});
|
||||
return obj;
|
||||
}).filter(c => !!c);
|
||||
// DEBUG("Candidates: %j", candidates.map(c => c.schema));
|
||||
|
||||
return candidates;
|
||||
}
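To illustrate the matching step above with a concrete but hypothetical configuration — the real regex and capture names are project-specific and not shown in this diff — a line name is decomposed into capture fields roughly like this:

// Hypothetical pattern; the actual one lives in each project's online.line.pattern configuration.
const pattern = { regex: "^(\\d{2})(\\d{4})(\\d{4})S\\d+$", flags: "", captures: ["prefix", "line", "sequence"] };
const navData = { lineName: "2051621081S00000", online: true };

const matches = navData.lineName.match(new RegExp(pattern.regex, pattern.flags));
if (matches) {
  matches.shift(); // drop the full match, keep the capture groups
  const candidate = Object.assign({}, navData, { schema: "survey_xyz" }); // schema name is a placeholder
  pattern.captures.forEach((k, i) => { candidate[k] = matches[i]; });
  console.log(candidate); // { lineName: "2051621081S00000", online: true, schema: "survey_xyz", prefix: "20", line: "5162", sequence: "1081" }
}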

async function save (navData, opts = {}) {

const hasLatLon = ("latitude" in navData && "longitude" in navData);
@@ -222,50 +275,27 @@ async function save (navData, opts = {}) {
const hasLinePoint = ("lineName" in navData && "point" in navData);
if (!(hasLinePoint || hasLatLon || hasEastNorth)) {
// This is of no interest to us
console.warning("Ignoring data without useful values", navData);
NOTICE("Ignoring data without useful values", navData);
return;
}

// DEBUG("navData", navData);

if (navData.online === true) {

// So we have a lineName, see which projects match the line pattern.
// For this we need to get all the project configs
const configs = await getAllProjectConfigs();

// We just get the bits of interest: pattern and schema
const candidates = configs.map(c => {
if (!(c && c.online && c.online.line)) {
return null;
}

const p = c.online.line.pattern; // For short

const rx = new RegExp(p.regex, p.flags);
const matches = navData.lineName.match(rx);

if (!matches || ((matches.length+1) < p.captures.length)) {
return null;
}

matches.shift(); // Get rid of the full matched text
const obj = Object.assign({}, navData, {schema: c.schema});
p.captures.forEach( (k, i) => {
obj[k] = matches[i];
});
return obj;
}).filter(c => !!c);
DEBUG("Candidates: %j", candidates);
// console.log("CANDIDATES", candidates);
const candidates = await getCandidates(navData);

if (candidates.length == 0) {
// This is probably a test line, so we treat it as offline
console.log("No match");
WARNING("No match");
} else {
if (candidates.length == 1) {
// Only one candidate, associate with it
// console.log("Save into schema", candidates[0].match.schema);
await saveOnline(candidates);
navData.payload._schema = candidates[0].match.schema;
navData.payload._schema = candidates[0].schema;
} else {
// More than one candidate, go for the closest. If more than one active
// project with the same preplots, highest numbered schema.
@@ -275,7 +305,7 @@ async function save (navData, opts = {}) {
await saveOnline(candidates.filter(c => c.schema == destinationSchema), opts);
navData.payload._schema = destinationSchema;
} else {
console.log("Nowhere to save to");
WARNING("Nowhere to save to");
}
}

@@ -286,17 +316,18 @@ async function save (navData, opts = {}) {
}
} else {
// We are offline. We only assign _schema once every save_interval seconds at most
// unless there is gun data present.
if (opts.offline_survey_heuristics == "nearest_preplot") {
const now = Date.now();
const do_save = !opts.offline_survey_detect_interval ||
(now - last_tstamp) >= opts.offline_survey_detect_interval;

if (do_save) {
if (do_save || "guns" in navData?.payload) {
const configs = await getAllProjectConfigs();
const candidates = configs.map(c => Object.assign({}, navData, {_schema: c.schema}));
const candidates = await getCandidates(navData);
const bestCandidate = await getNearestOfflinePreplot(candidates);
if (bestCandidate) {
navData.payload._schema = bestCandidate._schema;
navData.payload._schema = bestCandidate.schema;
last_tstamp = now;
}
}
@@ -304,6 +335,7 @@ async function save (navData, opts = {}) {
}

await saveOffline(navData, opts);
DEBUG("Saved");
}

module.exports = save;

@@ -1,5 +1,43 @@
const { makeSubscriber } = require('./connection');
const { makeSubscriber, pool } = require('./connection');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

async function purge () {
DEBUG("Purging old notifications");
const client = await pool.connect();
try {
await client.query("CALL purge_notifications();");
} catch (err) {
ERROR(err);
} finally {
client.release();
}
}

async function fullPayload (payload) {

if (!payload.payload_id) {
return payload;
} else {
let client, res;
try {
client = await pool.connect();
const text = `SELECT payload FROM notify_payloads WHERE id = $1;`;
const values = [ payload.payload_id ];
res = await client.query(text, values);
res = res?.rows[0]?.payload;
DEBUG(`Oversize notification payload retrieved with id ${payload.payload_id} and size ${res.length}`);
// DEBUG(res);
res = JSON.parse(res);
} catch (err) {
ERROR(err);
} finally {
if (client) {
client.release();
}
}
return res;
}
}

async function listen (addChannels, callback) {

@@ -18,11 +56,11 @@ async function listen (addChannels, callback) {

for (const channel of addChannels) {
await client.listenTo(channel);
client.notifications.on(channel, (payload) => {
client.notifications.on(channel, async (payload) => {
const data = {
channel,
_received: new Date(),
payload
payload: await fullPayload(payload)
};
callback(data);
});
@@ -32,5 +70,6 @@ async function listen (addChannels, callback) {
}

module.exports = {
listen
listen,
purge
};
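A minimal sketch of how the exports above might be consumed (the require path and channel name are assumptions): listen() resolves oversized payloads via fullPayload() before invoking the callback, and purge() clears expired rows from notify_payloads (normally driven by the purge-notifications periodic task added later in this changeset).

const { listen, purge } = require('./lib/db/notify'); // path assumed relative to lib/www/server

async function main () {
  await listen(["project"], (data) => {
    // data.payload is the full JSON payload, even when it exceeded the NOTIFY size limit
    console.log(data.channel, data._received, data.payload);
  });
  await purge(); // housekeeping
}

main();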

@@ -36,6 +36,9 @@ async function patch (projectId, payload, opts = {}) {
}
}

// We do not allow users to change the schema
delete payload.schema;

const dest = removeNulls(deepMerge(source, payload));
await modify(projectId, dest);
return dest;

@@ -7,10 +7,11 @@ const { INFO, DEBUG, WARNING, ERROR } = require('DOUGAL_ROOT/debug')(__filename)

function checkSyntax (value, type = "project") {
var requiredFields = {};

switch (type) {
case "project":
var requiredFields = {
requiredFields = {
id: "string",
name: "string",
epsg: "number",
@@ -18,7 +19,7 @@ function checkSyntax (value, type = "project") {
};
break;
case "binning":
var requiredFields = {
requiredFields = {
theta: "number",
I_inc: "number",
J_inc: "number",
@@ -28,23 +29,19 @@ function checkSyntax (value, type = "project") {
}
break
case "origin":
var requiredFields = {
requiredFields = {
easting: "number",
northing: "number",
I: "number",
J: "number"
}
break;
break;
default:
return typeof type == "function"
? type(value)
: typeof value == type;
}

// return Object.entries(requiredFields).every( ([field, test]) => {
// return value.hasOwnProperty(field) && checkSyntax(value[field], test);
// });

for (const [field, test] of Object.entries(requiredFields)) {
if (!value.hasOwnProperty(field)) {
return `Missing required property: ${field}`;

@@ -1,6 +1,7 @@
const fs = require('fs');
const YAML = require('yaml');
const flattenQCDefinitions = require('../../../utils/flattenQCDefinitions');
const { translatePath } = require('../../../utils/logicalPath');
const project = require('../../project'); // lib/db/project

@@ -8,7 +9,7 @@ async function get (projectId, opts = {}) {
const qcConfig = (await project.configuration.get(projectId))?.qc;
if (qcConfig?.definitions) {
try {
const definitions = YAML.parse(fs.readFileSync(qcConfig.definitions).toString());
const definitions = YAML.parse(fs.readFileSync(translatePath(qcConfig.definitions)).toString());

return opts.flat ? flattenQCDefinitions(definitions) : definitions;
} catch (err) {

@@ -1,7 +1,7 @@
const fs = require('fs/promises');
const Path = require('path');
const mime = require('./mime-types');
const { translatePath, logicalRoot } = require('./logical');
const { translatePath, logicalRoot } = require('../utils/logicalPath');
const systemCfg = require('../config');
const projectCfg = require('../db/configuration');

@@ -8,6 +8,7 @@ const { pool, setSurvey, transaction, fetchRow } = require('../db/connection')
const { project, sequence, configuration, info } = require('../db')
const flattenQCDefinitions = require('./flatten');
const { projectHash, sequenceHash } = require('./last-modified');
const { translatePath } = require('../utils/logicalPath');

const { runShotsQC, saveShotsQC } = require('./shots');
const { runSequenceQCs, saveSequenceQCs } = require('./sequences');
@@ -46,8 +47,8 @@ async function getProjectQCConfig (projectId) {
console.log("qcConfig", qcConfig);
if (qcConfig?.definitions && qcConfig?.parameters) {
const definitions =
flattenQCDefinitions(YAML.parse(fs.readFileSync(qcConfig.definitions).toString()));
const parameters = YAML.parse(fs.readFileSync(qcConfig.parameters).toString());
flattenQCDefinitions(YAML.parse(fs.readFileSync(translatePath(qcConfig.definitions)).toString()));
const parameters = YAML.parse(fs.readFileSync(translatePath(qcConfig.parameters)).toString());

return { definitions, parameters };
}

52
lib/www/server/lib/queue/actions-queue.js
Normal file
@@ -0,0 +1,52 @@
const Queue = require('./queue');

// Inspired by:
// https://stackoverflow.com/questions/53540348/js-async-await-tasks-queue#53540586

class ActionsQueue extends Queue {

constructor (items = []) {
super(items);

this.pending = false;
}

enqueue (action) {
return new Promise ((resolve, reject) => {
super.enqueue({ action, resolve, reject });
this.dequeue();
});
}

async dequeue () {

if (this.pending) {
return false;
}

const item = super.dequeue();

if (!item) {
return false;
}

try {

this.pending = true;

const result = await item.action(this);

this.pending = false;
item.resolve(result);
} catch (err) {
this.pending = false;
item.reject(err);
} finally {
this.dequeue();
}

}

}

module.exports = ActionsQueue;
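A usage sketch for the class above (require path assumed): ActionsQueue serialises asynchronous actions so that only one runs at a time, and each enqueue() returns a promise that settles with that action's result.

const ActionsQueue = require('./lib/www/server/lib/queue/actions-queue'); // path from the repository root

const queue = new ActionsQueue();

const first = queue.enqueue(async () => {
  // e.g. a database write that must not overlap with the next one
  return "first done";
});
const second = queue.enqueue(async () => "second done"); // runs only after the first action settles

Promise.all([first, second]).then(results => console.log(results)); // ["first done", "second done"]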
6
lib/www/server/lib/queue/index.js
Normal file
@@ -0,0 +1,6 @@

module.exports = {
Queue: require('./queue'),
ActionsQueue: require('./actions-queue')
};

22
lib/www/server/lib/queue/queue.js
Normal file
@@ -0,0 +1,22 @@

class Queue {

constructor (items = []) {
this.items = items;
}

enqueue (item) {
this.items.push(item);
}

dequeue () {
return this.items.shift();
}

length () {
return this.items.length;
}

}

module.exports = Queue;
@@ -5,5 +5,8 @@ module.exports = {
replaceMarkers: require('./replaceMarkers'),
flattenQCDefinitions: require('./flattenQCDefinitions'),
deepMerge: require('./deepMerge'),
removeNulls: require('./removeNulls')
removeNulls: require('./removeNulls'),
logicalPath: require('./logicalPath'),
ranges: require('./ranges'),
unique: require('./unique')
};

@@ -10,6 +10,7 @@ function translatePath (file) {
return physicalPath;
} else {
// An attempt to break out of the logical path?
console.warn("Attempting to break out of the logical path?", physicalPath, prefix);
throw {
status: 404,
message: "Not found"
74
lib/www/server/lib/utils/ranges.js
Normal file
@@ -0,0 +1,74 @@

function parseRange (str) {
const rx = /^[\[(].*,.*[)\]]$/

if (rx.test(str)) {
const lower_inclusive = str[0] == '[';
const upper_inclusive = str[str.length-1] == ']';
const [ lower, upper ] = str.slice(1,-1).split(",");
return {
upper,
lower,
upper_inclusive,
lower_inclusive
};
}
}

function parseValidity (str) {
const range = parseRange(str);

if (range) {
ts0 = range.lower ? new Date(range.lower) : null;
ts1 = range.upper ? new Date(range.upper) : null;

return {
...range,
lower: ts0,
upper: ts1
};
}
}

function withinValidity (range, ts) {
if (!ts) {
ts = new Date();
}

if (typeof range === "string") {
range = parseValidity(range);
}

if (range.lower) {
if (range.lower_inclusive) {
if (!(range.lower <= ts)) {
return false;
}
} else {
if (!(range.lower < ts)) {
return false;
}
}
}

if (range.upper) {
if (range.upper_inclusive) {
if (!(range.upper >= ts)) {
return false;
}
} else {
if (!(range.upper > ts)) {
return false;
}
}
}

return true;
}

module.exports = {
parseRange,
parseValidity,
withinValidity
}
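A usage sketch for the helpers above (values are illustrative): the range strings follow the PostgreSQL range-literal form, with [ ] for inclusive and ( ) for exclusive bounds.

const { parseValidity, withinValidity } = require('./lib/www/server/lib/utils/ranges'); // path from the repository root

const validity = "[2023-10-01T00:00:00Z,2023-11-01T00:00:00Z)";
console.log(parseValidity(validity)); // lower/upper as Dates, lower_inclusive: true, upper_inclusive: false
console.log(withinValidity(validity, new Date("2023-10-15T12:00:00Z"))); // true
console.log(withinValidity(validity, new Date("2023-11-01T00:00:00Z"))); // false (upper bound exclusive)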

6
lib/www/server/lib/utils/unique.js
Normal file
@@ -0,0 +1,6 @@

function unique(array) {
return [...new Set(array)];
}

module.exports = unique;
38
lib/www/server/periodic-tasks/index.js
Normal file
@@ -0,0 +1,38 @@
const tasks = require('./tasks');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

function init () {
const iids = [];

function start () {
INFO("Initialising %d periodic tasks", tasks.length);
for (let t of tasks) {
const iid = setInterval(t.task, t.timeout);
iids.push(iid);
}
return iids;
};

function stop () {
INFO("Stopping %d periodic tasks", iids.length);
for (let iid of iids) {
clearInterval(iid);
}
}

async function cleanup () {
stop();
DEBUG("Cleaning up %d periodic tasks", tasks.length);
for (let t of tasks) {
if (t.cleanup) {
await t.cleanup();
}
}
}

return { start, stop, cleanup, iids };
}

module.exports = {
init
};
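A sketch of how the scheduler above might be wired into the server lifecycle (entry point and signal handling are assumptions, not part of this changeset):

const periodicTasks = require('./periodic-tasks'); // path assumed relative to lib/www/server

const { start, cleanup } = periodicTasks.init();
start(); // schedules each task with setInterval(task, timeout)

process.on('SIGTERM', async () => {
  await cleanup(); // stops the intervals and awaits each task's cleanup()
  process.exit(0);
});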
4
lib/www/server/periodic-tasks/tasks/index.js
Normal file
@@ -0,0 +1,4 @@

module.exports = [
require('./purge-notifications')
];
20
lib/www/server/periodic-tasks/tasks/purge-notifications.js
Normal file
@@ -0,0 +1,20 @@
const { purge } = require('../../lib/db/notify');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

const timeout = 120*1000; // 2 minutes

function task () {
DEBUG("Running task");
purge();
}

async function cleanup () {
DEBUG("Running cleanup");
await purge();
}

module.exports = {
task,
timeout,
cleanup
};
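The module above also illustrates the contract that periodic-tasks/tasks/index.js appears to expect from every task module: an exported task(), a timeout in milliseconds and an optional async cleanup(). A hypothetical additional task would follow the same shape:

// Hypothetical example only; no such task exists in this changeset.
const timeout = 60 * 1000; // run once a minute

function task () {
  // periodic work goes here
}

async function cleanup () {
  // final run or resource release on shutdown
}

module.exports = { task, timeout, cleanup };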
@@ -180,6 +180,16 @@ components:
required: true
example: 14707

Since:
description: Starting epoch
name: since
in: path
schema:
type: string
format: date-time
required: true
example: 1970-01-01T00:00:00Z

QueryLimit:
description: Maximum number of results to return
name: limit
@@ -206,6 +216,16 @@ components:
pattern: "(([^\\s,;:]+)(\\s*[,;:\\s]\\s*)?)+"
example: "line,point,tstamp"

Unique:
description: |
Return unique results. Any value at all represents `true`.
name: unique
in: query
schema:
type: string
pattern: ".+"
example: "t"

schemas:
Duration:
@@ -602,14 +622,26 @@ components:
Flag to indicate that this event is read-only. It cannot be edited by the user or deleted. Typically this concerns system-generated events such as QC results or midnight shots.
additionalProperties: true

EventIDAbstract:
type: object
properties:
id:
type: number
description: Event ID.

EventUIDAbstract:
type: object
properties:
uid:
type: number
description: Event instance unique ID. When an event is modified, the new entry acquires a different `uid` while keeping the same `id` as the original event.

EventAbstract:
allOf:
-
type: object
properties:
id:
type: number
description: Event ID.
$ref: "#/components/schemas/EventIDAbstract"
-
$ref: "#/components/schemas/EventNew"

@@ -659,6 +691,47 @@ components:
* The third element is either an ISO-8601 timestamp or `null`. The latter indicates +∞. These are the events returned by endpoints that do not concern themselves with event history.
* The fourth element is one of `]` or `)`. As before, it indicates either an open or closed interval.

EventChangesIsDeletedAbstract:
type: object
properties:
is_deleted:
type: boolean
description: >
Flag to indicate whether this event or event instance (depending on the presence of a `uid` attribute) has been deleted.

EventChangesModified:
description: An event modification.
allOf:
-
$ref: "#/components/schemas/EventAbstract"
-
$ref: "#/components/schemas/EventChangesIsDeletedAbstract"

EventChangesDeleted:
description: |
Identification of a deleted event or event instance.

**Note:** the details of the deleted event are not included, only its `id` and `uid`.
allOf:
-
$ref: "#/components/schemas/EventIDAbstract"
-
$ref: "#/components/schemas/EventUIDAbstract"
-
$ref: "#/components/schemas/EventChangesIsDeletedAbstract"

EventChanges:
description: List of event changes since the given epoch.
type: array
items:
anyOf:
-
$ref: "#/components/schemas/EventChangesDeleted"
-
$ref: "#/components/schemas/EventChangesModified"

SeisExportEntryFSP:
type: object
properties:
@@ -1159,9 +1232,55 @@ paths:
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/PlannedSequence"
type: object
properties:
remarks:
type: string
description: Planner remarks
sequences:
type: array
items:
$ref: "#/components/schemas/PlannedSequence"
text/csv:
schema:
type: string
format: csv
description: |
Returns a CSV response containing one row for each planned sequence, with the following columns:

* `sequence`: Sequence number
* `line`: Line number
* `fsp`: First shotpoint
* `lsp`: Last shotpoint
* `ts0`: Estimated timestamp of the first shotpoint
* `ts1`: Estimated timestamp of the last shotpoint
* `name`: Line name
* `remarks`: Arbitrary comments
* `num_points`: Number of shotpoints
* `duration`: Estimated duration in seconds
* `length`: Line length in metres
* `azimuth`: Line azimuth
* `lon0`: Longitude of the first shotpoint
* `lat0`: Latitude of the first shotpoint
* `lon1`: Longitude of the last shotpoint
* `lat1`: Latitude of the last shotpoint
example: |
"sequence","line","fsp","lsp","ts0","ts1","name","remarks","num_points","duration","length","azimuth","lon0","lat0","lon1","lat1"
81,5162,2422,1158,"2023-10-22T11:09:24.912Z","2023-10-22T12:56:03.395Z","2051621081S00000","",633,6398,15799.988472147348,26.4703415983101,2.474872,59.086695,2.596266,59.214146
82,5178,2444,1146,"2023-10-22T12:56:03.000Z","2023-10-22T14:45:33.607Z","2051781082S00000","",650,6570,16225.02094944685,26.470137885560813,2.469632,59.085264,2.594277,59.216147
text/html:
schema:
type: string
format: html
description: |
An HTML representation of the plan.
application/pdf:
schema:
type: string
contentMediaType: application/pdf
description: |
A PDF representation of the plan.

post:
description: Add a new sequence to the plan.
@@ -1382,6 +1501,31 @@ paths:
$ref: "#/components/responses/401"

/project/{project}/changes/{since}:
get:
summary: Get event change history since epoch.
tags: [ "log" ]
security:
- BearerAuthGuest: []
- CookieAuthGuest: []
parameters:
- $ref: "#/components/parameters/Project"
- $ref: "#/components/parameters/Since"
- $ref: "#/components/parameters/Unique"
responses:
"200":
description: List of project event changes. If `unique` is given, only the latest version of each event will be returned, otherwise the entire modification history is given, potentially including the same event `id` multiple times.
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/EventChanges"

"401":
$ref: "#/components/responses/401"

/project/{project}/label:
get:
summary: Get project labels.

@@ -95,7 +95,8 @@ for (const header of (cfg._("global.navigation.headers") || []).filter(h => h.ty
const server = dgram.createSocket('udp4');

server.on('error', (err) => {
console.error(`server error:\n${err.stack}`);
ERROR(err);
// console.error(`server error:\n${err.stack}`);
maybeSendError(err, {title: "UDP listener error on port "+header.port});
// server.close();
});

@@ -1,71 +0,0 @@
const { pool } = require('../lib/db/connection');

var client;

const channels = {};

async function notify (data) {

if (data.channel in channels) {
data._received = new Date();
try {
const json = JSON.parse(data.payload);
data.payload = json;
} catch {
// Ignore the error
}
for (const listener of channels[data.channel]) {
await listener(JSON.parse(JSON.stringify(data)));
}
}
}

function reconnect () {
console.log("Reconnecting");
// No need to provide parameters, channels should already be populated.
listen();
}

async function listen (addChannels, callback) {
if (!client) {
try {
client = await pool.connect();
} catch (err) {
console.error("Error connecting to DB", err);
console.log("Will try again in 15 seconds");
setImmediate(() => client = null);
setTimeout(() => {
listen(addChannels, callback);
}, 15000);
return;
}
client.on('notification', notify);
console.log("Websocket client connected", Object.keys(channels));
client.on('error', (err) => console.error("Events client error: ", err));
client.on('end', () => {
console.warn("Websocket events client disconnected. Will attempt to reconnect in five seconds");
setImmediate(() => client = null);
setTimeout(reconnect, 5000);
});
}

if (addChannels) {
if (!Array.isArray(addChannels)) {
addChannels = [addChannels];
}

for (const channel of addChannels) {
if (!(channel in channels)) {
await client.query("LISTEN "+channel);
channels[channel] = [];
console.log("Listening to ", channel);
}

channels[channel].push(callback);
}
}
}

module.exports = {
listen
}
@@ -1,6 +1,6 @@
const ws = require('ws');
const URL = require('url');
const db = require('./db');
const { listen } = require('../lib/db/notify');
const channels = require('../lib/db/channels');

function start (server, pingInterval=30000) {
@@ -22,7 +22,7 @@ function start (server, pingInterval=30000) {
}
});

db.listen(channels, (data) => {
listen(channels, (data) => {
wsServer.clients.forEach( (socket) => {
socket.send(JSON.stringify(data));
})

@@ -16,7 +16,12 @@ OUTPATH="$OUTDIR/$OUTNAME"
# 30000/UDP: Navigation system headers
# Not all inputs will be present in all systems.
#
EXPR="udp and (port 4461 or port 4462 or port 30000)"
# NOTE: $INS_HOST must be defined and point to the
# navigation server. The reason we don't use a port
# filter for this data is because that doesn't work
# with fragmented UDP packets.
#
EXPR="udp and (port 4461 or port 4462 or src host $INS_HOST)"

if [[ ! -d "$OUTDIR" ]]; then
mkdir "$OUTDIR"

42
sbin/rewrite-captures.sh
Executable file
@@ -0,0 +1,42 @@
#!/bin/bash
#
# Rewrite packet captures in order to be able to replay them.
#
# SINET: Rewrite all packets with this source IP address
# SETHER: Rewrite all packets with this MAC
#
# DINET: Rewrite all packets with this destination IP address
# DETHER: Rewrite all packets with this destination MAC address
#
# The resulting files have the original name with "-rewritten.pcap"
# appended as a suffix. Those packets may then be replayed from a
# different computer or virtual container, for instance with:
#
# sudo bittwist -i 1 -v -m10 capture-rewritten.pcap
#
# Where -i n is the interface name (use bittwist -d to list available
# interfaces), -v is the verbose flag and -m10 replays at 10× speed.
#

SINET=${SINET:-$(ip -o -4 addr |grep -v " lo " |head -n 1 |sed -r 's/^.*inet\s([0-9.]+).*$/\1/')}
SETHER=${SETHER:-$(ip -o link |grep -v " lo" |head -n 1 |sed -r 's/^.*ether\s([0-9a-fA-F:]+).*$/\1/')}

DINET=${DINET:-$(ip -o -4 addr |grep -v " lo " |head -n 1 |sed -r 's/^.*inet\s([0-9.]+).*$/\1/')}
DETHER=${DETHER:-$(ip -o link |grep -v " lo" |head -n 1 |sed -r 's/^.*ether\s([0-9a-fA-F:]+).*$/\1/')}

for f in $*; do

OUTFNAME=$f-rewritten.pcap
echo $f → $OUTFNAME
if [[ -n "$SINET" && -n "$SETHER" ]]; then
tcprewrite -S 0.0.0.0/0:$SINET --enet-smac=$SETHER \
-D 0.0.0.0/0:$DINET --enet-dmac=$DETHER \
--infile "$f" \
--outfile "$OUTFNAME"
else
tcprewrite -D 0.0.0.0/0:$DINET --enet-dmac=$DETHER \
--infile "$f" \
--outfile "$OUTFNAME"
fi

done