Mirror of https://gitlab.com/wgp/dougal/software.git (synced 2025-12-06 10:57:07 +00:00)

Compare commits: 245-export...280-consol (97 commits)
| SHA1 |
|---|
| 4c2a2617a1 |
| 5021888d03 |
| bf633f7fdf |
| 847f49ad7c |
| 171feb9dd2 |
| 503a0de12f |
| cf89a43f64 |
| 680e376ed1 |
| a26974670a |
| 16a6cb59dc |
| 829e206831 |
| 83244fcd1a |
| 851369a0b4 |
| 5065d62443 |
| 2d1e1e9532 |
| 051049581a |
| da5ae18b0b |
| ac9353c101 |
| c4c5c44bf1 |
| d3659ebf02 |
| 6b5070e634 |
| 09ff96ceee |
| f231acf109 |
| e576e1662c |
| 6a21ddd1cd |
| c1e35b2459 |
| eee2a96029 |
| 6f5e5a4d20 |
| 9e73cb7e00 |
| d7ab4eec7c |
| cdd96a4bc7 |
| 39a21766b6 |
| 0e33c18b5c |
| 7f411ac7dd |
| ed1da11c9d |
| 66ec28dd83 |
| b928d96774 |
| 73335f9c1e |
| 7b6b81dbc5 |
| 2e11c574c2 |
| d07565807c |
| 6eccbf215a |
| 8abc05f04e |
| 8f587467f9 |
| 3d7a91c7ff |
| 3fd408074c |
| f71cbd8f51 |
| 915df8ac16 |
| d5ecb08a2d |
| 9388cd4861 |
| 180590b411 |
| 4ec37539bf |
| 8755fe01b6 |
| 0bfe54e0c2 |
| 29bc689b84 |
| 65682febc7 |
| d408665d62 |
| 64fceb0a01 |
| ab58e578c9 |
| 0e58b8fa5b |
| 99ac082f00 |
| 4d3fddc051 |
| 42456439a9 |
| ee0c0e7308 |
| 998c272bf8 |
| daddd1f0e8 |
| 17f20535cb |
| 0829ea3ea1 |
| 2069d9c3d7 |
| 8a2d526c50 |
| 8ad96d6f73 |
| 947faf8c05 |
| a948556455 |
| 835384b730 |
| c5b93794f4 |
| 056cd32f0e |
| 49bb413110 |
| ceccc42050 |
| aa3379e1c6 |
| 4063af0e25 |
| d53e6060a4 |
| 85d8fc8cc0 |
| 0fe40b1839 |
| 21de4b757f |
| 96cdbb2cff |
| d531643b58 |
| a1779ef488 |
| 5239dece1e |
| a7d7837816 |
| ebcfc7df47 |
| dc4b9002fe |
| 33618b6b82 |
| 597d407acc |
| 6162a5bdee |
| 696bbf7a17 |
| 821fcf0922 |
| b1712d838f |
.gitignore (vendored, 2 changes)
@@ -11,3 +11,5 @@ lib/www/client/dist/
 etc/surveys/*.yaml
 !etc/surveys/_*.yaml
+etc/ssl/*
+etc/config.yaml
 var/*
@@ -11,11 +11,9 @@ from datastore import Datastore

 if __name__ == '__main__':

-    print("Reading configuration")
-    surveys = configuration.surveys()
-
     print("Connecting to database")
     db = Datastore()
+    surveys = db.surveys()

     print("Reading surveys")
     for survey in surveys:
@@ -115,7 +115,10 @@ if __name__ == '__main__':

             process(layer_name, layer, realprefix)

-        else:
+        elif os.path.isdir(realprefix):
+
+            if not "globs" in layer:
+                layer["globs"] = [ "**/*.geojson" ]

             for globspec in layer["globs"]:
                 for physical_filepath in pathlib.Path(realprefix).glob(globspec):
@@ -132,14 +132,14 @@ run $BINDIR/import_preplots.py

 print_log "Import raw P1/11"
 run $BINDIR/import_raw_p111.py

-print_log "Import raw P1/90"
-run $BINDIR/import_raw_p190.py
+#print_log "Import raw P1/90"
+#run $BINDIR/import_raw_p190.py

 print_log "Import final P1/11"
 run $BINDIR/import_final_p111.py

-print_log "Import final P1/90"
-run $BINDIR/import_final_p190.py
+#print_log "Import final P1/90"
+#run $BINDIR/import_final_p190.py

 print_log "Import SmartSource data"
 run $BINDIR/import_smsrc.py
@@ -1,5 +1,5 @@
 \connect dougal

-INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.13"}')
+INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.2"}')
 ON CONFLICT (key) DO UPDATE
-SET value = public.info.value || '{"db_schema": "0.3.13"}' WHERE public.info.key = 'version';
+SET value = public.info.value || '{"db_schema": "0.4.2"}' WHERE public.info.key = 'version';
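The version stamp relies on PostgreSQL's jsonb `||` operator, which merges two objects with right-hand keys winning, so any other keys stored under 'version' survive the upsert. A minimal sketch of the semantics, using a throwaway table rather than the real public.info (the extra "app" key is invented for illustration):

```sql
-- Scratch table mimicking public.info's key/value layout (illustrative only).
CREATE TEMP TABLE info (key text PRIMARY KEY, value jsonb);
INSERT INTO info VALUES ('version', '{"app": "1.0", "db_schema": "0.3.13"}');

-- Same upsert pattern as the upgrade scripts: insert if the key is absent,
-- otherwise merge the new fragment into the existing jsonb value.
INSERT INTO info VALUES ('version', '{"db_schema": "0.4.2"}')
ON CONFLICT (key) DO UPDATE
SET value = info.value || '{"db_schema": "0.4.2"}' WHERE info.key = 'version';

SELECT value FROM info WHERE key = 'version';
-- => {"app": "1.0", "db_schema": "0.4.2"}   (the unrelated "app" key survives)
```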
@@ -2,8 +2,8 @@
 -- PostgreSQL database dump
 --

--- Dumped from database version 14.2
--- Dumped by pg_dump version 14.2
+-- Dumped from database version 14.8
+-- Dumped by pg_dump version 14.9

 SET statement_timeout = 0;
 SET lock_timeout = 0;
@@ -70,173 +70,171 @@ If the path matches that of an existing entry, delete that entry (which cascades
 CREATE PROCEDURE _SURVEY__TEMPLATE_.adjust_planner()
     LANGUAGE plpgsql
     AS $$
 DECLARE
   _planner_config jsonb;
   _planned_line planned_lines%ROWTYPE;
   _lag interval;
   _last_sequence sequences_summary%ROWTYPE;
   _deltatime interval;
   _shotinterval interval;
   _tstamp timestamptz;
   _incr integer;
 BEGIN

   SET CONSTRAINTS planned_lines_pkey DEFERRED;

-  SELECT data->'planner'
-  INTO _planner_config
-  FROM file_data
-  WHERE data ? 'planner';
+  SELECT project_configuration()->'planner'
+  INTO _planner_config;

   SELECT *
   INTO _last_sequence
   FROM sequences_summary
   ORDER BY sequence DESC
   LIMIT 1;

   SELECT *
   INTO _planned_line
   FROM planned_lines
   WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;

   SELECT
     COALESCE(
       ((lead(ts0) OVER (ORDER BY sequence)) - ts1),
       make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer)
     )
   INTO _lag
   FROM planned_lines
   WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;

   _incr = sign(_last_sequence.lsp - _last_sequence.fsp);

   RAISE NOTICE '_planner_config: %', _planner_config;
   RAISE NOTICE '_last_sequence: %', _last_sequence;
   RAISE NOTICE '_planned_line: %', _planned_line;
   RAISE NOTICE '_incr: %', _incr;

   -- Does the latest sequence match a planned sequence?
   IF _planned_line IS NULL THEN -- No it doesn't
     RAISE NOTICE 'Latest sequence shot does not match a planned sequence';
     SELECT * INTO _planned_line FROM planned_lines ORDER BY sequence ASC LIMIT 1;
     RAISE NOTICE '_planned_line: %', _planned_line;

     IF _planned_line.sequence <= _last_sequence.sequence THEN
       RAISE NOTICE 'Renumbering the planned sequences starting from %', _planned_line.sequence + 1;
       -- Renumber the planned sequences starting from last shot sequence number + 1
       UPDATE planned_lines
       SET sequence = sequence + _last_sequence.sequence - _planned_line.sequence + 1;
     END IF;

     -- The correction to make to the first planned line's ts0 will be based on either the last
     -- sequence's EOL + default line change time or the current time, whichever is later.
     _deltatime := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1) + make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer), current_timestamp) - _planned_line.ts0;

     -- Is the first of the planned lines start time in the past? (±5 mins)
     IF _planned_line.ts0 < (current_timestamp - make_interval(mins => 5)) THEN
       RAISE NOTICE 'First planned line is in the past. Adjusting times by %', _deltatime;
       -- Adjust the start / end time of the planned lines by assuming that we are at
       -- `defaultLineChangeDuration` minutes away from SOL of the first planned line.
       UPDATE planned_lines
       SET
         ts0 = ts0 + _deltatime,
         ts1 = ts1 + _deltatime;
     END IF;

   ELSE -- Yes it does
     RAISE NOTICE 'Latest sequence does match a planned sequence: %, %', _planned_line.sequence, _planned_line.line;

     -- Is it online?
     IF EXISTS(SELECT 1 FROM raw_lines_files WHERE sequence = _last_sequence.sequence AND hash = '*online*') THEN
       -- Yes it is
       RAISE NOTICE 'Sequence % is online', _last_sequence.sequence;

       -- Let us get the SOL from the events log if we can
       RAISE NOTICE 'Trying to set fsp, ts0 from events log FSP, FGSP';
       WITH e AS (
         SELECT * FROM event_log
         WHERE
           sequence = _last_sequence.sequence
           AND ('FSP' = ANY(labels) OR 'FGSP' = ANY(labels))
         ORDER BY tstamp LIMIT 1
       )
       UPDATE planned_lines
       SET
         fsp = COALESCE(e.point, fsp),
         ts0 = COALESCE(e.tstamp, ts0)
       FROM e
       WHERE planned_lines.sequence = _last_sequence.sequence;

       -- Shot interval
       _shotinterval := (_last_sequence.ts1 - _last_sequence.ts0) / abs(_last_sequence.lsp - _last_sequence.fsp);

       RAISE NOTICE 'Estimating EOL from current shot interval: %', _shotinterval;

       SELECT (abs(lsp-fsp) * _shotinterval + ts0) - ts1
       INTO _deltatime
       FROM planned_lines
       WHERE sequence = _last_sequence.sequence;

       ---- Set ts1 for the current sequence
       --UPDATE planned_lines
       --SET
       --ts1 = (abs(lsp-fsp) * _shotinterval) + ts0
       --WHERE sequence = _last_sequence.sequence;

       RAISE NOTICE 'Adjustment is %', _deltatime;

       IF abs(EXTRACT(EPOCH FROM _deltatime)) < 8 THEN
         RAISE NOTICE 'Adjustment too small (< 8 s), so not applying it';
         RETURN;
       END IF;

       -- Adjust ts1 for the current sequence
       UPDATE planned_lines
       SET ts1 = ts1 + _deltatime
       WHERE sequence = _last_sequence.sequence;

       -- Now shift all sequences after
       UPDATE planned_lines
       SET ts0 = ts0 + _deltatime, ts1 = ts1 + _deltatime
       WHERE sequence > _last_sequence.sequence;

       RAISE NOTICE 'Deleting planned sequences before %', _planned_line.sequence;
       -- Remove all previous planner entries.
       DELETE
       FROM planned_lines
       WHERE sequence < _last_sequence.sequence;

     ELSE
       -- No it isn't
       RAISE NOTICE 'Sequence % is offline', _last_sequence.sequence;

       -- We were supposed to finish at _planned_line.ts1 but we finished at:
       _tstamp := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1), current_timestamp);
       -- WARNING Next line is for testing only
       --_tstamp := COALESCE(_last_sequence.ts1_final, _last_sequence.ts1);
       -- So we need to adjust timestamps by:
       _deltatime := _tstamp - _planned_line.ts1;

       RAISE NOTICE 'Planned end: %, actual end: % (%, %)', _planned_line.ts1, _tstamp, _planned_line.sequence, _last_sequence.sequence;
       RAISE NOTICE 'Shifting times by % for sequences > %', _deltatime, _planned_line.sequence;
       -- NOTE: This won't work if sequences are not, err… sequential.
       -- NOTE: This has been known to happen in 2020.
       UPDATE planned_lines
       SET
         ts0 = ts0 + _deltatime,
         ts1 = ts1 + _deltatime
       WHERE sequence > _planned_line.sequence;

       RAISE NOTICE 'Deleting planned sequences up to %', _planned_line.sequence;
       -- Remove all previous planner entries.
       DELETE
       FROM planned_lines
       WHERE sequence <= _last_sequence.sequence;

     END IF;

   END IF;
 END;
 $$;
@@ -367,8 +365,8 @@ COMMENT ON PROCEDURE _SURVEY__TEMPLATE_.augment_event_data(IN maxspan numeric) I
 CREATE FUNCTION _SURVEY__TEMPLATE_.binning_parameters() RETURNS jsonb
     LANGUAGE sql STABLE LEAKPROOF PARALLEL SAFE
     AS $$
-  SELECT data->'binning' binning FROM file_data WHERE data->>'binning' IS NOT NULL LIMIT 1;
+  SELECT project_configuration()->'binning' binning;
 $$;


 ALTER FUNCTION _SURVEY__TEMPLATE_.binning_parameters() OWNER TO postgres;
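binning_parameters() now just projects the 'binning' section out of the project configuration instead of scanning file_data. Consumers such as ij_error() read its keys directly; a hedged sketch of that access pattern (the key names below are the ones ij_error() uses; no values are asserted):

```sql
-- Inside a survey schema, after this change:
SELECT binning_parameters();                               -- the whole 'binning' object
SELECT (binning_parameters()->>'theta')::numeric;          -- grid rotation, in degrees
SELECT (binning_parameters()->'origin'->>'easting')::numeric,
       (binning_parameters()->'origin'->>'northing')::numeric;
```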
@@ -671,7 +669,7 @@ BEGIN
     id <> NEW.id
     AND label = NEW.label
     AND id IN (SELECT id FROM events_seq WHERE sequence = _sequence);

   DELETE
   FROM events_timed_labels
   WHERE
@@ -854,7 +852,7 @@ CREATE FUNCTION _SURVEY__TEMPLATE_.ij_error(line double precision, point double
 DECLARE
   bp jsonb := binning_parameters();
   ij public.geometry := to_binning_grid(geom, bp);

   theta numeric := (bp->>'theta')::numeric * pi() / 180;
   I_inc numeric DEFAULT 1;
   J_inc numeric DEFAULT 1;
@@ -869,13 +867,13 @@ DECLARE
   yoff numeric := (bp->'origin'->>'J')::numeric;
   E0 numeric := (bp->'origin'->>'easting')::numeric;
   N0 numeric := (bp->'origin'->>'northing')::numeric;

   error_i double precision;
   error_j double precision;
 BEGIN
   error_i := (public.st_x(ij) - line) * I_width;
   error_j := (public.st_y(ij) - point) * J_width;

   RETURN public.ST_MakePoint(error_i, error_j);
 END
 $$;
@@ -1042,6 +1040,39 @@ ALTER PROCEDURE _SURVEY__TEMPLATE_.log_midnight_shots(IN dt0 date, IN dt1 date)
 COMMENT ON PROCEDURE _SURVEY__TEMPLATE_.log_midnight_shots(IN dt0 date, IN dt1 date) IS 'Add midnight shots between two dates dt0 and dt1 to the event_log, unless the events already exist.';


+--
+-- Name: project_configuration(); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
+--
+
+CREATE FUNCTION _SURVEY__TEMPLATE_.project_configuration() RETURNS jsonb
+    LANGUAGE plpgsql
+    AS $$
+DECLARE
+  schema_name text;
+  configuration jsonb;
+BEGIN
+
+  SELECT nspname
+  INTO schema_name
+  FROM pg_namespace
+  WHERE oid = (
+    SELECT pronamespace
+    FROM pg_proc
+    WHERE oid = 'project_configuration'::regproc::oid
+  );
+
+  SELECT meta
+  INTO configuration
+  FROM public.projects
+  WHERE schema = schema_name;
+
+  RETURN configuration;
+END
+$$;
+
+
+ALTER FUNCTION _SURVEY__TEMPLATE_.project_configuration() OWNER TO postgres;
+
 --
 -- Name: replace_placeholders(text, timestamp with time zone, integer, integer); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
 --
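The new function figures out which survey schema it belongs to by looking up its own pg_proc entry, then returns that project's meta document from public.projects. This makes per-project settings available to any function in the schema without touching file_data. A brief usage sketch (the schema name is hypothetical):

```sql
SET search_path TO survey_example, public;   -- survey_example is a made-up schema name

SELECT project_configuration();              -- the whole configuration document
SELECT project_configuration()->'planner';   -- the section adjust_planner() consumes
SELECT (project_configuration()->'planner'->>'defaultLineChangeDuration')::integer;
```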
@@ -0,0 +1,122 @@
-- Create a project_configuration() function in each survey schema
--
-- New schema version: 0.4.0
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This adapts the schema to the change in how project configurations are
-- handled (https://gitlab.com/wgp/dougal/software/-/merge_requests/29)
-- by creating a project_configuration() function which returns the
-- current project's configuration data.
--
-- To apply, run as the dougal user:
--
--     psql <<EOF
--     \i $THIS_FILE
--     COMMIT;
--     EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION project_configuration()
  RETURNS jsonb
  LANGUAGE plpgsql
  AS $$
  DECLARE
    schema_name text;
    configuration jsonb;
  BEGIN

    SELECT nspname
    INTO schema_name
    FROM pg_namespace
    WHERE oid = (
      SELECT pronamespace
      FROM pg_proc
      WHERE oid = 'project_configuration'::regproc::oid
    );

    SELECT meta
    INTO configuration
    FROM public.projects
    WHERE schema = schema_name;

    RETURN configuration;
  END
  $$;

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.0' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.3.12' AND current_db_version != '0.3.13' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
          HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.0"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.0"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,264 @@
-- Use project_configuration() in adjust_planner()
--
-- New schema version: 0.4.1
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This modifies adjust_planner() to use project_configuration()
--
-- To apply, run as the dougal user:
--
--     psql <<EOF
--     \i $THIS_FILE
--     COMMIT;
--     EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE PROCEDURE adjust_planner()
  LANGUAGE plpgsql
  AS $$
  DECLARE
    _planner_config jsonb;
    _planned_line planned_lines%ROWTYPE;
    _lag interval;
    _last_sequence sequences_summary%ROWTYPE;
    _deltatime interval;
    _shotinterval interval;
    _tstamp timestamptz;
    _incr integer;
  BEGIN

    SET CONSTRAINTS planned_lines_pkey DEFERRED;

    SELECT project_configuration()->'planner'
    INTO _planner_config;

    SELECT *
    INTO _last_sequence
    FROM sequences_summary
    ORDER BY sequence DESC
    LIMIT 1;

    SELECT *
    INTO _planned_line
    FROM planned_lines
    WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;

    SELECT
      COALESCE(
        ((lead(ts0) OVER (ORDER BY sequence)) - ts1),
        make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer)
      )
    INTO _lag
    FROM planned_lines
    WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;

    _incr = sign(_last_sequence.lsp - _last_sequence.fsp);

    RAISE NOTICE '_planner_config: %', _planner_config;
    RAISE NOTICE '_last_sequence: %', _last_sequence;
    RAISE NOTICE '_planned_line: %', _planned_line;
    RAISE NOTICE '_incr: %', _incr;

    -- Does the latest sequence match a planned sequence?
    IF _planned_line IS NULL THEN -- No it doesn't
      RAISE NOTICE 'Latest sequence shot does not match a planned sequence';
      SELECT * INTO _planned_line FROM planned_lines ORDER BY sequence ASC LIMIT 1;
      RAISE NOTICE '_planned_line: %', _planned_line;

      IF _planned_line.sequence <= _last_sequence.sequence THEN
        RAISE NOTICE 'Renumbering the planned sequences starting from %', _planned_line.sequence + 1;
        -- Renumber the planned sequences starting from last shot sequence number + 1
        UPDATE planned_lines
        SET sequence = sequence + _last_sequence.sequence - _planned_line.sequence + 1;
      END IF;

      -- The correction to make to the first planned line's ts0 will be based on either the last
      -- sequence's EOL + default line change time or the current time, whichever is later.
      _deltatime := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1) + make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer), current_timestamp) - _planned_line.ts0;

      -- Is the first of the planned lines start time in the past? (±5 mins)
      IF _planned_line.ts0 < (current_timestamp - make_interval(mins => 5)) THEN
        RAISE NOTICE 'First planned line is in the past. Adjusting times by %', _deltatime;
        -- Adjust the start / end time of the planned lines by assuming that we are at
        -- `defaultLineChangeDuration` minutes away from SOL of the first planned line.
        UPDATE planned_lines
        SET
          ts0 = ts0 + _deltatime,
          ts1 = ts1 + _deltatime;
      END IF;

    ELSE -- Yes it does
      RAISE NOTICE 'Latest sequence does match a planned sequence: %, %', _planned_line.sequence, _planned_line.line;

      -- Is it online?
      IF EXISTS(SELECT 1 FROM raw_lines_files WHERE sequence = _last_sequence.sequence AND hash = '*online*') THEN
        -- Yes it is
        RAISE NOTICE 'Sequence % is online', _last_sequence.sequence;

        -- Let us get the SOL from the events log if we can
        RAISE NOTICE 'Trying to set fsp, ts0 from events log FSP, FGSP';
        WITH e AS (
          SELECT * FROM event_log
          WHERE
            sequence = _last_sequence.sequence
            AND ('FSP' = ANY(labels) OR 'FGSP' = ANY(labels))
          ORDER BY tstamp LIMIT 1
        )
        UPDATE planned_lines
        SET
          fsp = COALESCE(e.point, fsp),
          ts0 = COALESCE(e.tstamp, ts0)
        FROM e
        WHERE planned_lines.sequence = _last_sequence.sequence;

        -- Shot interval
        _shotinterval := (_last_sequence.ts1 - _last_sequence.ts0) / abs(_last_sequence.lsp - _last_sequence.fsp);

        RAISE NOTICE 'Estimating EOL from current shot interval: %', _shotinterval;

        SELECT (abs(lsp-fsp) * _shotinterval + ts0) - ts1
        INTO _deltatime
        FROM planned_lines
        WHERE sequence = _last_sequence.sequence;

        ---- Set ts1 for the current sequence
        --UPDATE planned_lines
        --SET
        --ts1 = (abs(lsp-fsp) * _shotinterval) + ts0
        --WHERE sequence = _last_sequence.sequence;

        RAISE NOTICE 'Adjustment is %', _deltatime;

        IF abs(EXTRACT(EPOCH FROM _deltatime)) < 8 THEN
          RAISE NOTICE 'Adjustment too small (< 8 s), so not applying it';
          RETURN;
        END IF;

        -- Adjust ts1 for the current sequence
        UPDATE planned_lines
        SET ts1 = ts1 + _deltatime
        WHERE sequence = _last_sequence.sequence;

        -- Now shift all sequences after
        UPDATE planned_lines
        SET ts0 = ts0 + _deltatime, ts1 = ts1 + _deltatime
        WHERE sequence > _last_sequence.sequence;

        RAISE NOTICE 'Deleting planned sequences before %', _planned_line.sequence;
        -- Remove all previous planner entries.
        DELETE
        FROM planned_lines
        WHERE sequence < _last_sequence.sequence;

      ELSE
        -- No it isn't
        RAISE NOTICE 'Sequence % is offline', _last_sequence.sequence;

        -- We were supposed to finish at _planned_line.ts1 but we finished at:
        _tstamp := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1), current_timestamp);
        -- WARNING Next line is for testing only
        --_tstamp := COALESCE(_last_sequence.ts1_final, _last_sequence.ts1);
        -- So we need to adjust timestamps by:
        _deltatime := _tstamp - _planned_line.ts1;

        RAISE NOTICE 'Planned end: %, actual end: % (%, %)', _planned_line.ts1, _tstamp, _planned_line.sequence, _last_sequence.sequence;
        RAISE NOTICE 'Shifting times by % for sequences > %', _deltatime, _planned_line.sequence;
        -- NOTE: This won't work if sequences are not, err… sequential.
        -- NOTE: This has been known to happen in 2020.
        UPDATE planned_lines
        SET
          ts0 = ts0 + _deltatime,
          ts1 = ts1 + _deltatime
        WHERE sequence > _planned_line.sequence;

        RAISE NOTICE 'Deleting planned sequences up to %', _planned_line.sequence;
        -- Remove all previous planner entries.
        DELETE
        FROM planned_lines
        WHERE sequence <= _last_sequence.sequence;

      END IF;

    END IF;
  END;
  $$;


END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.1' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.0' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
          HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.1"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.1"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
@@ -0,0 +1,98 @@
-- Use project_configuration() in binning_parameters()
--
-- New schema version: 0.4.2
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This modifies binning_parameters() to use project_configuration()
--
-- To apply, run as the dougal user:
--
--     psql <<EOF
--     \i $THIS_FILE
--     COMMIT;
--     EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION binning_parameters() RETURNS jsonb
  LANGUAGE sql STABLE LEAKPROOF PARALLEL SAFE
  AS $$
    SELECT project_configuration()->'binning' binning;
  $$;

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.2' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.1' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
          HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.2"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.2"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
etc/db/upgrades/upgrade30-v0.4.3-large-notification-payloads.sql (new file, 164 lines)
@@ -0,0 +1,164 @@
-- Support notification payloads larger than Postgres' NOTIFY limit.
--
-- New schema version: 0.4.3
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This creates a new table where large notification payloads are stored
-- temporarily and from which they might be recalled by the notification
-- listeners. It also creates a purge_notifications() procedure used to
-- clean up old notifications from the notifications log and finally,
-- modifies notify() to support these changes. When a large payload is
-- encountered, the payload is stored in the notify_payloads table and
-- a trimmed down version containing a notification_id is sent to listeners
-- instead. Listeners can then query notify_payloads to retrieve the full
-- payloads. It is the application layer's responsibility to delete old
-- notifications.
--
-- To apply, run as the dougal user:
--
--     psql <<EOF
--     \i $THIS_FILE
--     COMMIT;
--     EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_schema () AS $outer$
BEGIN

  RAISE NOTICE 'Updating public schema';
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO public');

  CREATE TABLE IF NOT EXISTS public.notify_payloads (
    id SERIAL,
    tstamp timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP,
    payload text NOT NULL DEFAULT '',
    PRIMARY KEY (id)
  );

  CREATE INDEX IF NOT EXISTS notify_payload_tstamp ON notify_payloads (tstamp);

  CREATE OR REPLACE FUNCTION public.notify() RETURNS trigger
  LANGUAGE plpgsql
  AS $$
  DECLARE
    channel text := TG_ARGV[0];
    pid text;
    payload text;
    notification text;
    payload_id integer;
  BEGIN

    SELECT projects.pid INTO pid FROM projects WHERE schema = TG_TABLE_SCHEMA;

    payload := json_build_object(
      'tstamp', CURRENT_TIMESTAMP,
      'operation', TG_OP,
      'schema', TG_TABLE_SCHEMA,
      'table', TG_TABLE_NAME,
      'old', row_to_json(OLD),
      'new', row_to_json(NEW),
      'pid', pid
    )::text;

    IF octet_length(payload) < 1000 THEN
      PERFORM pg_notify(channel, payload);
    ELSE
      -- We need to find another solution
      -- FIXME Consider storing the payload in a temporary memory table,
      -- referenced by some form of autogenerated ID. Then send the ID
      -- as the payload and then it's up to the user to fetch the original
      -- payload if interested. This needs a mechanism to expire older payloads
      -- in the interest of conserving memory.

      INSERT INTO notify_payloads (payload) VALUES (payload) RETURNING id INTO payload_id;

      notification := json_build_object(
        'tstamp', CURRENT_TIMESTAMP,
        'operation', TG_OP,
        'schema', TG_TABLE_SCHEMA,
        'table', TG_TABLE_NAME,
        'pid', pid,
        'payload_id', payload_id
      )::text;

      PERFORM pg_notify(channel, notification);
      RAISE INFO 'Payload over limit';
    END IF;
    RETURN NULL;
  END;
  $$;

  CREATE PROCEDURE public.purge_notifications (age_seconds numeric DEFAULT 120) AS $$
    DELETE FROM notify_payloads WHERE EXTRACT(epoch FROM CURRENT_TIMESTAMP - tstamp) > age_seconds;
  $$ LANGUAGE sql;


END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.3' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.2' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
          HINT='Ensure all previous patches have been applied.';
  END IF;

  -- This upgrade modified the `public` schema only, not individual
  -- project schemas.
  CALL pg_temp.upgrade_schema();

END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_schema ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.3"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.3"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
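From a listener's point of view the contract is: if the received JSON carries 'old'/'new' it is complete; if it carries 'payload_id' instead, the body must be fetched from notify_payloads. A sketch of that round trip in psql, where the channel name is illustrative (triggers would attach public.notify() with that channel as its argument):

```sql
LISTEN dougal_events;   -- hypothetical channel name passed to public.notify()

-- An oversized change arrives as e.g.
--   {"tstamp": "...", "operation": "UPDATE", ..., "payload_id": 42}
-- Recover the full payload that notify() parked in the side table:
SELECT payload FROM public.notify_payloads WHERE id = 42;

-- Housekeeping is left to the application layer:
CALL public.purge_notifications(300);   -- delete payloads older than 300 seconds
```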
@@ -0,0 +1,104 @@
-- Add event_log_changes function
--
-- New schema version: 0.4.4
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This adds a function event_log_changes which returns the subset of
-- events from event_log_full which have been modified on or after a
-- given timestamp.
--
-- To apply, run as the dougal user:
--
--     psql <<EOF
--     \i $THIS_FILE
--     COMMIT;
--     EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE FUNCTION event_log_changes(ts0 timestamptz)
  RETURNS SETOF event_log_full
  LANGUAGE sql
  AS $$
    SELECT *
    FROM event_log_full
    WHERE lower(validity) > ts0 OR upper(validity) IS NOT NULL AND upper(validity) > ts0
    ORDER BY lower(validity);
  $$;

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.4.4' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.4.3' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
          HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.4"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.4.4"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
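event_log_changes() is what lets clients poll incrementally: pass the timestamp of the last sync and only rows whose validity range opened or closed after it come back, oldest first. A short sketch (the schema name is invented); the web client's refreshEvents action reaches this through /project/:pid/event/changes/:timestamp:

```sql
SET search_path TO survey_example, public;   -- hypothetical survey schema

-- Everything created, edited or deleted in the last hour:
SELECT * FROM event_log_changes(now() - interval '1 hour');
```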
@@ -44,7 +44,7 @@
 <template v-slot:activator="{ on, attrs }">
   <v-text-field
     v-model="tsDate"
-    :disabled="!!(sequence || point || entrySequence || entryPoint)"
+    :disabled="!!(entrySequence || entryPoint)"
     label="Date"
     suffix="UTC"
     prepend-icon="mdi-calendar"
@@ -64,7 +64,7 @@
 <v-col>
   <v-text-field
     v-model="tsTime"
-    :disabled="!!(sequence || point || entrySequence || entryPoint)"
+    :disabled="!!(entrySequence || entryPoint)"
     label="Time"
     suffix="UTC"
     prepend-icon="mdi-clock-outline"
@@ -256,6 +256,15 @@
 >
   Cancel
 </v-btn>
+<v-btn v-if="!id && (entrySequence || entryPoint)"
+  color="info"
+  text
+  title="Enter an event by time"
+  @click="timed"
+>
+  <v-icon left small>mdi-clock-outline</v-icon>
+  Timed
+</v-btn>
 <v-spacer></v-spacer>
 <v-btn
   :disabled="!canSave"
@@ -632,6 +641,14 @@ export default {
   }
 },

+timed () {
+  const tstamp = (new Date()).toISOString();
+  this.entrySequence = null;
+  this.entryPoint = null;
+  this.tsDate = tstamp.substr(0, 10);
+  this.tsTime = tstamp.substr(11, 8);
+},
+
 close () {
   this.entryLabels = this.selectedLabels.map(this.labelToItem)
   this.$emit("input", false);
@@ -2,8 +2,8 @@
 <div class="line-status" v-if="sequences.length == 0">
   <slot name="empty"></slot>
 </div>
-<div class="line-status" v-else-if="sequenceHref">
-  <router-link v-for="sequence in sequences" :key="sequence.sequence"
+<div class="line-status" v-else-if="sequenceHref || plannedSequenceHref || pendingReshootHref">
+  <router-link v-for="sequence in sequences" :key="sequence.sequence" v-if="sequenceHref"
     class="sequence"
     :class="sequence.status"
     :style="style(sequence)"
@@ -11,15 +11,41 @@
     :to="sequenceHref(sequence)"
   >
   </router-link>
+  <router-link v-for="sequence in plannedSequences" :key="sequence.sequence" v-if="plannedSequenceHref"
+    class="sequence planned"
+    :style="style(sequence)"
+    :title="title(sequence, 'planned')"
+    :to="plannedSequenceHref(sequence)"
+  >
+  </router-link>
+  <router-link v-for="(line, key) in pendingReshoots" :key="key" v-if="pendingReshootHref"
+    class="sequence reshoot"
+    :style="style(line)"
+    :title="title(line, 'reshoot')"
+    :to="pendingReshootHref(line)"
+  >
+  </router-link>
 </div>
 <div class="line-status" v-else>
-  <div v-for="sequence in sequences"
+  <div v-for="sequence in sequences" :key="sequence.sequence"
     class="sequence"
     :class="sequence.status"
     :style="style(sequence)"
     :title="title(sequence)"
   >
   </div>
+  <div v-for="sequence in plannedSequences" :key="sequence.sequence"
+    class="sequence planned"
+    :style="style(sequence)"
+    :title="title(sequence, 'planned')"
+  >
+  </div>
+  <div v-for="(line, key) in pendingReshoots" :key="key"
+    class="sequence reshoot"
+    :style="style(line)"
+    :title="title(line, 'reshoot')"
+  >
+  </div>
 </div>
 </template>
@@ -48,6 +74,8 @@
     background-color blue
   &.planned
     background-color magenta
+  &.reshoot
+    background repeating-linear-gradient(-45deg, rgba(255,0,255,0.302), brown 5px, rgba(247, 247, 247, 0.1) 5px, rgba(242, 241, 241, 0.08) 10px), repeating-linear-gradient(45deg, rgba(255,0,255,0.302), brown 5px, rgba(247, 247, 247, 0.1) 5px, rgba(242, 241, 241, 0.08) 10px)
 </style>

 <script>
@@ -58,7 +86,11 @@ export default {
 props: {
   preplot: Object,
   sequences: Array,
-  "sequence-href": Function
+  "sequence-href": Function,
+  "planned-sequences": Array,
+  "planned-sequence-href": Function,
+  "pending-reshoots": Array,
+  "pending-reshoot-href": Function
 },

 methods: {
@@ -68,13 +100,13 @@ export default {
       ? s.fsp_final
       : s.status == "ntbp"
       ? (s.fsp_final || s.fsp)
-      : s.fsp; /* status == "raw" */
+      : s.fsp; /* status == "raw" or planned sequence or pending reshoot */

     const lsp = s.status == "final"
       ? s.lsp_final
       : s.status == "ntbp"
       ? (s.lsp_final || s.lsp)
-      : s.lsp; /* status == "raw" */
+      : s.lsp; /* status == "raw" or planned sequence or pending reshoot */

     const pp0 = Math.min(this.preplot.fsp, this.preplot.lsp);
     const pp1 = Math.max(this.preplot.fsp, this.preplot.lsp);
@@ -91,20 +123,24 @@ export default {
     return values;
   },

-  title (s) {
-    const status = s.status == "final"
-      ? "Final"
-      : s.status == "raw"
-      ? "Acquired"
-      : s.status == "ntbp"
-      ? "NTBP"
-      : s.status == "planned"
-      ? "Planned"
-      : s.status;
-
-    const remarks = "\n"+[s.remarks, s.remarks_final].join("\n").trim()
-
-    return `Sequence ${s.sequence} – ${status} (${s.fsp_final || s.fsp}−${s.lsp_final || s.lsp})${remarks}`;
+  title (s, type) {
+    if (s.status || type == "planned") {
+      const status = s.status == "final"
+        ? "Final"
+        : s.status == "raw"
+        ? "Acquired"
+        : s.status == "ntbp"
+        ? "NTBP"
+        : type == "planned"
+        ? "Planned"
+        : s.status;
+
+      const remarks = "\n"+[s.remarks, s.remarks_final].join("\n").trim()
+
+      return `Sequence ${s.sequence} – ${status} (${s.fsp_final || s.fsp}−${s.lsp_final || s.lsp})${remarks}`;
+    } else if (type == "reshoot") {
+      return `Pending reshoot (${s.fsp}‒${s.lsp})${s.remarks? "\n"+s.remarks : ""}`;
+    }
   }
@@ -5,6 +5,11 @@ import api from './modules/api'
 import user from './modules/user'
 import snack from './modules/snack'
 import project from './modules/project'
+import event from './modules/event'
+import label from './modules/label'
+import sequence from './modules/sequence'
+import plan from './modules/plan'
+import line from './modules/line'
 import notify from './modules/notify'

 Vue.use(Vuex)
@@ -15,6 +20,11 @@ export default new Vuex.Store({
     user,
     snack,
     project,
+    event,
+    label,
+    sequence,
+    plan,
+    line,
     notify
   }
 })
lib/www/client/source/src/store/modules/event/actions.js (new file, 129 lines)
@@ -0,0 +1,129 @@

/** Fetch events from server
 */
async function refreshEvents ({commit, dispatch, state, rootState}, [modifiedAfter] = []) {

  if (!modifiedAfter) {
    modifiedAfter = state.timestamp;
  }

  if (state.loading) {
    commit('abortEventsLoading');
  }

  commit('setEventsLoading');
  const pid = rootState.project.projectId;
  const url = modifiedAfter
    ? `/project/${pid}/event/changes/${(new Date(modifiedAfter)).toISOString()}?unique=t`
    : `/project/${pid}/event`;
  const init = {
    signal: state.loading.signal
  };
  const res = await dispatch('api', [url, init]);

  if (res) {
    if (modifiedAfter) {
      commit('setModifiedEvents', res);
    } else {
      commit('setEvents', res);
    }
    commit('setEventsTimestamp');
  }
  commit('clearEventsLoading');

}

/** Return a subset of events from state.events
 */
async function getEvents ({commit, dispatch, state}, [projectId, {sequence, date0, date1, sortBy, sortDesc, itemsPerPage, page, text, label}]) {
  let filteredEvents = [...state.events];

  if (sortBy) {

    sortBy.forEach( (key, idx) => {
      filteredEvents.sort( (el0, el1) => {
        const a = el0?.[key];
        const b = el1?.[key];
        if (a < b) {
          return -1;
        } else if (a > b) {
          return 1;
        } else if (a == b) {
          return 0;
        } else if (a && !b) {
          return 1;
        } else if (!a && b) {
          return -1;
        } else {
          return 0;
        }
      });
      if (sortDesc && sortDesc[idx] === true) {
        filteredEvents.reverse();
      }
    });

  }

  if (sequence) {
    filteredEvents = filteredEvents.filter( event => event.sequence == sequence );
  }

  if (date0 && date1) {
    filteredEvents = filteredEvents.filter( event =>
      event.tstamp.substr(0, 10) >= date0 && event.tstamp.substr(0, 10) <= date1
    );
  } else if (date0) {
    filteredEvents = filteredEvents.filter( event => event.tstamp.substr(0, 10) == date0 );
  }

  if (text) {
    const tstampFilter = (value, search, item) => {
      return textFilter(value, search, item);
    };

    const numberFilter = (value, search, item) => {
      return value == search;
    };

    const textFilter = (value, search, item) => {
      return String(value).toLowerCase().includes(search.toLowerCase());
    };

    const searchFunctions = {
      tstamp: tstampFilter,
      sequence: numberFilter,
      point: numberFilter,
      remarks: textFilter,
      labels: (value, search, item) => value.some(label => textFilter(label, search, item))
    };

    filteredEvents = filteredEvents.filter ( event => {
      for (let key in searchFunctions) {
        const fn = searchFunctions[key];
        if (fn(event[key], text, event)) {
          return true;
        }
      }
      return false;
    });
  }

  if (label) {
    filteredEvents = filteredEvents.filter( event => event.labels?.includes(label) );
  }

  const count = filteredEvents.length;

  if (itemsPerPage && itemsPerPage > 0) {
    const offset = (page > 0)
      ? (page-1) * itemsPerPage
      : 0;

    filteredEvents = filteredEvents.slice(offset, offset+itemsPerPage);
  }

  return {events: filteredEvents, count};
}

export default { refreshEvents, getEvents };
lib/www/client/source/src/store/modules/event/getters.js (Normal file, 14 lines)
@@ -0,0 +1,14 @@

function events (state) {
  return state.events;
}

function eventCount (state) {
  return state.events?.length ?? 0;
}

function eventsLoading (state) {
  return !!state.loading;
}

export default { events, eventCount, eventsLoading };

lib/www/client/source/src/store/modules/event/index.js (Normal file, 6 lines)
@@ -0,0 +1,6 @@
import state from './state'
import getters from './getters'
import actions from './actions'
import mutations from './mutations'

export default { state, getters, actions, mutations };

lib/www/client/source/src/store/modules/event/mutations.js (Normal file, 73 lines)
@@ -0,0 +1,73 @@

function setEvents (state, events) {
  // We don't need or want the events array to be reactive, since
  // it can be tens of thousands of items long.
  state.events = Object.freeze(events);
}

/** Selectively replace / insert / delete events
 * from state.events.
 *
 * modifiedEvents is the result of
 * /api/project/:project/event/changes?unique=t
 */
function setModifiedEvents (state, modifiedEvents) {
  const events = [...state.events];
  for (let evt of modifiedEvents) {
    const idx = events.findIndex(i => i.id == evt.id);
    if (idx != -1) {
      if (evt.is_deleted) {
        events.splice(idx, 1);
      } else {
        delete evt.is_deleted;
        events.splice(idx, 1, evt);
      }
    } else {
      if (!evt.is_deleted) {
        delete evt.is_deleted;
        events.unshift(evt);
      }
    }
  }
  setEvents(state, events);
}

function setEventsLoading (state, abortController = new AbortController()) {
  state.loading = abortController;
}

function clearEventsLoading (state) {
  state.loading = null;
}

function setEventsTimestamp (state, timestamp = new Date()) {
  if (timestamp === true) {
    const tstamp = state.events
      .map( event => event.modified_on )
      .reduce( (acc, cur) => acc > cur ? acc : cur );
    state.timestamp = tstamp ? new Date(tstamp) : new Date();
  } else {
    state.timestamp = timestamp;
  }
}

function setEventsETag (state, etag) {
  state.etag = etag;
}

function abortEventsLoading (state) {
  if (state.loading) {
    state.loading.abort();
  }
  state.loading = null;
}

export default {
  setEvents,
  setModifiedEvents,
  setEventsLoading,
  clearEventsLoading,
  abortEventsLoading,
  setEventsTimestamp,
  setEventsETag
};

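The mutation above implements a small merge rule: rows with a known id are replaced in place, unknown rows are inserted at the front, and upstream deletions are dropped. A standalone sketch of that rule, runnable under Node, with invented ids and payloads:

// Illustration only: mirrors the replace / insert / delete rule of setModifiedEvents.
const events = [ { id: 1, remarks: "FSP" }, { id: 2, remarks: "QC" } ];
const changes = [
  { id: 2, remarks: "QC accepted" }, // known id: replaced in place
  { id: 3, remarks: "New event" },   // unknown id: inserted at the front
  { id: 1, is_deleted: true }        // deleted upstream: removed
];
const merged = [...events];
for (const evt of changes) {
  const idx = merged.findIndex(i => i.id == evt.id);
  if (idx != -1) {
    if (evt.is_deleted) {
      merged.splice(idx, 1);
    } else {
      delete evt.is_deleted;
      merged.splice(idx, 1, evt);
    }
  } else if (!evt.is_deleted) {
    merged.unshift(evt);
  }
}
console.log(merged); // [ { id: 3, ... }, { id: 2, remarks: "QC accepted" } ]
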
lib/www/client/source/src/store/modules/event/state.js (Normal file, 8 lines)
@@ -0,0 +1,8 @@
const state = () => ({
  events: Object.freeze([]),
  loading: null,
  timestamp: null,
  etag: null,
});

export default state;

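Taken together, the event module gives a component a refresh action that talks to the server and a getEvents action that sorts, filters and pages the cached copy. A minimal sketch of how a consumer might wire it up; the component shape and option values are hypothetical, not part of this changeset:

import { mapGetters, mapActions } from 'vuex';

// Hypothetical consumer of the event module (illustration only).
export default {
  data: () => ({ items: [], total: 0 }),
  computed: {
    ...mapGetters(['eventsLoading'])
  },
  methods: {
    ...mapActions(['refreshEvents', 'getEvents']),
    async load (projectId) {
      await this.refreshEvents();          // fetch (or incrementally update) the cache
      const { events, count } = await this.getEvents(
        [projectId, { sortBy: ['tstamp'], sortDesc: [true], page: 1, itemsPerPage: 25 }]);
      this.items = events;                 // one page of rows
      this.total = count;                  // total matching rows, for the pager
    }
  }
};
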
lib/www/client/source/src/store/modules/label/actions.js (Normal file, 106 lines)
@@ -0,0 +1,106 @@

/** Fetch labels from server
 */
async function refreshLabels ({commit, dispatch, state, rootState}) {

  if (state.loading) {
    commit('abortLabelsLoading');
  }

  commit('setLabelsLoading');
  const pid = rootState.project.projectId;
  const url = `/project/${pid}/label`;
  const init = {
    signal: state.loading.signal
  };
  const res = await dispatch('api', [url, init]);

  if (res) {
    commit('setLabels', res);
    commit('setLabelsTimestamp');
  }
  commit('clearLabelsLoading');
}

/** Return a subset of labels from state.labels.
 *
 * Note that, unlike other actions in the get* family,
 * the return value is not isomorphic to the state.
 *
 * While state.labels is an object, getLabels() returns
 * an array with each item having the shape:
 *
 * { label: "labelName", view: {…}, model: {…} }
 *
 * This is intended to be useful, for instance, for a table
 * of labels.
 */
async function getLabels ({commit, dispatch, state}, [projectId, {sortBy, sortDesc, itemsPerPage, page, text, label}]) {

  let filteredLabels = Object.entries(state.labels).map(i => {
    return {
      label: i[0],
      ...i[1]
    }
  });

  if (sortBy) {

    sortBy.forEach( (key, idx) => {
      filteredLabels.sort( (el0, el1) => {
        const a = key == "label" ? el0.label : el0.view?.[key];
        const b = key == "label" ? el1.label : el1.view?.[key];
        if (a < b) {
          return -1;
        } else if (a > b) {
          return 1;
        } else if (a == b) {
          return 0;
        } else if (a && !b) {
          return 1;
        } else if (!a && b) {
          return -1;
        } else {
          return 0;
        }
      });
      if (sortDesc && sortDesc[idx] === true) {
        filteredLabels.reverse();
      }
    });

  }

  if (label) {
    filteredLabels = filteredLabels.filter( item => item.label == label );
  }

  if (text) {
    const textFilter = (value, search, item) => {
      return String(value).toLowerCase().includes(search.toLowerCase());
    };

    filteredLabels = filteredLabels.filter( item => {
      return textFilter(item.label, text, item) || textFilter(item.view?.description, text, item);
    });
  }

  const count = filteredLabels.length;

  if (itemsPerPage && itemsPerPage > 0) {
    const offset = (page > 0)
      ? (page-1) * itemsPerPage
      : 0;

    filteredLabels = filteredLabels.slice(offset, offset+itemsPerPage);
  }

  return {labels: filteredLabels, count};
}

export default { refreshLabels, getLabels };

lib/www/client/source/src/store/modules/label/getters.js (Normal file, 22 lines)
@@ -0,0 +1,22 @@

function labels (state) {
  return state.labels;
}

/** Return labels that can be added by users.
 *
 * As opposed to system labels.
 */
function userLabels (state) {
  return Object.fromEntries(Object.entries(state.labels).filter(i => i[1].model.user));
}

function labelCount (state) {
  // state.labels is a keyed object, not an array.
  return Object.keys(state.labels ?? {}).length;
}

function labelsLoading (state) {
  return !!state.loading;
}

export default { labels, userLabels, labelCount, labelsLoading };

lib/www/client/source/src/store/modules/label/index.js (Normal file, 6 lines)
@@ -0,0 +1,6 @@
import state from './state'
import getters from './getters'
import actions from './actions'
import mutations from './mutations'

export default { state, getters, actions, mutations };

lib/www/client/source/src/store/modules/label/mutations.js (Normal file, 49 lines)
@@ -0,0 +1,49 @@

function setLabels (state, labels) {
  // We don't need or want the labels object to be reactive.
  state.labels = Object.freeze(labels);
}

function setLabelsLoading (state, abortController = new AbortController()) {
  state.loading = abortController;
}

// This assumes that we know any transactions have finished or we
// don't care about aborting.
function clearLabelsLoading (state) {
  state.loading = null;
}

function setLabelsTimestamp (state, timestamp = new Date()) {
  // NOTE: There is no `modified_on` property in the labels
  // result or in the database schema, but we could add
  // one.
  if (timestamp === true) {
    const tstamp = Object.values(state.labels)
      .map( i => i.modified_on )
      .reduce( (acc, cur) => acc > cur ? acc : cur );
    state.timestamp = tstamp ? new Date(tstamp) : new Date();
  } else {
    state.timestamp = timestamp;
  }
}

function setLabelsETag (state, etag) {
  state.etag = etag;
}

function abortLabelsLoading (state) {
  if (state.loading) {
    state.loading.abort();
  }
  state.loading = null;
}

export default {
  setLabels,
  setLabelsLoading,
  clearLabelsLoading,
  setLabelsTimestamp,
  setLabelsETag
};

lib/www/client/source/src/store/modules/label/state.js (Normal file, 8 lines)
@@ -0,0 +1,8 @@
const state = () => ({
  labels: Object.freeze({}),
  loading: null,
  timestamp: null,
  etag: null,
});

export default state;

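Because state.labels is keyed by label name, getLabels above flattens it into rows before sorting and paging. A standalone sketch of that transformation with made-up label data:

// Illustration: the keyed label map becomes an array of table rows.
const labels = {
  QC: { view: { colour: "orange", description: "Quality control" }, model: { user: false } },
  Wildlife: { view: { colour: "green", description: "Fauna sighting" }, model: { user: true } }
};
const rows = Object.entries(labels).map(([label, data]) => ({ label, ...data }));
console.log(rows.map(r => r.label)); // [ "QC", "Wildlife" ]
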
lib/www/client/source/src/store/modules/line/actions.js (Normal file, 117 lines)
@@ -0,0 +1,117 @@

/** Fetch lines from server
 */
async function refreshLines ({commit, dispatch, state, rootState}) {

  if (state.loading) {
    commit('abortLinesLoading');
  }

  commit('setLinesLoading');
  const pid = rootState.project.projectId;
  const url = `/project/${pid}/line`;
  const init = {
    signal: state.loading.signal
  };
  const res = await dispatch('api', [url, init]);

  if (res) {
    commit('setLines', res);
    commit('setLinesTimestamp');
  }
  commit('clearLinesLoading');
}

/** Return a subset of lines from state.lines
 */
async function getLines ({commit, dispatch, state}, [projectId, {line, fsp, lsp, incr, sortBy, sortDesc, itemsPerPage, page, text}]) {
  let filteredLines = [...state.lines];

  if (sortBy) {

    sortBy.forEach( (key, idx) => {
      filteredLines.sort( (el0, el1) => {
        const a = el0?.[key];
        const b = el1?.[key];
        if (a < b) {
          return -1;
        } else if (a > b) {
          return 1;
        } else if (a == b) {
          return 0;
        } else if (a && !b) {
          return 1;
        } else if (!a && b) {
          return -1;
        } else {
          return 0;
        }
      });
      if (sortDesc && sortDesc[idx] === true) {
        filteredLines.reverse();
      }
    });

  }

  if (line) {
    filteredLines = filteredLines.filter( item => item.line == line );
  }

  if (fsp) {
    filteredLines = filteredLines.filter( item => item.fsp == fsp );
  }

  if (lsp) {
    filteredLines = filteredLines.filter( item => item.lsp == lsp );
  }

  if (text) {
    const numberFilter = (value, search, item) => {
      return value == search;
    };

    const textFilter = (value, search, item) => {
      return String(value).toLowerCase().includes(search.toLowerCase());
    };

    const incrFilter = (value, search, item) => {
      const inc = /^(incr(ement)?|↑|\+)/i;
      const dec = /^(decr(ement)?|↓|-)/i;
      return (inc.test(search) && value) || (dec.test(search) && !value)
    }

    const searchFunctions = {
      line: numberFilter,
      fsp: numberFilter,
      lsp: numberFilter,
      remarks: textFilter,
      incr: incrFilter,
      ntba: (value, search, item) => search.toLowerCase() == "ntba" && value
    };

    filteredLines = filteredLines.filter( line => {
      for (let key in searchFunctions) {
        const fn = searchFunctions[key];
        if (fn(line[key], text, line)) {
          return true;
        }
      }
      return false;
    });
  }

  const count = filteredLines.length;

  if (itemsPerPage && itemsPerPage > 0) {
    const offset = (page > 0)
      ? (page-1) * itemsPerPage
      : 0;

    filteredLines = filteredLines.slice(offset, offset+itemsPerPage);
  }

  return {lines: filteredLines, count};
}

export default { refreshLines, getLines };

lib/www/client/source/src/store/modules/line/getters.js (Normal file, 14 lines)
@@ -0,0 +1,14 @@

function lines (state) {
  return state.lines;
}

function lineCount (state) {
  return state.lines?.length ?? 0;
}

function linesLoading (state) {
  return !!state.loading;
}

export default { lines, lineCount, linesLoading };

lib/www/client/source/src/store/modules/line/index.js (Normal file, 6 lines)
@@ -0,0 +1,6 @@
import state from './state'
import getters from './getters'
import actions from './actions'
import mutations from './mutations'

export default { state, getters, actions, mutations };

lib/www/client/source/src/store/modules/line/mutations.js (Normal file, 49 lines)
@@ -0,0 +1,49 @@

function setLines (state, lines) {
  // We don't need or want the lines array to be reactive, since
  // it can be tens of thousands of items long.
  state.lines = Object.freeze(lines);
}

function setLinesLoading (state, abortController = new AbortController()) {
  state.loading = abortController;
}

// This assumes that we know any transactions have finished or we
// don't care about aborting.
function clearLinesLoading (state) {
  state.loading = null;
}

function setLinesTimestamp (state, timestamp = new Date()) {
  // NOTE: There is no `modified_on` property in the lines
  // result or in the database schema, but we could perhaps add
  // one.
  if (timestamp === true) {
    const tstamp = state.lines
      .map( line => line.modified_on )
      .reduce( (acc, cur) => acc > cur ? acc : cur );
    state.timestamp = tstamp ? new Date(tstamp) : new Date();
  } else {
    state.timestamp = timestamp;
  }
}

function setLinesETag (state, etag) {
  state.etag = etag;
}

function abortLinesLoading (state) {
  if (state.loading) {
    state.loading.abort();
  }
  state.loading = null;
}

export default {
  setLines,
  setLinesLoading,
  clearLinesLoading,
  setLinesTimestamp,
  setLinesETag
};

lib/www/client/source/src/store/modules/line/state.js (Normal file, 8 lines)
@@ -0,0 +1,8 @@
const state = () => ({
  lines: Object.freeze([]),
  loading: null,
  timestamp: null,
  etag: null,
});

export default state;

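The incrFilter above is the piece behind the "incr / + / decr / -" search tokens mentioned in the Lines view filter hint. A standalone sketch of its behaviour (values invented):

// Illustration: incrementing lines match "incr"-style tokens, decrementing lines "decr"-style.
const incrFilter = (value, search) => {
  const inc = /^(incr(ement)?|↑|\+)/i;
  const dec = /^(decr(ement)?|↓|-)/i;
  return (inc.test(search) && value) || (dec.test(search) && !value);
};
console.log(incrFilter(true, "incr"));  // true
console.log(incrFilter(true, "+"));     // true
console.log(incrFilter(false, "decr")); // true
console.log(incrFilter(true, "-"));     // false
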
lib/www/client/source/src/store/modules/plan/actions.js (Normal file, 114 lines)
@@ -0,0 +1,114 @@

/** Fetch sequences from server
 */
async function refreshPlan ({commit, dispatch, state, rootState}) {

  if (state.loading) {
    commit('abortPlanLoading');
  }

  commit('setPlanLoading');
  const pid = rootState.project.projectId;
  const url = `/project/${pid}/plan`;
  const init = {
    signal: state.loading.signal
  };
  const res = await dispatch('api', [url, init]);

  if (res) {
    commit('setPlan', res);
    commit('setPlanTimestamp');
  }
  commit('clearPlanLoading');
}

/** Return a subset of sequences from state.sequences
 */
async function getPlannedSequences ({commit, dispatch, state}, [projectId, {sequence, date0, date1, sortBy, sortDesc, itemsPerPage, page, text}]) {
  let filteredPlannedSequences = [...state.sequences];

  if (sortBy) {

    sortBy.forEach( (key, idx) => {
      filteredPlannedSequences.sort( (el0, el1) => {
        const a = el0?.[key];
        const b = el1?.[key];
        if (a < b) {
          return -1;
        } else if (a > b) {
          return 1;
        } else if (a == b) {
          return 0;
        } else if (a && !b) {
          return 1;
        } else if (!a && b) {
          return -1;
        } else {
          return 0;
        }
      });
      if (sortDesc && sortDesc[idx] === true) {
        filteredPlannedSequences.reverse();
      }
    });

  }

  if (sequence) {
    filteredPlannedSequences = filteredPlannedSequences.filter( item => item.sequence == sequence );
  }

  if (date0 && date1) {
    filteredPlannedSequences = filteredPlannedSequences.filter( sequence =>
      sequence.ts0.toISOString().substr(0, 10) >= date0 && sequence.ts1.toISOString().substr(0, 10) <= date1
    );
  } else if (date0) {
    filteredPlannedSequences = filteredPlannedSequences.filter( sequence =>
      sequence.ts0.toISOString().substr(0, 10) == date0 || sequence.ts1.toISOString().substr(0, 10) == date0
    );
  }

  if (text) {
    const tstampFilter = (value, search, item) => {
      return textFilter(value.toISOString(), search, item);
    };

    const numberFilter = (value, search, item) => {
      return value == search;
    };

    const textFilter = (value, search, item) => {
      return String(value).toLowerCase().includes(search.toLowerCase());
    };

    const searchFunctions = {
      sequence: numberFilter,
      line: numberFilter,
      remarks: textFilter,
      ts0: tstampFilter,
      ts1: tstampFilter
    };

    filteredPlannedSequences = filteredPlannedSequences.filter( sequence => {
      for (let key in searchFunctions) {
        const fn = searchFunctions[key];
        if (fn(sequence[key], text, sequence)) {
          return true;
        }
      }
      return false;
    });
  }

  const count = filteredPlannedSequences.length;

  if (itemsPerPage && itemsPerPage > 0) {
    const offset = (page > 0)
      ? (page-1) * itemsPerPage
      : 0;

    filteredPlannedSequences = filteredPlannedSequences.slice(offset, offset+itemsPerPage);
  }

  return {sequences: filteredPlannedSequences, count};
}

export default { refreshPlan, getPlannedSequences };

lib/www/client/source/src/store/modules/plan/getters.js (Normal file, 18 lines)
@@ -0,0 +1,18 @@

function planRemarks (state) {
  return state.remarks;
}

function plannedSequences (state) {
  return state.sequences;
}

function plannedSequenceCount (state) {
  return state.sequences?.length ?? 0;
}

function plannedSequencesLoading (state) {
  return !!state.loading;
}

export default { planRemarks, plannedSequences, plannedSequenceCount, plannedSequencesLoading };

lib/www/client/source/src/store/modules/plan/index.js (Normal file, 6 lines)
@@ -0,0 +1,6 @@
import state from './state'
import getters from './getters'
import actions from './actions'
import mutations from './mutations'

export default { state, getters, actions, mutations };

lib/www/client/source/src/store/modules/plan/mutations.js (Normal file, 59 lines)
@@ -0,0 +1,59 @@

function transform (item) {
  item.ts0 = new Date(item.ts0);
  item.ts1 = new Date(item.ts1);
  return item;
}

// ATTENTION: This relies on the new planner endpoint
// as per issue #281.

function setPlan (state, plan) {
  // We don't need or want the planned sequences array to be reactive
  state.sequences = Object.freeze(plan.sequences.map(transform));
  state.remarks = plan.remarks;
}

function setPlanLoading (state, abortController = new AbortController()) {
  state.loading = abortController;
}

// This assumes that we know any transactions have finished or we
// don't care about aborting.
function clearPlanLoading (state) {
  state.loading = null;
}

function setPlanTimestamp (state, timestamp = new Date()) {
  // NOTE: There is no `modified_on` property in the plan
  // result or in the database schema, but we should probably add
  // one.
  if (timestamp === true) {
    const tstamp = state.sequences
      .map( item => item.modified_on )
      .reduce( (acc, cur) => acc > cur ? acc : cur );
    state.timestamp = tstamp ? new Date(tstamp) : new Date();
  } else {
    state.timestamp = timestamp;
  }
}

function setPlanETag (state, etag) {
  state.etag = etag;
}

function abortPlanLoading (state) {
  if (state.loading) {
    state.loading.abort();
  }
  state.loading = null;
}

export default {
  setPlan,
  setPlanLoading,
  clearPlanLoading,
  setPlanTimestamp,
  setPlanETag
};

lib/www/client/source/src/store/modules/plan/state.js (Normal file, 9 lines)
@@ -0,0 +1,9 @@
const state = () => ({
  sequences: Object.freeze([]),
  remarks: null,
  loading: null,
  timestamp: null,
  etag: null,
});

export default state;

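Note that setPlan's transform turns ts0/ts1 into Date objects, which is why the date filters in getPlannedSequences compare via the ISO string form rather than slicing the raw value. A small sketch with invented values:

// Illustration: after transform, date-range checks compare the ISO form of the Dates.
const transform = item => ({ ...item, ts0: new Date(item.ts0), ts1: new Date(item.ts1) });
const seq = transform({ sequence: 7, ts0: "2023-05-01T04:00:00Z", ts1: "2023-05-01T09:30:00Z" });
const date0 = "2023-05-01";
console.log(seq.ts0.toISOString().substr(0, 10) == date0); // true
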
@@ -1,10 +1,11 @@

async function getProject ({commit, dispatch}, projectId) {
-  const res = await dispatch('api', [`/project/${String(projectId).toLowerCase()}`]);
+  const res = await dispatch('api', [`/project/${String(projectId).toLowerCase()}/configuration`]);
  if (res) {
    commit('setProjectName', res.name);
-    commit('setProjectId', res.pid);
+    commit('setProjectId', res.id?.toLowerCase());
    commit('setProjectSchema', res.schema);
+    commit('setProjectConfiguration', res);
    const recentProjects = JSON.parse(localStorage.getItem("recentProjects") || "[]")
    recentProjects.unshift(res);
    localStorage.setItem("recentProjects", JSON.stringify(recentProjects.slice(0, 3)));
@@ -12,6 +13,7 @@ async function getProject ({commit, dispatch}, projectId) {
    commit('setProjectName', null);
    commit('setProjectId', null);
    commit('setProjectSchema', null);
+    commit('setProjectConfiguration', {});
  }
}

@@ -11,4 +11,8 @@ function projectSchema (state) {
  return state.projectSchema;
}

-export default { projectId, projectName, projectSchema };
+function projectConfiguration (state) {
+  return state.projectConfiguration;
+}
+
+export default { projectId, projectName, projectSchema, projectConfiguration };

@@ -11,4 +11,8 @@ function setProjectSchema (state, schema) {
  state.projectSchema = schema;
}

-export default { setProjectId, setProjectName, setProjectSchema };
+function setProjectConfiguration (state, configuration) {
+  state.projectConfiguration = Object.freeze(configuration);
+}
+
+export default { setProjectId, setProjectName, setProjectSchema, setProjectConfiguration };

@@ -1,7 +1,8 @@
const state = () => ({
  projectId: null,
  projectName: null,
-  projectSchema: null
+  projectSchema: null,
+  projectConfiguration: {}
});

export default state;

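getProject above also keeps a small most-recently-used list in the browser: the freshly loaded project is pushed to the front of recentProjects and the list is capped at three entries. A browser-console sketch of that bookkeeping, with an invented project object:

// Illustration: newest project first, at most three remembered.
const recent = JSON.parse(localStorage.getItem("recentProjects") || "[]");
recent.unshift({ id: "demo", name: "Demo survey" });
localStorage.setItem("recentProjects", JSON.stringify(recent.slice(0, 3)));
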
lib/www/client/source/src/store/modules/sequence/actions.js (Normal file, 122 lines)
@@ -0,0 +1,122 @@

/** Fetch sequences from server
 */
async function refreshSequences ({commit, dispatch, state, rootState}) {

  if (state.loading) {
    commit('abortSequencesLoading');
  }

  commit('setSequencesLoading');
  const pid = rootState.project.projectId;
  const url = `/project/${pid}/sequence?files=true`;
  const init = {
    signal: state.loading.signal
  };
  const res = await dispatch('api', [url, init]);

  if (res) {
    commit('setSequences', res);
    commit('setSequencesTimestamp');
  }
  commit('clearSequencesLoading');
}

/** Return a subset of sequences from state.sequences
 */
async function getSequences ({commit, dispatch, state}, [projectId, {sequence, date0, date1, sortBy, sortDesc, itemsPerPage, page, text}]) {
  let filteredSequences = [...state.sequences];

  if (sortBy) {

    sortBy.forEach( (key, idx) => {
      filteredSequences.sort( (el0, el1) => {
        const a = el0?.[key];
        const b = el1?.[key];
        if (a < b) {
          return -1;
        } else if (a > b) {
          return 1;
        } else if (a == b) {
          return 0;
        } else if (a && !b) {
          return 1;
        } else if (!a && b) {
          return -1;
        } else {
          return 0;
        }
      });
      if (sortDesc && sortDesc[idx] === true) {
        filteredSequences.reverse();
      }
    });

  }

  if (sequence) {
    filteredSequences = filteredSequences.filter( item => item.sequence == sequence );
  }

  if (date0 && date1) {
    filteredSequences = filteredSequences.filter( sequence =>
      (sequence.ts0_final ?? sequence.ts0)?.substr(0, 10) >= date0 &&
      (sequence.ts1_final ?? sequence.ts1)?.substr(0, 10) <= date1
    );
  } else if (date0) {
    filteredSequences = filteredSequences.filter( sequence =>
      (sequence.ts0_final ?? sequence.ts0)?.substr(0, 10) == date0 ||
      (sequence.ts1_final ?? sequence.ts1)?.substr(0, 10) == date0
    );
  }

  if (text) {
    const tstampFilter = (value, search, item) => {
      return search?.length >= 5 && textFilter(value, search, item);
    };

    const numberFilter = (value, search, item) => {
      return value == search;
    };

    const textFilter = (value, search, item) => {
      return String(value).toLowerCase().includes(search.toLowerCase());
    };

    const searchFunctions = {
      ts0: tstampFilter,
      ts1: tstampFilter,
      ts0_final: tstampFilter,
      ts1_final: tstampFilter,
      sequence: numberFilter,
      line: numberFilter,
      fsp: numberFilter,
      lsp: numberFilter,
      fsp_final: numberFilter,
      lsp_final: numberFilter,
      remarks: textFilter,
      remarks_final: textFilter
    };

    filteredSequences = filteredSequences.filter( sequence => {
      for (let key in searchFunctions) {
        const fn = searchFunctions[key];
        if (fn(sequence[key], text, sequence)) {
          return true;
        }
      }
      return false;
    });
  }

  const count = filteredSequences.length;

  if (itemsPerPage && itemsPerPage > 0) {
    const offset = (page > 0)
      ? (page-1) * itemsPerPage
      : 0;

    filteredSequences = filteredSequences.slice(offset, offset+itemsPerPage);
  }

  return {sequences: filteredSequences, count};
}

export default { refreshSequences, getSequences };

lib/www/client/source/src/store/modules/sequence/getters.js (Normal file, 14 lines)
@@ -0,0 +1,14 @@

function sequences (state) {
  return state.sequences;
}

function sequenceCount (state) {
  return state.sequences?.length ?? 0;
}

function sequencesLoading (state) {
  return !!state.loading;
}

export default { sequences, sequenceCount, sequencesLoading };

@@ -0,0 +1,6 @@
import state from './state'
import getters from './getters'
import actions from './actions'
import mutations from './mutations'

export default { state, getters, actions, mutations };

@@ -0,0 +1,49 @@

function setSequences (state, sequences) {
  // We don't need or want the sequences array to be reactive, since
  // it can be tens of thousands of items long.
  state.sequences = Object.freeze(sequences);
}

function setSequencesLoading (state, abortController = new AbortController()) {
  state.loading = abortController;
}

// This assumes that we know any transactions have finished or we
// don't care about aborting.
function clearSequencesLoading (state) {
  state.loading = null;
}

function setSequencesTimestamp (state, timestamp = new Date()) {
  // NOTE: There is no `modified_on` property in the sequences
  // result or in the database schema, but we should probably add
  // one.
  if (timestamp === true) {
    const tstamp = state.sequences
      .map( sequence => sequence.modified_on )
      .reduce( (acc, cur) => acc > cur ? acc : cur );
    state.timestamp = tstamp ? new Date(tstamp) : new Date();
  } else {
    state.timestamp = timestamp;
  }
}

function setSequencesETag (state, etag) {
  state.etag = etag;
}

function abortSequencesLoading (state) {
  if (state.loading) {
    state.loading.abort();
  }
  state.loading = null;
}

export default {
  setSequences,
  setSequencesLoading,
  clearSequencesLoading,
  setSequencesTimestamp,
  setSequencesETag
};

@@ -0,0 +1,8 @@
const state = () => ({
  sequences: Object.freeze([]),
  loading: null,
  timestamp: null,
  etag: null,
});

export default state;

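All of the refresh* actions in these modules share one cancellation idiom: state.loading holds an AbortController, so a refresh that starts while another is still in flight aborts the older request first. A standalone sketch of the idiom with plain fetch and an invented URL:

// Illustration of the shared refresh / abort pattern.
let loading = null;
async function refresh (url) {
  if (loading) {
    loading.abort();                 // cancel any request still in flight
  }
  loading = new AbortController();
  try {
    const res = await fetch(url, { signal: loading.signal });
    return await res.json();
  } catch (err) {
    if (err.name != "AbortError") {  // aborts are expected here
      throw err;
    }
  } finally {
    loading = null;
  }
}
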
@@ -39,6 +39,12 @@
        {{ $refs.calendar.title }}
      </v-toolbar-title>
      <v-spacer></v-spacer>
+     <v-btn v-if="categoriesAvailable"
+       small
+       class="mx-4"
+       v-model="useCategories"
+       @click="useCategories = !useCategories"
+     >Labels {{useCategories ? "On" : "Off"}}</v-btn>
      <v-menu bottom right>
        <template v-slot:activator="{ on, attrs }">
          <v-btn
@@ -72,16 +78,23 @@
      <v-calendar
        ref="calendar"
        v-model="focus"
-       :events="events"
+       :events="items"
        :event-color="getEventColour"
        color="primary"
-       :type="type"
+       :type="view"
        :locale-first-day-of-year="4"
        :weekdays="weekdays"
        :show-week="true"
+       :category-days="categoryDays"
+       :categories="categories"
        @click:date="showLogForDate"
        @click:event="showLogForEvent"
-     ></v-calendar>
+       @change="setSpan"
+     >
+       <template v-slot:event="{ event }">
+         <div style="height:100%;overflow:scroll;" v-html="event.name"></div>
+       </template>
+     </v-calendar>
    </v-sheet>
  </div>
</template>
@@ -97,8 +110,9 @@ export default {
    weekdays: [1, 2, 3, 4, 5, 6, 0],
    type: "week",
    focus: "",
-   events: [
-   ],
+   items: [],
+   useCategories: false,
+   span: {},
    options: {
      sortBy: "sequence"
    }
@@ -117,28 +131,126 @@ export default {
      return labels[this.type];
    },

-   ...mapGetters(['loading'])
+   view () {
+     return this.useCategories ? "category" : this.type;
+   },
+
+   categoriesAvailable () {
+     return this.type == "day" || this.type == "4day";
+   },
+
+   categoryDays () {
+     if (this.useCategories) {
+       const days = {
+         month: 30,
+         week: 7,
+         "4day": 4,
+         day: 1
+       };
+
+       return days[this.type];
+     }
+   },
+
+   visibleItems () {
+     return this.items.filter(i => {
+       const end = i.end ?? i.start;
+       if (i.start > this.span.end) {
+         return false;
+       }
+       if (end < this.span.start) {
+         return false;
+       }
+       return true;
+     });
+   },
+
+   categories () {
+     return [...new Set(this.visibleItems.map(i => i.category ?? "General"))];
+   },
+
+   ...mapGetters(['sequencesLoading', 'sequences', 'events'])
  },

+ watch: {
+
+   sequences () {
+     const isFirstLoad = !this.items.length;
+
+     this.getEvents();
+
+     if (isFirstLoad) {
+       this.setLast();
+     }
+   },
+
+   events () {
+     const isFirstLoad = !this.items.length;
+
+     this.getEvents();
+
+     if (isFirstLoad) {
+       this.setLast();
+     }
+   },
+
+   type () {
+     this.getEvents();
+   },
+
+   categoriesAvailable (value) {
+     if (!value) {
+       this.useCategories = false;
+     }
+   }
+
+ },
+
  methods: {

    async getEvents () {
-     const query = new URLSearchParams(this.options);
-     const url = `/project/${this.$route.params.project}/sequence?${query.toString()}`;
-
-     const finalSequences = await this.api([url]) || [];
-     this.events = finalSequences.map(s => {
+     const sequences = this.sequences.map(s => {
        const e = {};
        //e.start = s.ts0.substring(0,10)+" "+s.ts0.substring(11,19)
        //e.end = s.ts1.substring(0,10)+" "+s.ts1.substring(11,19)
+       e.routerLink = { name: "logBySequence", params: { sequence: s.sequence } };
        e.start = new Date(s.ts0);
        e.end = new Date(s.ts1);
        e.timed = true;
        e.colour = "orange";
-       e.name = `Sequence ${s.sequence}`;
+       e.name = `<b>Sequence ${s.sequence}</b><br/>Line ${s.line}<br/><abbr title="Shotpoints">SP</abbr> ${s.fgsp ?? s.fsp}‒${s.lgsp ?? s.lsp}`;
+       e.category = "Sequence"
        return e;
      });
+
+     const lineChanges = this.events.filter(i => i.meta?.["*ReportLineChangeTime*"]?.value && i.meta?.["*ReportLineChangeTime*"]?.type != "excess").map(i => {
+       const e = {};
+       const duration = i.meta?.["*ReportLineChangeTime*"]?.value;
+       e.end = new Date(i.tstamp);
+       e.start = new Date(e.end - duration);
+       e.timed = true;
+       e.colour = "pink";
+       e.name = "Line change";
+       e.category = "Production"
+       return e;
+     });
+
+     const excludedLabels = [ "FSP", "FGSP", "LSP", "LGSP", "QC" ];
+     const otherEvents = this.events.filter(i => !excludedLabels.some(l => i.labels.includes(l))).map(i => {
+       const e = {};
+       e.start = new Date(i.tstamp);
+       e.colour = "brown";
+       e.timed = true;
+       e.name = this.$options.filters.markdownInline(i.remarks);
+       e.category = i.labels[0];
+       return e;
+     });
+
+     this.items = [...sequences];
+
+     if (this.type == "day" || this.type == "4day") {
+       this.items.push(...lineChanges, ...otherEvents);
+     }
    },

    getEventColour (event) {
@@ -150,11 +262,15 @@ export default {
    },

    setFirst () {
-     this.focus = this.events[this.events.length-1].start;
+     if (this.items.length) {
+       this.focus = this.items[this.items.length-1].start;
+     }
    },

    setLast () {
-     this.focus = this.events[0].start;
+     if (this.items.length) {
+       this.focus = this.items[0].start;
+     }
    },

    prev () {
@@ -175,6 +291,13 @@ export default {
      }
    },

+   setSpan (span) {
+     this.span = {
+       start: new Date(span.start.date),
+       end: new Date((new Date(span.end.date)).valueOf() + 86400000)
+     };
+   },
+
    ...mapActions(["api"])

@@ -182,9 +305,7 @@ export default {

  async mounted () {
    await this.getEvents();
-   if (this.events.length) {
-     this.setLast();
-   }
+   this.setLast();
  }
}

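The calendar derives its category lanes from whatever is currently visible, falling back to "General" for uncategorised items. A standalone sketch of that derivation with invented items:

// Illustration: unique category names, with a fallback lane.
const items = [
  { name: "Sequence 12", category: "Sequence" },
  { name: "Line change", category: "Production" },
  { name: "Whale sighting" }  // no category
];
const categories = [...new Set(items.map(i => i.category ?? "General"))];
console.log(categories); // [ "Sequence", "Production", "General" ]
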
@@ -11,6 +11,7 @@
          label="Filter"
          single-line
+         clearable
          hint="Filter by line number, first or last shotpoint or remarks. Use ‘incr’ or ‘+’ / ‘decr’ or ‘-’ to show only incrementing / decrementing lines"
        ></v-text-field>
      </v-toolbar>
    </v-card-title>
@@ -106,12 +107,14 @@
    <v-data-table
      :headers="headers"
      :items="items"
-     item-key="line"
      :items-per-page.sync="itemsPerPage"
+     :server-items-length="lineCount"
+     item-key="line"
      :search="filter"
-     :loading="loading"
-     :fixed-header="true"
-     :footer-props='{itemsPerPageOptions: [ 10, 25, 50, 100, 500, -1 ]}'
+     :loading="linesLoading"
+     :options.sync="options"
+     fixed-header
+     :footer-props='{itemsPerPageOptions: [ 10, 25, 50, 100, 500, -1 ], showFirstLastPage: true}'
      :item-class="itemClass"
      :show-select="selectOn"
      v-model="selectedRows"
@@ -124,6 +127,10 @@
      :preplot="item"
      :sequences="sequences.filter(s => s.line == item.line)"
      :sequence-href="(s) => `/projects/${$route.params.project}/log/sequence/${s.sequence}`"
+     :planned-sequences="plannedSequences.filter(s => s.line == item.line)"
+     :planned-sequence-href="() => `/projects/${$route.params.project}/plan`"
+     :pending-reshoots="null"
+     :pending-reshoot-href="null"
    >
      <template v-slot:empty>
        <div v-if="!item.ntba" class="sequence" title="Virgin"></div>
@@ -161,7 +168,7 @@
      icon
      small
      title="Edit"
-     :disabled="loading"
+     :disabled="linesLoading"
      @click="editItem(item, 'remarks')"
    >
      <v-icon small>mdi-square-edit-outline</v-icon>
@@ -251,9 +258,10 @@ export default {
    items: [],
    selectOn: false,
    selectedRows: [],
-   filter: null,
-   num_lines: null,
-   sequences: [],
+   filter: "",
+   options: {},
+   lineCount: null,
+   //sequences: [],
    activeItem: null,
    edit: null, // {line, key, value}
    queuedReload: false,
@@ -273,11 +281,22 @@ export default {
  },

  computed: {
-   ...mapGetters(['user', 'writeaccess', 'loading', 'serverEvent'])
+   ...mapGetters(['user', 'writeaccess', 'linesLoading', 'lines', 'sequences', 'plannedSequences'])
  },

  watch: {

+   options: {
+     handler () {
+       this.fetchLines();
+     },
+     deep: true
+   },
+
+   async lines () {
+     await this.fetchLines();
+   },
+
    async edit (newVal, oldVal) {
      if (newVal === null && oldVal !== null) {
        const item = this.items.find(i => i.line == oldVal.line);
@@ -296,39 +315,9 @@ export default {
      }
    },

-   async serverEvent (event) {
-     if (event.payload.pid == this.$route.params.project) {
-       if (event.channel == "preplot_lines" || event.channel == "preplot_points") {
-         if (!this.loading && !this.queuedReload) {
-           // Do not force a non-cached response if refreshing as a result
-           // of an event notification. We will assume that the server has
-           // already had time to update the cache by the time our request
-           // gets back to it.
-           this.getLines();
-         } else {
-           this.queuedReload = true;
-         }
-       } else if ([ "planned_lines", "raw_lines", "final_lines" ].includes(event.channel)) {
-         if (!this.loading && !this.queuedReload) {
-           this.getSequences();
-         } else {
-           this.queuedReload = true;
-         }
-       }
-     }
-   },
-
-   queuedReload (newVal, oldVal) {
-     if (newVal && !oldVal && !this.loading) {
-       this.getLines();
-       this.getSequences();
-     }
-   },
-
-   loading (newVal, oldVal) {
-     if (!newVal && oldVal && this.queuedReload) {
-       this.getLines();
-       this.getSequences();
+   filter (newVal, oldVal) {
+     if (newVal?.toLowerCase() != oldVal?.toLowerCase()) {
+       this.fetchLines();
      }
    },

@@ -468,43 +457,28 @@ export default {
      }
    },

-   async getNumLines () {
-     const projectInfo = await this.api([`/project/${this.$route.params.project}`]);
-     this.num_lines = projectInfo.lines;
-   },
-
-   async getLines () {
-
-     const url = `/project/${this.$route.params.project}/line`;
-
-     this.queuedReload = false;
-     this.items = await this.api([url]) || [];
-
-   },
-
-   async getSequences () {
-     const urlS = `/project/${this.$route.params.project}/sequence`;
-     this.sequences = await this.api([urlS]) || [];
-
-     const urlP = `/project/${this.$route.params.project}/plan`;
-     const planned = await this.api([urlP]) || [];
-     planned.forEach(i => i.status = "planned");
-     this.sequences.push(...planned);
-   },
-
    setActiveItem (item) {
      this.activeItem = this.activeItem == item
        ? null
        : item;
    },

-   ...mapActions(["api"])
+   async fetchLines (opts = {}) {
+     const options = {
+       text: this.filter,
+       ...this.options
+     };
+     const res = await this.getLines([this.$route.params.project, options]);
+     this.items = res.lines;
+     this.lineCount = res.count;
+   },
+
+   ...mapActions(["api", "getLines"])
  },

  mounted () {
-   this.getLines();
-   this.getNumLines();
-   this.getSequences();
+   this.fetchLines();

    // Initialise stylesheet
    const el = document.createElement("style");

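The deep watcher on options is what makes the table self-refreshing: any change to the v-data-table pagination or sort state re-runs fetchLines. A condensed, hypothetical analogue (Vue 2 style, with the data source stubbed out):

import Vue from 'vue';

// Illustration only: options changes drive refetching.
new Vue({
  data: () => ({ options: {}, items: [] }),
  watch: {
    options: {
      handler () { this.fetchLines(); },  // any page / sort change refetches
      deep: true
    }
  },
  methods: {
    async fetchLines () {
      // Would call the store's getLines with this.options here.
    }
  }
});
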
@@ -93,7 +93,21 @@
          append-icon="mdi-magnify"
          label="Filter"
          single-line
-         hide-details></v-text-field>
+         clearable
+         hide-details>
+         <template v-slot:prepend-inner>
+           <v-chip v-if="labelSearch"
+             class="mr-1"
+             small
+             close
+             @click:close="labelSearch=null"
+             :color="labels[labelSearch] && labels[labelSearch].view.colour"
+             :title="labels[labelSearch] && labels[labelSearch].view.description"
+             :dark="labels[labelSearch] && labels[labelSearch].view.dark"
+             :light="labels[labelSearch] && labels[labelSearch].view.light"
+           >{{labelSearch}}</v-chip>
+         </template>
+       </v-text-field>
      </v-toolbar>
    </v-card-title>
    <v-card-text>
@@ -215,13 +229,14 @@
      :headers="headers"
      :items="rows"
      :items-per-page.sync="itemsPerPage"
+     :server-items-length="eventCount"
      item-key="key"
      :item-class="itemClass"
      sort-by="tstamp"
      :sort-desc="true"
      :search="filter"
-     :custom-filter="searchTable"
-     :loading="loading"
+     :loading="eventsLoading"
+     :options.sync="options"
      fixed-header
      :footer-props='{itemsPerPageOptions: [ 10, 25, 50, 100, 500, -1 ], showFirstLastPage: true}'
      @click:row="setActiveItem"
@@ -249,12 +264,12 @@
        :dark="labels[label] && labels[label].view.dark"
        :light="labels[label] && labels[label].view.light"
        :key="label"
-       :href="$route.path+'?label='+encodeURIComponent(label)"
+       @click="labelSearch=label"
      >{{label}}</v-chip>
    </span>
    <dougal-event-edit-history v-if="entry.has_edits && writeaccess"
      :id="entry.id"
-     :disabled="loading"
+     :disabled="eventsLoading"
      :labels="labels"
    ></dougal-event-edit-history>
    <span v-if="entry.meta.readonly"
@@ -385,7 +400,6 @@ export default {
      }
    ],
    items: [],
-   labels: {},
    options: {},
    filter: "",
    filterableLabels: [ "QC", "QCAccepted" ],
@@ -394,7 +408,6 @@ export default {
    eventDialog: false,
    eventLabelsDialog: false,
    defaultEventTimestamp: null,
-   presetRemarks: null,
    remarksMenu: null,
    remarksMenuItem: null,
    editedEvent: {},
@@ -444,17 +457,6 @@ export default {
      return Object.values(rows);
    },

-   userLabels () {
-     const filtered = {};
-     for (const key in this.labels) {
-       if (this.labels[key].model.user) {
-         filtered[key] = this.labels[key];
-       }
-     }
-     return filtered;
-
-   },
-
    popularLabels () {
      const tuples = this.items.flatMap( i => i.labels )
        .filter( l => (this.labels[l]??{})?.model?.user )
@@ -466,6 +468,10 @@ export default {
        .sort( (a, b) => b[1]-a[1] );
    },

+   presetRemarks () {
+     return this.projectConfiguration?.events?.presetRemarks ?? [];
+   },
+
    defaultSequence () {
      if (this.$route.params.sequence) {
        return Number(this.$route.params.sequence.split(";").pop());
@@ -474,19 +480,24 @@ export default {
      }
    },

-   ...mapGetters(['user', 'writeaccess', 'loading', 'online', 'sequence', 'line', 'point', 'position', 'timestamp', 'lineName', 'serverEvent']),
+   ...mapGetters(['user', 'writeaccess', 'eventsLoading', 'online', 'sequence', 'line', 'point', 'position', 'timestamp', 'lineName', 'serverEvent', 'events', 'labels', 'userLabels']),
    ...mapState({projectSchema: state => state.project.projectSchema})

  },

  watch: {
    options: {
-     handler () {
-       //this.getEvents();
+     async handler () {
+       await this.fetchEvents();
      },
      deep: true
    },

+   async events () {
+     console.log("Events changed");
+     await this.fetchEvents();
+   },
+
    eventDialog (val) {
      if (val) {
        // If not online
@@ -494,30 +505,14 @@ export default {
      }
    },

-   async serverEvent (event) {
-     if (event.channel == "event" && event.payload.schema == this.projectSchema) {
-       if (!this.loading && !this.queuedReload) {
-         // Do not force a non-cached response if refreshing as a result
-         // of an event notification. We will assume that the server has
-         // already had time to update the cache by the time our request
-         // gets back to it.
-         this.getEvents();
-       } else {
-         this.queuedReload = true;
-       }
+   filter (newVal, oldVal) {
+     if (newVal?.toLowerCase() != oldVal?.toLowerCase()) {
+       this.fetchEvents();
      }
    },

-   queuedReload (newVal, oldVal) {
-     if (newVal && !oldVal && !this.loading) {
-       this.getEvents();
-     }
-   },
-
-   loading (newVal, oldVal) {
-     if (!newVal && oldVal && this.queuedReload) {
-       this.getEvents();
-     }
+   labelSearch () {
+     this.fetchEvents();
    },

    itemsPerPage (newVal, oldVal) {
@@ -574,50 +569,15 @@ export default {
      }
    },

-   async getEventCount () {
-     //this.eventCount = await this.api([`/project/${this.$route.params.project}/event/?count`]);
-     this.eventCount = null;
-   },
-
-   async getEvents (opts = {}) {
-
-     const query = new URLSearchParams(this.options);
-     if (this.options.itemsPerPage < 0) {
-       query.delete("itemsPerPage");
-     }
-
-     if (this.$route.params.sequence) {
-       query.set("sequence", this.$route.params.sequence);
-     }
-
-     if (this.$route.params.date0) {
-       query.set("date0", this.$route.params.date0);
-     }
-
-     if (this.$route.params.date1) {
-       query.set("date1", this.$route.params.date1);
-     }
-
-     const url = `/project/${this.$route.params.project}/event?${query.toString()}`;
-
-     this.queuedReload = false;
-     this.items = await this.api([url, opts]) || [];
-
-   },
-
-   async getLabelDefinitions () {
-     const url = `/project/${this.$route.params.project}/label`;
-
-     //const labelSet = {};
-     this.labels = await this.api([url]) ?? {};
-     //labels.forEach( l => labelSet[l.name] = l.data );
-     //this.labels = labelSet;
-   },
-
-   async getPresetRemarks () {
-     const url = `/project/${this.$route.params.project}/configuration`;
-
-     this.presetRemarks = (await this.api([url]))?.events?.presetRemarks ?? {};
+   async fetchEvents (opts = {}) {
+     const options = {
+       text: this.filter,
+       label: this.labelSearch,
+       ...this.options
+     };
+     const res = await this.getEvents([this.$route.params.project, options]);
+     this.items = res.events;
+     this.eventCount = res.count;
    },

    newItem (from = {}) {
@@ -691,7 +651,7 @@ export default {
    if (!err && res.ok) {
      this.showSnack(["Event saved", "success"]);
      this.queuedReload = true;
-     this.getEvents({cache: "reload"});
+     this.fetchEvents({cache: "reload"});
    }
  }

@@ -709,7 +669,7 @@ export default {
    if (!err && res.ok) {
      this.showSnack(["Event saved", "success"]);
      this.queuedReload = true;
-     this.getEvents({cache: "reload"});
+     this.fetchEvents({cache: "reload"});
    }
  }

@@ -756,7 +716,7 @@ export default {
    if (!err && res.ok) {
      this.showSnack([`${ids.length} events deleted`, "red"]);
      this.queuedReload = true;
-     this.getEvents({cache: "reload"});
+     this.fetchEvents({cache: "reload"});
    }
  }

@@ -772,7 +732,7 @@ export default {
    if (!err && res.ok) {
      this.showSnack(["Event deleted", "red"]);
      this.queuedReload = true;
-     this.getEvents({cache: "reload"});
+     this.fetchEvents({cache: "reload"});
    }
  }

@@ -806,19 +766,6 @@ export default {

  },

-  searchTable (value, search, item) {
-    if (!value && !search) return true;
-    const s = search.toLowerCase();
-    if (typeof value === 'string') {
-      return value.toLowerCase().includes(s);
-    } else if (typeof value === 'number') {
-      return value == search;
-    } else {
-      return item.items.some( i => i.remarks.toLowerCase().includes(s) ) ||
-        item.items.some( i => i.labels.some( l => l.toLowerCase().includes(s) ));
-    }
-  },
-
  viewOnMap(item) {
    if (item?.meta && item.meta?.geometry?.type == "Point") {
      const [ lon, lat ] = item.meta.geometry.coordinates;
@@ -857,14 +804,11 @@ export default {
    */
  },

- ...mapActions(["api", "showSnack"])
+ ...mapActions(["api", "showSnack", "refreshEvents", "getEvents"])
  },

  async mounted () {
-   await this.getLabelDefinitions();
-   this.getEventCount();
-   this.getEvents();
-   this.getPresetRemarks();
+   this.fetchEvents();

    window.addEventListener('keyup', this.handleKeyboardEvent);
  },

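The fetchEvents pattern works because every get* action returns both the page and the total, so v-data-table can run with server-items-length even though the actual paging happens inside the store. The slice arithmetic the actions share, as a standalone sketch:

// Illustration of the paging arithmetic used by the get* actions.
function paginate (rows, page, itemsPerPage) {
  const count = rows.length;  // total before slicing
  const offset = page > 0 ? (page - 1) * itemsPerPage : 0;
  return { rows: rows.slice(offset, offset + itemsPerPage), count };
}
console.log(paginate([1, 2, 3, 4, 5], 2, 2)); // { rows: [ 3, 4 ], count: 5 }
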
@@ -44,6 +44,7 @@
|
||||
label="Filter"
|
||||
single-line
|
||||
clearable
|
||||
hint="Filter by sequence, line, first or last shotpoints, remarks or start/end time"
|
||||
></v-text-field>
|
||||
</v-toolbar>
|
||||
</v-card-title>
|
||||
@@ -109,17 +110,24 @@
|
||||
:headers="headers"
|
||||
:items="items"
|
||||
:items-per-page.sync="itemsPerPage"
|
||||
:server-items-length="sequenceCount"
|
||||
item-key="sequence"
|
||||
:search="filter"
|
||||
:loading="loading"
|
||||
:fixed-header="true"
|
||||
:loading="plannedSequencesLoading"
|
||||
fixed-header
|
||||
no-data-text="No planned lines. Add lines via the context menu from either the Lines or Sequences view."
|
||||
:item-class="(item) => (activeItem == item && !edit) ? 'blue accent-1 elevation-3' : ''"
|
||||
:footer-props="{showFirstLastPage: true}"
|
||||
@click:row="setActiveItem"
|
||||
@contextmenu:row="contextMenu"
|
||||
>
|
||||
|
||||
<template v-slot:item.srss="{item}">
|
||||
<v-icon small :title="srssInfo(item)">{{srssIcon(item)}}</v-icon>
|
||||
<span style="white-space: nowrap;">
|
||||
<v-icon small :title="srssInfo(item)">{{srssIcon(item)}}</v-icon>
|
||||
/
|
||||
<v-icon small :title="wxInfo(item)" v-if="item.meta.wx">{{wxIcon(item)}}</v-icon>
|
||||
</span>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.sequence="{item, value}">
|
||||
@@ -271,7 +279,7 @@
|
||||
icon
|
||||
small
|
||||
title="Edit"
|
||||
:disabled="loading"
|
||||
:disabled="plannedSequencesLoading"
|
||||
@click="editItem(item, 'remarks')"
|
||||
>
|
||||
<v-icon small>mdi-square-edit-outline</v-icon>
|
||||
@@ -413,7 +421,8 @@ export default {
|
||||
remarks: null,
|
||||
editRemarks: false,
|
||||
filter: null,
|
||||
num_lines: null,
|
||||
options: {},
|
||||
sequenceCount: null,
|
||||
activeItem: null,
|
||||
edit: null, // {sequence, key, value}
|
||||
queuedReload: false,
|
||||
@@ -422,6 +431,123 @@ export default {
plannerConfig: null,
shiftAll: false, // Shift all sequences checkbox

// Weather API
wxData: null,
weathercode: {
0: {
description: "Clear sky",
icon: "mdi-weather-sunny"
},
1: {
description: "Mainly clear",
icon: "mdi-weather-sunny"
},
2: {
description: "Partly cloudy",
icon: "mdi-weather-partly-cloudy"
},
3: {
description: "Overcast",
icon: "mdi-weather-cloudy"
},
45: {
description: "Fog",
icon: "mdi-weather-fog"
},
48: {
description: "Depositing rime fog",
icon: "mdi-weather-fog"
},
51: {
description: "Light drizzle",
icon: "mdi-weather-partly-rainy"
},
53: {
description: "Moderate drizzle",
icon: "mdi-weather-partly-rainy"
},
55: {
description: "Dense drizzle",
icon: "mdi-weather-rainy"
},
56: {
description: "Light freezing drizzle",
icon: "mdi-weather-partly-snowy-rainy"
},
57: {
description: "Freezing drizzle",
icon: "mdi-weather-partly-snowy-rainy"
},
61: {
description: "Light rain",
icon: "mdi-weather-rainy"
},
63: {
description: "Moderate rain",
icon: "mdi-weather-rainy"
},
65: {
description: "Heavy rain",
icon: "mdi-weather-pouring"
},
66: {
description: "Light freezing rain",
icon: "mdi-loading"
},
67: {
description: "Freezing rain",
icon: "mdi-loading"
},
71: {
description: "Light snow",
icon: "mdi-loading"
},
73: {
description: "Moderate snow",
icon: "mdi-loading"
},
75: {
description: "Heavy snow",
icon: "mdi-loading"
},
77: {
description: "Snow grains",
icon: "mdi-loading"
},
80: {
description: "Light rain showers",
icon: "mdi-loading"
},
81: {
description: "Moderate rain showers",
icon: "mdi-loading"
},
82: {
description: "Violent rain showers",
icon: "mdi-loading"
},
85: {
description: "Light snow showers",
icon: "mdi-loading"
},
86: {
description: "Snow showers",
icon: "mdi-loading"
},
95: {
description: "Thunderstorm",
icon: "mdi-loading"
},
96: {
description: "Hailstorm",
icon: "mdi-loading"
},
99: {
description: "Heavy hailstorm",
icon: "mdi-loading"
},
},

// Context menu stuff
contextMenuShow: false,
contextMenuX: 0,
@@ -431,11 +557,22 @@ export default {
},

computed: {
...mapGetters(['user', 'writeaccess', 'loading', 'serverEvent'])
...mapGetters(['user', 'writeaccess', 'plannedSequencesLoading', 'plannedSequences', 'planRemarks'])
},

watch: {

options: {
handler () {
this.fetchPlannedSequences();
},
deep: true
},

async plannedSequences () {
await this.fetchPlannedSequences();
},

async edit (newVal, oldVal) {
if (newVal === null && oldVal !== null) {
const item = this.items.find(i => i.sequence == oldVal.sequence);
@@ -466,41 +603,9 @@ export default {
}
},

async serverEvent (event) {
if (event.channel == "planned_lines" && event.payload.pid == this.$route.params.project) {

// Ignore non-ops
/*
if (event.payload.old === null && event.payload.new === null) {
return;
}
*/

if (!this.loading && !this.queuedReload) {
// Do not force a non-cached response if refreshing as a result
// of an event notification. We will assume that the server has
// already had time to update the cache by the time our request
// gets back to it.
this.getPlannedLines();
} else {
this.queuedReload = true;
}
} else if (event.channel == "info" && event.payload.pid == this.$route.params.project) {
if (event.payload?.new?.key == "plan" && ("remarks" in (event.payload?.new?.value || {}))) {
this.remarks = event.payload?.new.value.remarks;
}
}
},

queuedReload (newVal, oldVal) {
if (newVal && !oldVal && !this.loading) {
this.getPlannedLines();
}
},

loading (newVal, oldVal) {
if (!newVal && oldVal && this.queuedReload) {
this.getPlannedLines();
filter (newVal, oldVal) {
if (newVal?.toLowerCase() != oldVal?.toLowerCase()) {
this.fetchPlannedSequences();
}
},

@@ -630,6 +735,113 @@ export default {
return text.join("\n");
},

wxInfo (line) {

function atm(key) {
return line.meta?.wx?.atmospheric?.hourly[key];
}

function mar(key) {
return line.meta?.wx?.marine?.hourly[key];
}

const code = atm("weathercode");

const description = this.weathercode[code]?.description ?? `WMO code ${code}`;
const wind_speed = Math.round(atm("windspeed_10m"));
const wind_direction = String(Math.round(atm("winddirection_10m"))).padStart(3, "0");
const pressure = Math.round(atm("surface_pressure"));
const temperature = Math.round(atm("temperature_2m"));
const humidity = atm("relativehumidity_2m");
const precipitation = atm("precipitation");
const precipitation_probability = atm("precipitation_probability");
const precipitation_str = precipitation_probability
? `\nPrecipitation ${precipitation} mm (prob. ${precipitation_probability}%)`
: "";

const wave_height = mar("wave_height").toFixed(1);
const wave_direction = mar("wave_direction");
const wave_period = mar("wave_period");

return `${description}\n${temperature}° C\n${pressure} hPa\nWind ${wind_speed} kt ${wind_direction}°\nRelative humidity ${humidity}%${precipitation_str}\nWaves ${wave_height} m ${wave_direction}° @ ${wave_period} s`;
},

wxIcon (line) {
const code = line.meta?.wx?.atmospheric?.hourly?.weathercode;

return this.weathercode[code]?.icon ?? "mdi-help";

},

async wxQuery (line) {
function midpoint(line) {
// WARNING Fails if across the antimeridian
const longitude = (line.geometry.coordinates[0][0] + line.geometry.coordinates[1][0])/2;
const latitude = (line.geometry.coordinates[0][1] + line.geometry.coordinates[1][1])/2;
return [ longitude, latitude ];
}

function extract (fcst) {
const τ = (line.ts0.valueOf() + line.ts1.valueOf()) / 2000;
// Seed the reduce with null so index 0 is considered too, and
// fall back to an empty array if there is no hourly data.
const [idx, ε] = fcst?.hourly?.time?.reduce( (acc, cur, idx) => {
const δ = Math.abs(cur - τ);
const retval = acc
? acc[1] < δ
? acc
: [ idx, δ ]
: [ idx, δ ];

return retval;
}, null) ?? [];

if (idx != null) {
for (let key in fcst?.hourly) {
fcst.hourly[key] = fcst.hourly[key][idx];
}
}

return fcst;
}

async function fetch_atmospheric (opts) {
const { longitude, latitude, dt0, dt1 } = opts;

const url = `https://api.open-meteo.com/v1/forecast?latitude=${latitude}&longitude=${longitude}&hourly=temperature_2m,relativehumidity_2m,precipitation_probability,precipitation,weathercode,pressure_msl,surface_pressure,windspeed_10m,winddirection_10m&daily=uv_index_max&windspeed_unit=kn&timeformat=unixtime&timezone=GMT&start_date=${dt0}&end_date=${dt1}&format=json`;
const init = {};
const res = await fetch (url, init);
if (res?.ok) {
const data = await res.json();

return extract(data);
}
}

async function fetch_marine (opts) {
const { longitude, latitude, dt0, dt1 } = opts;
const url = `https://marine-api.open-meteo.com/v1/marine?latitude=${latitude}&longitude=${longitude}&hourly=wave_height,wave_direction,wave_period&timeformat=unixtime&timezone=GMT&start_date=${dt0}&end_date=${dt1}&format=json`;

const init = {};
const res = await fetch (url, init);
if (res?.ok) {
const data = await res.json();

return extract(data);
}
}

if (line) {
const [ longitude, latitude ] = midpoint(line);
const dt0 = line.ts0.toISOString().substr(0, 10);
const dt1 = line.ts1.toISOString().substr(0, 10);

return {
atmospheric: await fetch_atmospheric({longitude, latitude, dt0, dt1}),
marine: await fetch_marine({longitude, latitude, dt0, dt1})
};
}
},

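The extract() helper above collapses each hourly forecast series to the single sample nearest the line's temporal midpoint. The same idea as a standalone sketch (the function name and data shapes are illustrative, not part of the component):

// Sketch: pick the index of the hourly timestamp nearest a target epoch τ,
// then collapse every hourly series to that one sample.
function nearestHourly (hourly, τ) {
  if (!hourly?.time?.length) return hourly;
  let idx = 0;
  for (let i = 1; i < hourly.time.length; i++) {
    if (Math.abs(hourly.time[i] - τ) < Math.abs(hourly.time[idx] - τ)) {
      idx = i;
    }
  }
  const out = {};
  for (const key in hourly) {
    out[key] = hourly[key][idx];
  }
  return out;
}

// nearestHourly({time: [0, 3600, 7200], temperature_2m: [10, 11, 12]}, 3000)
// → {time: 3600, temperature_2m: 11}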
lagAfter (item) {
const pos = this.items.indexOf(item)+1;
if (pos != 0) {
@@ -662,7 +874,6 @@ export default {
const url = `/project/${this.$route.params.project}/plan/${this.contextMenuItem.sequence}`;
const init = {method: "DELETE"};
await this.api([url, init]);
await this.getPlannedLines();
},

editItem (item, key, value) {
@@ -714,18 +925,6 @@ export default {
}
},

async getPlannedLines () {

const url = `/project/${this.$route.params.project}/plan`;

this.queuedReload = false;
this.items = await this.api([url]) || [];
for (const item of this.items) {
item.ts0 = new Date(item.ts0);
item.ts1 = new Date(item.ts1);
}
},

async getPlannerConfig () {
const url = `/project/${this.$route.params.project}/configuration/planner`;
this.plannerConfig = await this.api([url]) || {
@@ -736,14 +935,15 @@ export default {
}
},

async getPlannerRemarks () {
const url = `/project/${this.$route.params.project}/info/plan/remarks`;
this.remarks = await this.api([url]) || "";
},

async getSequences () {
const url = `/project/${this.$route.params.project}/sequence`;
this.sequences = await this.api([url]) || [];
async fetchPlannedSequences (opts = {}) {
const options = {
text: this.filter,
...this.options
};
const res = await this.getPlannedSequences([this.$route.params.project, options]);
this.items = res.sequences;
this.sequenceCount = res.count;
this.remarks = this.planRemarks;
},

setActiveItem (item) {
@@ -752,13 +952,12 @@ export default {
: item;
},

...mapActions(["api", "showSnack"])
...mapActions(["api", "showSnack", "getPlannedSequences"])
},

async mounted () {
await this.getPlannerConfig();
this.getPlannedLines();
this.getPlannerRemarks();
await this.fetchPlannedSequences();
}

}

@@ -37,7 +37,7 @@ export default {
return this.loading || this.projectId;
},

...mapGetters(["loading", "projectId", "serverEvent"])
...mapGetters(["loading", "projectId", "projectSchema", "serverEvent"])
},

watch: {
@@ -45,16 +45,39 @@ export default {
if (event.channel == "project" && event.payload?.operation == "DELETE" && event.payload?.schema == "public") {
// Project potentially deleted
await this.getProject(this.$route.params.project);
} else if (event.payload?.schema == this.projectSchema) {
if (event.channel == "event") {
this.refreshEvents();
} else if (event.channel == "planned_lines") {
this.refreshPlan();
} else if (["raw_lines", "final_lines", "final_shots"].includes(event.channel)) {
this.refreshSequences();
} else if (["preplot_lines", "preplot_points"].includes(event.channel)) {
this.refreshLines();
} else if (event.channel == "info") {
if ((event.payload?.new ?? event.payload?.old)?.key == "plan") {
this.refreshPlan();
}
} else if (event.channel == "project") {
this.getProject(this.$route.params.project);
}
}
}
},

methods: {
...mapActions(["getProject"])
...mapActions(["getProject", "refreshLines", "refreshSequences", "refreshEvents", "refreshLabels", "refreshPlan"])
},

async mounted () {
await this.getProject(this.$route.params.project);
if (this.projectFound) {
this.refreshLines();
this.refreshSequences();
this.refreshEvents();
this.refreshLabels();
this.refreshPlan();
}
}

}

@@ -433,10 +433,7 @@ export default {
async getLabelDefinitions () {
const url = `/project/${this.$route.params.project}/label`;

const labelSet = {};
const labels = await this.api([url]) || [];
labels.forEach( l => labelSet[l.name] = l.data );
this.labels = labelSet;
this.labels = await this.api([url]) || {};
},

async getQCData () {

@@ -148,15 +148,16 @@
:headers="headers"
:items="items"
:items-per-page.sync="itemsPerPage"
:server-items-length="sequenceCount"
item-key="sequence"
:server-items-length="num_rows"
:search="filter"
:custom-filter="customFilter"
:loading="loading"
:fixed-header="true"
:footer-props='{itemsPerPageOptions: [ 10, 25, 50, 100, 500, -1 ]}'
show-expand
:item-class="(item) => activeItem == item ? 'blue accent-1 elevation-3' : ''"
:search="filter"
x-custom-filter="customFilter"
:loading="sequencesLoading"
:options.sync="options"
fixed-header
:footer-props='{itemsPerPageOptions: [ 10, 25, 50, 100, 500, -1 ], showFirstLastPage: true}'
show-expand
@click:row="setActiveItem"
@contextmenu:row="contextMenu"
>
@@ -176,7 +177,7 @@
icon
small
title="Cancel edit"
:disabled="loading"
:disabled="sequencesLoading"
@click="edit.value = item.remarks; edit = null"
>
<v-icon small>mdi-close</v-icon>
@@ -185,7 +186,7 @@
icon
small
title="Save edits"
:disabled="loading"
:disabled="sequencesLoading"
@click="edit = null"
>
<v-icon small>mdi-content-save-edit-outline</v-icon>
@@ -196,7 +197,7 @@
icon
small
title="Edit"
:disabled="loading"
:disabled="sequencesLoading"
@click="editItem(item, 'remarks')"
>
<v-icon small>mdi-square-edit-outline</v-icon>
@@ -210,7 +211,7 @@
class="markdown"
autofocus
placeholder="Enter your text here"
:disabled="loading"
:disabled="sequencesLoading"
v-model="edit.value"
>
</v-textarea>
@@ -228,7 +229,7 @@
icon
small
title="Cancel edit"
:disabled="loading"
:disabled="sequencesLoading"
@click="edit.value = item.remarks_final; edit = null"
>
<v-icon small>mdi-close</v-icon>
@@ -237,7 +238,7 @@
icon
small
title="Save edits"
:disabled="loading"
:disabled="sequencesLoading"
@click="edit = null"
>
<v-icon small>mdi-content-save-edit-outline</v-icon>
@@ -248,7 +249,7 @@
icon
small
title="Edit"
:disabled="loading"
:disabled="sequencesLoading"
@click="editItem(item, 'remarks_final')"
>
<v-icon small>mdi-square-edit-outline</v-icon>
@@ -262,7 +263,7 @@
class="markdown"
autofocus
placeholder="Enter your text here"
:disabled="loading"
:disabled="sequencesLoading"
v-model="edit.value"
>
</v-textarea>
@@ -566,7 +567,7 @@ export default {
items: [],
filter: "",
options: {},
num_rows: null,
sequenceCount: null,
activeItem: null,
edit: null, // {sequence, key, value}
queuedReload: false,
@@ -593,17 +594,22 @@ export default {
return this.queuedItems.find(i => i.payload.sequence == this.contextMenuItem.sequence);
},

...mapGetters(['user', 'writeaccess', 'loading', 'serverEvent'])
...mapGetters(['user', 'writeaccess', 'sequencesLoading', 'sequences'])
},

watch: {

options: {
handler () {
this.getSequences();
this.fetchSequences();
},
deep: true
},

async sequences () {
await this.fetchSequences();
},

async edit (newVal, oldVal) {
if (newVal === null && oldVal !== null) {
const item = this.items.find(i => i.sequence == oldVal.sequence);
@@ -617,39 +623,9 @@ export default {
}
},

async serverEvent (event) {
const subscriptions = ["raw_lines", "final_lines", "final_shots"];
if (subscriptions.includes(event.channel) && event.payload.pid == this.$route.params.project) {
if (!this.loading && !this.queuedReload) {
// Do not force a non-cached response if refreshing as a result
// of an event notification. We will assume that the server has
// already had time to update the cache by the time our request
// gets back to it.
this.getSequences();
} else {
this.queuedReload = true;
}
} else if (event.channel == "queue_items") {
const project =
event.payload?.project ??
event.payload?.new?.payload?.project ??
event.payload?.old?.payload?.project;

if (project == this.$route.params.project) {
this.getQueuedItems();
}
}
},

queuedReload (newVal, oldVal) {
if (newVal && !oldVal && !this.loading) {
this.getSequences();
}
},

loading (newVal, oldVal) {
if (!newVal && oldVal && this.queuedReload) {
this.getSequences();
filter (newVal, oldVal) {
if (newVal?.toLowerCase() != oldVal?.toLowerCase()) {
this.fetchSequences();
}
},

@@ -818,19 +794,14 @@ export default {
this.num_rows = projectInfo.sequences;
},

async getSequences () {

const query = new URLSearchParams(this.options);
query.set("filter", this.filter);
query.set("files", true);
if (this.options.itemsPerPage < 0) {
query.delete("itemsPerPage");
}
const url = `/project/${this.$route.params.project}/sequence?${query.toString()}`;

this.queuedReload = false;
this.items = await this.api([url]) || [];

async fetchSequences (opts = {}) {
const options = {
text: this.filter,
...this.options
};
const res = await this.getSequences([this.$route.params.project, options]);
this.items = res.sequences;
this.sequenceCount = res.count;
},

async getQueuedItems () {
@@ -878,11 +849,11 @@ export default {
return false;
},

...mapActions(["api", "showSnack"])
...mapActions(["api", "showSnack", "getSequences"])
},

mounted () {
this.getSequences();
this.fetchSequences();
this.getNumLines();
this.getQueuedItems();
}

@@ -100,8 +100,8 @@ app.map({
get: [ mw.project.summary.get ],
},
'/project/:project/configuration': {
get: [ mw.auth.access.write, mw.project.configuration.get ], // Get project configuration
patch: [ mw.auth.access.write, mw.project.configuration.patch ], // Modify project configuration
get: [ mw.project.configuration.get ], // Get project configuration
patch: [ mw.auth.access.admin, mw.project.configuration.patch ], // Modify project configuration
},

/*
@@ -109,19 +109,19 @@ app.map({
*/

'/project/:project/gis': {
get: [ mw.gis.project.bbox ]
get: [ mw.etag.noSave, mw.gis.project.bbox ]
},
'/project/:project/gis/preplot': {
get: [ mw.gis.project.preplot ]
get: [ mw.etag.noSave, mw.gis.project.preplot ]
},
'/project/:project/gis/preplot/:featuretype(line|point)': {
get: [ mw.gis.project.preplot ]
get: [ mw.etag.noSave, mw.gis.project.preplot ]
},
'/project/:project/gis/raw/:featuretype(line|point)': {
get: [ mw.gis.project.raw ]
get: [ mw.etag.noSave, mw.gis.project.raw ]
},
'/project/:project/gis/final/:featuretype(line|point)': {
get: [ mw.gis.project.final ]
get: [ mw.etag.noSave, mw.gis.project.final ]
},
'/project/:project/gis/layer': {
get: [ mw.etag.noSave, mw.gis.project.layer.get ]
@@ -181,6 +181,9 @@ app.map({
post: [ mw.auth.access.write, mw.event.post ],
put: [ mw.auth.access.write, mw.event.put ],
delete: [ mw.auth.access.write, mw.event.delete ],
'changes/:since': {
get: [ mw.event.changes ]
},
// TODO Rename -/:sequence → sequence/:sequence
'-/:sequence/': { // NOTE: We need to avoid conflict with the next endpoint ☹
get: [ mw.event.sequence.get ],
@@ -200,25 +203,25 @@ app.map({
'/project/:project/qc': {
'/results': {
// Get all QC results for :project
get: [ mw.qc.results.get ],
get: [ mw.etag.noSave, mw.qc.results.get ],

// Delete all QC results for :project
delete: [ mw.auth.access.write, mw.qc.results.delete ],
delete: [ mw.etag.noSave, mw.auth.access.write, mw.qc.results.delete ],

'/accept': {
post: [ mw.auth.access.write, mw.qc.results.accept ]
post: [ mw.etag.noSave, mw.auth.access.write, mw.qc.results.accept ]
},

'/unaccept': {
post: [ mw.auth.access.write, mw.qc.results.unaccept ]
post: [ mw.etag.noSave, mw.auth.access.write, mw.qc.results.unaccept ]
},

'/sequence/:sequence': {
// Get QC results for :project, :sequence
get: [ mw.qc.results.get ],
get: [ mw.etag.noSave, mw.qc.results.get ],

// Delete QC results for :project, :sequence
delete: [ mw.auth.access.write, mw.qc.results.delete ]
delete: [ mw.etag.noSave, mw.auth.access.write, mw.qc.results.delete ]
}
}
},
@@ -268,9 +271,9 @@ app.map({
get: [ mw.auth.access.write, mw.etag.noSave, mw.files.get ]
},
'/navdata/': {
get: [ mw.navdata.get ],
get: [ mw.etag.noSave, mw.navdata.get ],
'gis/:featuretype(line|point)': {
get: [ mw.gis.navdata.get ]
get: [ mw.etag.noSave, mw.gis.navdata.get ]
}
},
'/info/': {

@@ -33,7 +33,9 @@ function saveResponse (res) {
const cache = getCache(res);
const req = res.req;
console.log(`Saving ETag: ${req.method} ${req.url} → ${etag}`);
cache[req.url] = {etag, headers: res.getHeaders()};
const headers = structuredClone(res.getHeaders());
delete headers["set-cookie"];
cache[req.url] = {etag, headers};
}
}
};

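A cached entry is presumably replayed later on an If-None-Match hit; cloning the headers and dropping set-cookie keeps per-user cookies out of the shared cache. A minimal sketch of that replay side, assuming the cache shape written by saveResponse() above (the middleware itself is hypothetical):

// Sketch: answer 304 from the ETag cache, replaying the sanitised headers.
function checkRequest (req, res, next) {
  const entry = getCache(res)?.[req.url]; // same cache saveResponse() writes to
  if (entry && req.headers["if-none-match"] === entry.etag) {
    for (const [name, value] of Object.entries(entry.headers)) {
      res.setHeader(name, value); // safe: set-cookie was stripped at save time
    }
    return res.status(304).end();
  }
  next();
}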
@@ -43,15 +43,26 @@ const rels = [
matches: [ ],
callback (url, data) {
if (data.payload?.table == "info") {
const pid = data.payload?.pid;
const key = (data.payload?.new ?? data.payload?.old)?.key;

const rx = /^\/project\/([^\/]+)\/info\/([^\/?]+)[\/?]?/;
const match = url.match(rx);
if (match) {
if (match[1] == data.payload.pid) {
if (match[1] == pid) {
if (match[2] == data.payload?.old?.key || match[2] == data.payload?.new?.key) {
return true;
}
}
}

if (key == "plan") {
const rx = /^\/project\/([^\/]+)\/plan[\/?]?/;
const match = url.match(rx);
if (match) {
return match[1] == pid;
}
}
}
return false;
}

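The callback above answers "does this database notification invalidate this cached URL?". A hedged walk-through for a plan remarks update (the pid and payload are made up for illustration):

// Hypothetical notification: the "plan" info key changed on project "nsr23".
const data = {
  payload: {
    table: "info",
    pid: "nsr23",
    new: { key: "plan", value: { remarks: "updated" } }
  }
};
// callback("/project/nsr23/info/plan", data)   → true  (info key matches)
// callback("/project/nsr23/plan?page=1", data) → true  (plan list depends on plan/remarks)
// callback("/project/other/plan", data)        → false (different pid)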
14
lib/www/server/api/middleware/event/changes.js
Normal file
@@ -0,0 +1,14 @@

const { event } = require('../../../lib/db');

const json = async function (req, res, next) {
try {
const response = await event.changes(req.params.project, req.params.since, req.query);
res.status(200).send(response);
next();
} catch (err) {
next(err);
}
};

module.exports = json;
@@ -6,5 +6,6 @@ module.exports = {
post: require('./post'),
put: require('./put'),
patch: require('./patch'),
delete: require('./delete')
delete: require('./delete'),
changes: require('./changes')
}

@@ -1,9 +1,14 @@

const { plan } = require('../../../../lib/db');
const { plan, info } = require('../../../../lib/db');

const json = async function (req, res, next) {
try {
const response = await plan.list(req.params.project, req.query);
const sequences = await plan.list(req.params.project, req.query) ?? [];
const remarks = await info.get(req.params.project, "plan/remarks", req.query, req.user.role) ?? null;
const response = {
remarks,
sequences
};
res.status(200).send(response);
next();
} catch (err) {

@@ -9,105 +9,16 @@ const { ALERT, ERROR, WARNING, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
* the last shot and first shot of the previous and current dates, respectively.
*/
class DetectFDSP {
/* Data may come much faster than we can process it, so we put it
* in a queue and process it at our own pace.
*
* The run() method fills the queue with the necessary data and then
* calls processQueue().
*
* The processQueue() method looks at the first two elements in
* the queue and processes them if they are not already being taken
* care of by a previous processQueue() call – this will happen when
* data is coming in faster than it can be processed.
*
* If the processQueue() call is the first to see the bottommost
* two elements, it will process them and, when finished, it will set
* the `isPending` flag of the bottommost element to `false`, thus
* letting the next call know that it has work to do.
*
* If the queue was empty, run() will set the `isPending` flag of its
* first element to a falsy value, thus bootstrapping the process.
*/
static MAX_QUEUE_SIZE = 125000;

queue = [];
author = `*${this.constructor.name}*`;
prev = null;

async processQueue () {
DEBUG("Queue length", this.queue.length);
while (this.queue.length > 1) {

if (this.queue[0].isPending) {
setImmediate(() => this.processQueue());
return;
}

const prev = this.queue.shift();
const cur = this.queue[0];

const sequence = Number(cur._sequence);

try {

if (prev.lineName == cur.lineName && prev._sequence == cur._sequence &&
prev.lineStatus == "online" && cur.lineStatus == "online" && sequence) {

// DEBUG("Previous", prev);
// DEBUG("Current", cur);

if (prev.time.substr(0, 10) != cur.time.substr(0, 10)) {
// Possibly a date change, but could also be a missing timestamp
// or something else.

const ts0 = new Date(prev.time);
const ts1 = new Date(cur.time);

if (!isNaN(ts0) && !isNaN(ts1) && ts0.getUTCDay() != ts1.getUTCDay()) {
INFO("Sequence shot across midnight UTC detected", cur._sequence, cur.lineName);

const ldsp = {
sequence: prev._sequence,
point: prev._point,
remarks: "Last shotpoint of the day",
labels: ["LDSP", "Prod"],
meta: {auto: true, insertedBy: this.constructor.name}
};

const fdsp = {
sequence: cur._sequence,
point: cur._point,
remarks: "First shotpoint of the day",
labels: ["FDSP", "Prod"],
meta: {auto: true, insertedBy: this.constructor.name}
};

INFO("LDSP", ldsp);
INFO("FDSP", fdsp);

const projectId = await schema2pid(prev._schema);

if (projectId) {
await event.post(projectId, ldsp);
await event.post(projectId, fdsp);
} else {
ERROR("projectId not found for", prev._schema);
}
} else {
WARNING("False positive on these timestamps", prev.time, cur.time);
WARNING("No events were created");
}
}
}
// Processing of this shot has already been completed.
// The queue can now move forward.
} catch (err) {
ERROR(err);
} finally {
cur.isPending = false;
}
}
constructor () {
DEBUG(`${this.author} instantiated`);
}

async run (data) {
async run (data, ctx) {

if (!data || data.channel !== "realtime") {
return;
}
@@ -116,27 +27,70 @@ class DetectFDSP {
return;
}

const meta = data.payload.new.meta;

if (this.queue.length < DetectFDSP.MAX_QUEUE_SIZE) {

const event = {
isPending: this.queue.length,
_schema: meta._schema,
time: meta.time,
lineStatus: meta.lineStatus,
_sequence: meta._sequence,
_point: meta._point,
lineName: meta.lineName
};
this.queue.push(event);
// DEBUG("EVENT", event);

} else {
ALERT("Queue full at", this.queue.length);
if (!this.prev) {
DEBUG("Initialising `prev`");
this.prev = data;
return;
}

this.processQueue();
try {
DEBUG("Running");
const cur = data;
const sequence = Number(cur._sequence);

if (this.prev.lineName == cur.lineName && this.prev._sequence == cur._sequence &&
this.prev.lineStatus == "online" && cur.lineStatus == "online" && sequence) {

if (this.prev.time.substr(0, 10) != cur.time.substr(0, 10)) {
// Possibly a date change, but could also be a missing timestamp
// or something else.

const ts0 = new Date(this.prev.time);
const ts1 = new Date(cur.time);

if (!isNaN(ts0) && !isNaN(ts1) && ts0.getUTCDay() != ts1.getUTCDay()) {
INFO("Sequence shot across midnight UTC detected", cur._sequence, cur.lineName);

const ldsp = {
sequence: this.prev._sequence,
point: this.prev._point,
remarks: "Last shotpoint of the day",
labels: ["LDSP", "Prod"],
meta: {auto: true, author: `*${this.constructor.name}*`}
};

const fdsp = {
sequence: cur._sequence,
point: cur._point,
remarks: "First shotpoint of the day",
labels: ["FDSP", "Prod"],
meta: {auto: true, author: `*${this.constructor.name}*`}
};

INFO("LDSP", ldsp);
INFO("FDSP", fdsp);

const projectId = await schema2pid(this.prev._schema);

if (projectId) {
await event.post(projectId, ldsp);
await event.post(projectId, fdsp);
} else {
ERROR("projectId not found for", this.prev._schema);
}
} else {
WARNING("False positive on these timestamps", this.prev.time, cur.time);
WARNING("No events were created");
}
}

}
} catch (err) {
DEBUG(`${this.author} error`, err);
throw err;
} finally {
this.prev = data;
}
}
}


@@ -0,0 +1,60 @@
const project = require('../../lib/db/project');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

class DetectProjectConfigurationChange {

author = `*${this.constructor.name}*`;

constructor (ctx) {
DEBUG(`${this.author} instantiated`);

// Grab project configurations.
// NOTE that this will run asynchronously
this.run({channel: "project"}, ctx);
}

async run (data, ctx) {

if (!data || data.channel !== "project") {
return;
}

// Project notifications, as of this writing, most likely
// do not carry payloads as those exceed the notification
// size limit.
// For our purposes, we do not care as we just re-read all
// the configurations for all non-archived projects.

try {
DEBUG("Project configuration change detected")

const projects = await project.get();

const _ctx_data = {};
for (let pid of projects.map(i => i.pid)) {
DEBUG("Retrieving configuration for", pid);
const cfg = await project.configuration.get(pid);
if (cfg?.archived === true) {
DEBUG(pid, "is archived. Ignoring");
continue;
}

DEBUG("Saving configuration for", pid);
_ctx_data[pid] = cfg;
}

if (! ("projects" in ctx)) {
ctx.projects = {};
}

ctx.projects.configuration = _ctx_data;
DEBUG("Committed project configuration to ctx.projects.configuration");

} catch (err) {
DEBUG(`${this.author} error`, err);
throw err;
}
}
}

module.exports = DetectProjectConfigurationChange;
80
lib/www/server/events/handlers/detect-soft-start.js
Normal file
@@ -0,0 +1,80 @@
const { schema2pid } = require('../../lib/db/connection');
const { event } = require('../../lib/db');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

class DetectSoftStart {

author = `*${this.constructor.name}*`;
prev = null;

constructor () {
DEBUG(`${this.author} instantiated`);
}

async run (data, ctx) {

if (!data || data.channel !== "realtime") {
return;
}

if (!(data.payload && data.payload.new && data.payload.new.meta)) {
return;
}

if (!this.prev) {
DEBUG("Initialising `prev`");
this.prev = data;
return;
}

try {
DEBUG("Running");
const cur = data?.payload?.new?.meta;
const prev = this.prev?.payload?.new?.meta;
// DEBUG("%j", prev);
// DEBUG("%j", cur);
DEBUG("cur.num_guns: %d\ncur.num_active: %d\nprv.num_active: %d\ntest passed: %j", cur.num_guns, cur.num_active, prev.num_active, cur.num_active >= 1 && !prev.num_active && cur.num_active < cur.num_guns);


if (cur.num_active >= 1 && !prev.num_active && cur.num_active < cur.num_guns) {
INFO("Soft start detected @", cur.tstamp);

// FIXME Shouldn't need to use schema2pid as pid already present in payload.
const projectId = await schema2pid(cur._schema ?? prev._schema);

// TODO: Try and grab the corresponding comment from the configuration?
const payload = {
tstamp: cur.tstamp,
remarks: "Soft start",
labels: [ "Daily", "Guns", "Prod" ],
meta: {auto: true, author: `*${this.constructor.name}*`}
};
DEBUG("Posting event", projectId, payload);
await event.post(projectId, payload);

} else if (cur.num_active == cur.num_guns && prev.num_active < cur.num_active) {
INFO("Full volume detected @", cur.tstamp);

const projectId = await schema2pid(cur._schema ?? prev._schema);

// TODO: Try and grab the corresponding comment from the configuration?
const payload = {
tstamp: cur.tstamp,
remarks: "Full volume",
labels: [ "Daily", "Guns", "Prod" ],
meta: {auto: true, author: `*${this.constructor.name}*`}
};
DEBUG("Posting event", projectId, payload);
await event.post(projectId, payload);
}

} catch (err) {
DEBUG(`${this.author} error`, err);
throw err;
} finally {
this.prev = data;
}
}
}

module.exports = DetectSoftStart;
@@ -1,114 +1,17 @@
const { schema2pid } = require('../../lib/db/connection');
const { event } = require('../../lib/db');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

class DetectSOLEOL {
/* Data may come much faster than we can process it, so we put it
* in a queue and process it at our own pace.
*
* The run() method fills the queue with the necessary data and then
* calls processQueue().
*
* The processQueue() method takes the first two elements in
* the queue and processes them if they are not already being taken
* care of by a previous processQueue() call – this will happen when
* data is coming in faster than it can be processed.
*
* If the processQueue() call is the first to see the bottommost
* two elements, it will process them and, when finished, it will set
* the `isPending` flag of the bottommost element to `false`, thus
* letting the next call know that it has work to do.
*
* If the queue was empty, run() will set the `isPending` flag of its
* first element to a falsy value, thus bootstrapping the process.
*/
static MAX_QUEUE_SIZE = 125000;

queue = [];
author = `*${this.constructor.name}*`;
prev = null;

async processQueue () {
while (this.queue.length > 1) {
if (this.queue[0].isPending) {
setImmediate(() => this.processQueue());
return;
}

const prev = this.queue.shift();
const cur = this.queue[0];

const sequence = Number(cur._sequence);

try {

if (prev.lineName == cur.lineName && prev._sequence == cur._sequence &&
prev.lineStatus != "online" && cur.lineStatus == "online" && sequence) {
// console.log("TRANSITION TO ONLINE", prev, cur);

// Check if there are already FSP, FGSP events for this sequence
const projectId = await schema2pid(cur._schema);
const sequenceEvents = await event.list(projectId, {sequence});

const labels = ["FSP", "FGSP"].filter(l => !sequenceEvents.find(i => i.labels.includes(l)));

if (labels.includes("FSP")) {
// At this point labels contains either FSP only or FSP + FGSP,
// depending on whether a FGSP event has already been entered.

const remarks = `SEQ ${cur._sequence}, SOL ${cur.lineName}, BSP: ${(cur.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(cur.waterDepth).toFixed(0)} m.`;
const payload = {
type: "sequence",
sequence,
point: cur._point,
remarks,
labels
}

// console.log(projectId, payload);
await event.post(projectId, payload);
} else {
// A first shot point has already been entered in the log,
// so we have nothing to do here.
}
} else if (prev.lineStatus == "online" && cur.lineStatus != "online") {
// console.log("TRANSITION TO OFFLINE", prev, cur);

// Check if there are already LSP, LGSP events for this sequence
const projectId = await schema2pid(prev._schema);
const sequenceEvents = await event.list(projectId, {sequence});

const labels = ["LSP", "LGSP"].filter(l => !sequenceEvents.find(i => i.labels.includes(l)));

if (labels.includes("LSP")) {
// At this point labels contains either LSP only or LSP + LGSP,
// depending on whether a LGSP event has already been entered.

const remarks = `SEQ ${prev._sequence}, EOL ${prev.lineName}, BSP: ${(prev.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(prev.waterDepth).toFixed(0)} m.`;
const payload = {
type: "sequence",
sequence,
point: prev._point,
remarks,
labels
}

// console.log(projectId, payload);
await event.post(projectId, payload);
} else {
// A last shot point has already been entered in the log,
// so we have nothing to do here.
}
}
// Processing of this shot has already been completed.
// The queue can now move forward.
} catch (err) {
console.error("DetectSOLEOL Error")
console.log(err);
} finally {
cur.isPending = false;
}
}
constructor () {
DEBUG(`${this.author} instantiated`);
}

async run (data) {
async run (data, ctx) {
if (!data || data.channel !== "realtime") {
return;
}
@@ -117,30 +20,69 @@ class DetectSOLEOL {
return;
}

const meta = data.payload.new.meta;

if (this.queue.length < DetectSOLEOL.MAX_QUEUE_SIZE) {

this.queue.push({
isPending: this.queue.length,
_schema: meta._schema,
time: meta.time,
shot: meta.shot,
lineStatus: meta.lineStatus,
_sequence: meta._sequence,
_point: meta._point,
lineName: meta.lineName,
speed: meta.speed,
waterDepth: meta.waterDepth
});

} else {
// FIXME Change to alert
console.error("DetectSOLEOL queue full at", this.queue.length);
if (!this.prev) {
DEBUG("Initialising `prev`");
this.prev = data;
return;
}

this.processQueue();
try {
DEBUG("Running");
// DEBUG("%j", data);
const cur = data?.payload?.new?.meta;
const prev = this.prev?.payload?.new?.meta;
const sequence = Number(cur._sequence);

// DEBUG("%j", prev);
// DEBUG("%j", cur);
DEBUG("prv.lineName: %s\ncur.lineName: %s\nprv._sequence: %s\ncur._sequence: %s\nprv.lineStatus: %s\ncur.lineStatus: %s", prev.lineName, cur.lineName, prev._sequence, cur._sequence, prev.lineStatus, cur.lineStatus);

if (prev.lineName == cur.lineName && prev._sequence == cur._sequence &&
prev.lineStatus != "online" && cur.lineStatus == "online" && sequence) {
INFO("Transition to ONLINE detected");

// We must use schema2pid because the pid may not have been
// populated for this event.
const projectId = await schema2pid(cur._schema ?? prev._schema);
const labels = ["FSP", "FGSP"];
const remarks = `SEQ ${cur._sequence}, SOL ${cur.lineName}, BSP: ${(cur.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(cur.waterDepth).toFixed(0)} m.`;
const payload = {
type: "sequence",
sequence,
point: cur._point,
remarks,
labels,
meta: {auto: true, author: `*${this.constructor.name}*`}
}
INFO("Posting event", projectId, payload);
await event.post(projectId, payload);
} else if (prev.lineName == cur.lineName && prev._sequence == cur._sequence &&
prev.lineStatus == "online" && cur.lineStatus != "online" && sequence) {
INFO("Transition to OFFLINE detected");

const projectId = await schema2pid(prev._schema ?? cur._schema);
const labels = ["LSP", "LGSP"];
const remarks = `SEQ ${cur._sequence}, EOL ${cur.lineName}, BSP: ${(cur.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(cur.waterDepth).toFixed(0)} m.`;
const payload = {
type: "sequence",
sequence,
point: cur._point,
remarks,
labels,
meta: {auto: true, author: `*${this.constructor.name}*`}
}
INFO("Posting event", projectId, payload);
await event.post(projectId, payload);
}

} catch (err) {
DEBUG(`${this.author} error`, err);
throw err;
} finally {
this.prev = data;
}
}

}

module.exports = DetectSOLEOL;

@@ -1,13 +1,44 @@
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

const Handlers = [
require('./detect-project-configuration-change'),
require('./detect-soleol'),
require('./detect-soft-start'),
require('./report-line-change-time'),
require('./detect-fdsp')
];

function init () {
return Handlers.map(Handler => new Handler());
function init (ctx) {

const instances = Handlers.map(Handler => new Handler(ctx));

function prepare (data, ctx) {
const promises = [];
for (let instance of instances) {
const promise = new Promise(async (resolve, reject) => {
try {
DEBUG("Run", instance.author);
const result = await instance.run(data, ctx);
DEBUG("%s result: %O", instance.author, result);
resolve(result);
} catch (err) {
ERROR("%s error:\n%O", instance.author, err);
reject(err);
}
});
promises.push(promise);
}
return promises;
}

function despatch (data, ctx) {
return Promise.allSettled(prepare(data, ctx));
}

return { instances, prepare, despatch };
}

module.exports = {
Handlers,
init
}
};

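init() now returns the handler instances plus prepare()/despatch() helpers, and despatch() settles every handler against the same notification without one rejection aborting the rest. A usage sketch under those assumptions (the handler class here is a stand-in, not one of the real handlers):

// Sketch: a dummy handler driven the way despatch() drives real ones.
class LogEverything {
  author = `*${this.constructor.name}*`;
  async run (data, ctx) {
    console.log(this.author, "saw", data?.channel, "ctx keys:", Object.keys(ctx));
  }
}

const ctx = {};
const instances = [ new LogEverything() ];
// despatch(data, ctx) boils down to:
Promise.allSettled(instances.map(i => i.run({ channel: "realtime" }, ctx)))
  .then(results => console.log(results.map(r => r.status))); // [ 'fulfilled' ]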
231
lib/www/server/events/handlers/report-line-change-time.js
Normal file
@@ -0,0 +1,231 @@
const { event, project } = require('../../lib/db');
const { withinValidity } = require('../../lib/utils/ranges');
const unique = require('../../lib/utils/unique');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

class ReportLineChangeTime {

author = `*${this.constructor.name}*`;

constructor () {
DEBUG(`${this.author} instantiated`);
}

async run (data, ctx) {

if (!data || data.channel !== "event") {
return;
}

const n = data.payload.new;
const o = data.payload.old;

if (!(n?.labels) && !(o?.labels)) {
return;
}

if (!n?.labels?.includes("FGSP") && !o?.labels?.includes("FGSP") &&
!n?.labels?.includes("LGSP") && !o?.labels?.includes("LGSP")) {
return;
}


try {
DEBUG("Running");
const cur = data;
const projectId = cur?.payload?.pid;
const forward = (cur?.payload?.old?.labels?.includes("LGSP") || cur?.payload?.new?.labels?.includes("LGSP"));
DEBUG("%j", cur);

if (!projectId) {
throw {message: "No projectID found in event", cur};
}

async function getLineChangeTime (data, forward = false) {
if (forward) {
const ospEvents = await event.list(projectId, {label: "FGSP"});
// DEBUG("ospEvents", ospEvents);
const osp = ospEvents.filter(i => i.tstamp > data.tstamp).pop();
DEBUG("fsp", osp);
// DEBUG("data", data);

if (osp) {
DEBUG("lineChangeTime", osp.tstamp - data.tstamp);
return { lineChangeTime: osp.tstamp - data.tstamp, osp };
}
} else {
const ospEvents = await event.list(projectId, {label: "LGSP"});
// DEBUG("ospEvents", ospEvents);
const osp = ospEvents.filter(i => i.tstamp < data.tstamp).shift();
DEBUG("lsp", osp);
// DEBUG("data", data);

if (osp) {
DEBUG("lineChangeTime", data.tstamp - osp.tstamp);
return { lineChangeTime: data.tstamp - osp.tstamp, osp };
}
}
}

function parseInterval (dt) {
const daySeconds = (dt/1000) % 86400;
const d = Math.floor((dt/1000) / 86400);
const dateObject = new Date(null);
dateObject.setSeconds(daySeconds);
const [ h, m, s ] = dateObject.toISOString().slice(11, 19).split(":").map(Number);
return {d, h, m, s};
}

function formatInterval (i) {
let str = "";
for (let [k, v] of Object.entries(i)) {
if (v) {
str += " " + v + " " + k;
}
}
return str.trim();
}

const deleteStaleEvents = async (seq) => {
if (seq) {
DEBUG("Will delete lct events related to sequence(s)", seq);

const jpq = `$."${this.author}"`;

const opts = {jpq};

if (Array.isArray(seq)) {
opts.sequences = unique(seq).filter(i => !!i);
} else {
opts.sequence = seq;
}

const staleEvents = await event.list(projectId, opts);
DEBUG(staleEvents.length ?? 0, "events to delete");
for (let staleEvent of staleEvents) {
DEBUG(`Deleting event id ${staleEvent.id} (seq = ${staleEvent.sequence}, point = ${staleEvent.point})`);
await event.del(projectId, staleEvent.id);
}
}
}

const createLineChangeTimeEvents = async (lineChangeTime, data, osp) => {

const events = [];
const cfg = ctx?.projects?.configuration?.[projectId] ?? {};
const nlcd = cfg?.production?.nominalLineChangeDuration * 60*1000; // m → ms
DEBUG("nlcd", nlcd);
if (nlcd && lineChangeTime > nlcd) {
const excess = lineChangeTime-nlcd;
const excessString = formatInterval(parseInterval(excess));
DEBUG("excess", excess, excessString);

// ref: The later of the two events
const ref = forward ? osp : data;
const payload = {
// tstamp: new Date(ref.tstamp-1),
sequence: ref.sequence,
point: ref.point,
remarks: `_Nominal line change duration exceeded by ${excessString}_`,
labels: [ "Nav", "Prod" ],
meta: {
auto: true,
author: this.author,
[this.author]: {
parents: [
data.id,
osp.id
],
type: "excess",
value: excess
}
}
}

events.push(payload);
DEBUG("Created line change duration exceeded event", projectId, payload);
}


const lctString = formatInterval(parseInterval(lineChangeTime));

// ref: The later of the two events
const ref = forward ? osp : data;
const payload = {
// tstamp: new Date(ref.tstamp-1),
sequence: ref.sequence,
point: ref.point,
remarks: `Line change time: ${lctString}`,
labels: [ "Nav", "Prod" ],
meta: {
auto: true,
author: this.author,
[this.author]: {
parents: [
data.id,
osp.id
],
type: "lineChangeTime",
value: lineChangeTime
}
}
};

events.push(payload);
DEBUG("Created line change duration event", projectId, payload);

return events;
}

const maybePostEvent = async (projectId, payload) => {
DEBUG("Posting event", projectId, payload);
await event.post(projectId, payload);
}


await deleteStaleEvents([cur.payload?.old?.sequence, cur.payload?.new?.sequence]);

if (cur?.payload?.operation == "INSERT") {
// NOTE: UPDATE on the event_log view translates to one UPDATE plus one INSERT
// on event_log_full, so we don't need to worry about UPDATE here.
const data = n;
DEBUG("INSERT seen: will add lct events related to ", data.id);

if (withinValidity(data.validity)) {
DEBUG("Event within validity period", data.validity, new Date());

data.tstamp = new Date(data.tstamp);
const { lineChangeTime, osp } = await getLineChangeTime(data, forward) ?? {};

if (lineChangeTime) {

const events = await createLineChangeTimeEvents(lineChangeTime, data, osp);

if (events?.length) {
DEBUG("Deleting other events for sequence", events[0].sequence);
await deleteStaleEvents(events[0].sequence);
}

for (let payload of events) {
await maybePostEvent(projectId, payload);
}
}
} else {
DEBUG("Event outside of validity range", data.validity, "lct events not inserted");
}

}



} catch (err) {
ERROR(`${this.author} error`, err);
throw err;
}


}
}

module.exports = ReportLineChangeTime;
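parseInterval() splits a millisecond duration into days, hours, minutes and seconds, and formatInterval() prints only the non-zero parts, so for example:

// 90061000 ms = 1 day, 1 hour, 1 minute, 1 second
// parseInterval(90061000)  → { d: 1, h: 1, m: 1, s: 1 }
// formatInterval({ d: 1, h: 1, m: 1, s: 1 }) → "1 d 1 h 1 m 1 s"
// A 45-minute line change renders as just the minutes:
// formatInterval(parseInterval(45 * 60 * 1000)) → "45 m"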
@@ -1,23 +1,25 @@
const { listen } = require('../ws/db');
const { listen } = require('../lib/db/notify');
const channels = require('../lib/db/channels');
const handlers = require('./handlers').init();
const handlers = require('./handlers');
const { ActionsQueue } = require('../lib/queue');
const { ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

function start () {
listen(channels, async function (data) {

const queue = new ActionsQueue();
const ctx = {}; // Context object

const { prepare, despatch } = handlers.init(ctx);

listen(channels, function (data) {
DEBUG("Incoming data", data);
for (const handler of handlers) {
// NOTE: We are intentionally passing the same instance
// of the data to every handler. This means that earlier
// handlers could, in principle, modify the data to be
// consumed by later ones, provided that they are
// synchronous (as otherwise, the completion order is
// undefined).
await handler.run(data);
}

// We don't bother awaiting
queue.enqueue(() => despatch(data, ctx));
DEBUG("Queue size", queue.length());
});

INFO("Events manager started.", handlers.length, "active handlers");
INFO("Events manager started");
}

module.exports = { start }

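ActionsQueue itself is not part of this diff; the enqueue() call above only requires a FIFO that runs one async job at a time, so notifications are despatched in arrival order. A hedged sketch of such a queue (the real implementation in ../lib/queue may differ):

// Sketch: a minimal serialising job queue.
class SerialQueue {
  #jobs = [];
  #running = false;

  enqueue (job) {
    this.#jobs.push(job);
    if (!this.#running) this.#drain();
  }

  length () {
    return this.#jobs.length;
  }

  async #drain () {
    this.#running = true;
    while (this.#jobs.length) {
      const job = this.#jobs.shift();
      try { await job(); } catch (err) { console.error(err); }
    }
    this.#running = false;
  }
}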
@@ -1,6 +1,6 @@
#!/usr/bin/node

const { INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
const { ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

async function main () {
// Check that we're running against the correct database version
@@ -8,23 +8,55 @@ async function main () {
INFO("Running version", await version.describe());
version.compatible()
.then( (versions) => {
const api = require('./api');
const ws = require('./ws');
try {
const api = require('./api');
const ws = require('./ws');
const periodicTasks = require('./periodic-tasks').init();

const { fork } = require('child_process');
const { fork } = require('child_process');

const port = process.env.HTTP_PORT || 3000;
const host = process.env.HTTP_HOST || "127.0.0.1";
const path = process.env.HTTP_PATH ?? "/api";
const server = api.start(port, host, path);
ws.start(server);
const port = process.env.HTTP_PORT || 3000;
const host = process.env.HTTP_HOST || "127.0.0.1";
const path = process.env.HTTP_PATH ?? "/api";
const server = api.start(port, host, path);
ws.start(server);

const eventManagerPath = [__dirname, "events"].join("/");
const eventManager = fork(eventManagerPath, /*{ stdio: 'ignore' }*/);
INFO("Versions:", versions);

INFO("Versions:", versions);
periodicTasks.start();

process.on('exit', () => eventManager.kill());
const eventManagerPath = [__dirname, "events"].join("/");
const eventManager = fork(eventManagerPath, /*{ stdio: 'ignore' }*/);

process.on("SIGINT", async () => {
DEBUG("Interrupted (SIGINT)");
eventManager.kill()
await periodicTasks.cleanup();
process.exit(0);
})

process.on("SIGHUP", async () => {
DEBUG("Stopping (SIGHUP)");
eventManager.kill()
await periodicTasks.cleanup();
process.exit(0);
})

process.on('beforeExit', async () => {
DEBUG("Preparing to exit");
eventManager.kill()
await periodicTasks.cleanup();
});

process.on('exit', async () => {
DEBUG("Exiting");
// eventManager.kill()
// periodicTasks.cleanup();
});
} catch (err) {
ERROR(err);
process.exit(2);
}
})
.catch( ({current, wanted, component}) => {
console.error(`Fatal error: incompatible ${component} version ${current} (wanted: ${wanted})`);

61
lib/www/server/lib/db/event/changes.js
Normal file
@@ -0,0 +1,61 @@
const { setSurvey } = require('../connection');
const { replaceMarkers } = require('../../utils');

function parseValidity (row) {
  if (row.validity) {
    const rx = /^(.)("([\d :.+-]+)")?,("([\d :.+-]+)")?([\]\)])$/;
    const m = row.validity.match(rx);
    row.validity = [ m[1], m[3], m[5], m[6] ];
  }
  return row;
}

function transform (row) {
  if (row.validity[2]) {
    return {
      uid: row.uid,
      id: row.id,
      is_deleted: true
    }
  } else {
    row.is_deleted = false;
    row.has_edits = row.id != row.uid;
    row.modified_on = row.validity[1];
    delete row.uid;
    delete row.validity;
    return row;
  }
}

function unique (rows) {
  const o = {};
  rows.forEach(row => o[row.id] = row);
  return Object.values(o);
}

/**
 * Get the event change history from a given epoch (ts0),
 * for all events.
 */
async function changes (projectId, ts0, opts = {}) {

  if (!projectId || !ts0) {
    throw {status: 400, message: "Invalid request" };
  }

  const client = await setSurvey(projectId);

  const text = `
    SELECT *
    FROM event_log_changes($1);
  `;

  const res = await client.query(text, [ts0]);
  client.release();
  return opts.unique
    ? unique(res.rows.map(i => transform(replaceMarkers(parseValidity(i)))))
    : res.rows.map(i => transform(replaceMarkers(parseValidity(i))));
}

module.exports = changes;
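For orientation, a minimal usage sketch of the new changes() helper (the require path and project id below are illustrative, not taken from this diff):

// Fetch only the latest version of each event modified since the epoch.
// Deleted events come back as { uid, id, is_deleted: true }; live ones
// carry has_edits and modified_on derived from their validity range.
const changes = require('./lib/db/event/changes');

async function example () {
  const rows = await changes("myproject", "1970-01-01T00:00:00Z", { unique: true });
  for (const row of rows) {
    console.log(row.id, row.is_deleted ? "deleted" : row.modified_on);
  }
}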
@@ -5,5 +5,6 @@ module.exports = {
  post: require('./post'),
  put: require('./put'),
  patch: require('./patch'),
  del: require('./delete')
  del: require('./delete'),
  changes: require('./changes')
}
@@ -10,25 +10,34 @@ async function list (projectId, opts = {}) {
  const offset = Math.abs((opts.page-1)*opts.itemsPerPage) || 0;
  const limit = Math.abs(Number(opts.itemsPerPage)) || null;

  const filter = opts.sequence
    ? String(opts.sequence).includes(";")
      ? [ "sequence = ANY ( $1 )", [ opts.sequence.split(";") ] ]
      : [ "sequence = $1", [ opts.sequence ] ]
    : opts.date0
      ? opts.date1
        ? [ "date(tstamp) BETWEEN SYMMETRIC $1 AND $2", [ opts.date0, opts.date1 ] ]
        : [ "date(tstamp) = $1", [ opts.date0 ] ]
      : [ "true = true", [] ];
  const sequence = opts.sequence && Number(opts.sequence) || null;
  const sequences = opts.sequences && (Array.isArray(opts.sequences)
    ? opts.sequences.map(Number)
    : opts.sequences.split(/[^0-9]+/).map(Number)) || null;
  const date0 = opts.date0 ?? null;
  const date1 = opts.date1 ?? null;
  const jpq = opts.jpq || null; // jpq: JSONPath Query
  const label = opts.label ?? null;

  const text = `
    SELECT *
    FROM event_log e
    WHERE
      ${filter[0]}
    ORDER BY ${sortKey} ${sortDir};
      ($1::numeric IS NULL OR sequence = $1) AND
      ($2::numeric[] IS NULL OR sequence = ANY( $2 )) AND
      ($3::timestamptz IS NULL OR date(tstamp) = $3) AND
      ($3::timestamptz IS NULL OR
        (($4::timestamptz IS NULL AND date(tstamp) = $3) OR
          date(tstamp) BETWEEN SYMMETRIC $3 AND $4)) AND
      ($5::jsonpath IS NULL OR jsonb_path_exists(meta::jsonb, $5::jsonpath)) AND
      ($6::text IS NULL OR $6 = ANY(labels))
    ORDER BY ${sortKey} ${sortDir}
    LIMIT ${limit};
  `;

  const res = await client.query(text, filter[1]);
  const values = [ sequence, sequences, date0, date1, jpq, label ];

  const res = await client.query(text, values);
  client.release();
  return res.rows.map(i => replaceMarkers(i));
}
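The rewritten list() leans on the `($n::type IS NULL OR …)` idiom so that a single statement covers every filter combination; unused filters are simply passed as NULL. A hypothetical call, using the option names from the hunk above:

// All events from sequences 81 and 82 carrying the "QC" label,
// capped at 100 rows. Omitted options disable their clauses.
const events = await list("myproject", {
  sequences: "81;82",
  label: "QC",
  itemsPerPage: 100
});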
@@ -9,10 +9,10 @@ async function post (projectId, payload, opts = {}) {

  const text = `
    INSERT
    INTO event_log (tstamp, sequence, point, remarks, labels)
    VALUES ($1, $2, $3, replace_placeholders($4, $1, $2, $3), $5);
    INTO event_log (tstamp, sequence, point, remarks, labels, meta)
    VALUES ($1, $2, $3, replace_placeholders($4, $1, $2, $3), $5, $6);
  `;
  const values = [ p.tstamp, p.sequence, p.point, p.remarks, p.labels ];
  const values = [ p.tstamp, p.sequence, p.point, p.remarks, p.labels, p.meta ];

  DEBUG("Inserting new values: %O", values);
  await client.query(text, values);
@@ -1,21 +1,43 @@
// FIXME This code is in painful need of refactoring

const { DEBUG } = require("DOUGAL_ROOT/debug")(__filename);
const { setSurvey, transaction, pool } = require('../connection');
const { listen } = require('../notify');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

let last_tstamp = 0;

let project_configs, listener;

async function getAllProjectConfigs () {
  const client = await pool.connect();

  const res0 = await client.query("SELECT schema FROM projects;");
  const text = res0.rows.map(r => {
    return `SELECT '${r.schema}' AS schema, data FROM ${r.schema}.file_data WHERE (data->>'archived')::boolean IS NOT true AND data->>'id' IS NOT NULL`;
  }).join("\nUNION ALL ");
  async function getFromDatabase () {
    DEBUG("Getting project configurations");
    const client = await pool.connect();

  const res1 = await client.query(text);
  client.release();
  return res1.rows.map(r => Object.assign(r.data, {schema: r.schema}));
    try {
      const text = `
        SELECT schema, meta AS data
        FROM projects
        WHERE (meta->>'archived')::boolean IS NOT true;
      `;
      const res = await client.query(text);
      project_configs = res.rows;
      DEBUG("Have configurations for projects", project_configs.map(i => i.data.id));
    } catch (err) {
      ERROR(err);
    } finally {
      client.release();
    }
    return project_configs;
  }

  if (project_configs) {
    return project_configs;
  } else {
    listener = await listen(["project"], getFromDatabase);
    DEBUG("Added project configuration change listener");
    return await getFromDatabase();
  }
}

async function getNearestPreplot (candidates) {
@@ -74,9 +96,9 @@ async function getNearestOfflinePreplot (candidates) {
  if ("latitude" in candidates[0] && "longitude" in candidates[0]) {
    text = `
      SELECT
        '${c._schema}' AS _schema,
        '${c.schema}' AS schema,
        ST_Distance(ST_Transform(ST_SetSRID(ST_MakePoint($1, $2), 4326), ST_SRID(geometry)), geometry) AS distance
      FROM ${c._schema}.preplot_points
      FROM ${c.schema}.preplot_points
      ORDER BY distance ASC
      LIMIT 1;
    `;
@@ -84,9 +106,9 @@ async function getNearestOfflinePreplot (candidates) {
  } else if ("easting" in candidates[0] && "northing" in candidates[0]) {
    text = `
      SELECT
        '${c._schema}' AS _schema,
        '${c.schema}' AS schema,
        ST_Distance(ST_SetSRID(ST_MakePoint($1, $2), ST_SRID(geometry)), geometry) AS distance
      FROM ${c._schema}.preplot_points
      FROM ${c.schema}.preplot_points
      ORDER BY distance ASC
      LIMIT 1;
    `;
@@ -102,13 +124,13 @@ async function getNearestOfflinePreplot (candidates) {
  const results = [];
  for (const qry of queries) {
    const res = await client.query(qry.text, qry.values);
    if (res.rows[0] && res.rows[0]._schema) {
    if (res.rows[0] && res.rows[0].schema) {
      results.push(res.rows[0]);
    }
  }
  client.release();
  const _schema = results.sort( (a, b) => a.distance - b.distance).shift()?._schema;
  return candidates.find(c => c._schema == _schema);
  const schema = results.sort( (a, b) => a.distance - b.distance).shift()?.schema;
  return candidates.find(c => c.schema == schema);
}

async function saveOnline (dataset, opts = {}) {
@@ -141,14 +163,14 @@ async function saveOnline (dataset, opts = {}) {
  await client.query(`
    INSERT INTO raw_shots
      (sequence, line, point, objref, tstamp, geometry, hash)
    VALUES ($1, $2, $3, $4, $5, ST_SetSRID(ST_MakePoint($6, $7), (SELECT (data->>'epsg')::integer AS epsg FROM file_data WHERE data ? 'id')), '*online*')
    VALUES ($1, $2, $3, $4, $5, ST_SetSRID(ST_MakePoint($6, $7), (select (project_configuration()->>'epsg')::integer as epsg)), '*online*')
    ON CONFLICT DO NOTHING;
  `, [item.sequence, item.line, item.point, 0, item.tstamp, item.easting, item.northing]);
} else if (item.latitude && item.longitude) {
  await client.query(`
    INSERT INTO raw_shots
      (sequence, line, point, objref, tstamp, geometry, hash)
    VALUES ($1, $2, $3, $4, $5, ST_Transform(ST_SetSRID(ST_MakePoint($6, $7), 4326), (SELECT (data->>'epsg')::integer AS epsg FROM file_data WHERE data ? 'id')), '*online*')
    VALUES ($1, $2, $3, $4, $5, ST_Transform(ST_SetSRID(ST_MakePoint($6, $7), 4326), (select (project_configuration()->>'epsg')::integer as epsg)), '*online*')
    ON CONFLICT DO NOTHING;
  `, [item.sequence, item.line, item.point, 0, item.tstamp, item.longitude, item.latitude]);
} else {
@@ -158,8 +180,8 @@ async function saveOnline (dataset, opts = {}) {
  }
  await transaction.commit(client);
} catch (error) {
  console.error("ONLINE DATA INSERT ERROR");
  console.error(error);
  ERROR("ONLINE DATA INSERT ERROR");
  ERROR(error);
  await transaction.rollback(client);
} finally {
  client.release();
@@ -186,7 +208,7 @@ async function saveOffline (navData, opts = {}) {
} else if (schema && hasEastNorth) {
  const text = `
    INSERT INTO real_time_inputs (tstamp, geometry, meta)
    VALUES ($1, ST_Transform(ST_SetSRID(ST_MakePoint($2, $3), (SELECT (data->>'epsg')::integer AS epsg FROM ${schema}.file_data)), 4326), $4);
    VALUES ($1, ST_Transform(ST_SetSRID(ST_MakePoint($2, $3), (select (project_configuration()->>'epsg')::integer as epsg)), 4326), $4);
  `;

  const values = [navData.tstamp, navData.longitude, navData.latitude, navData.payload];
@@ -215,6 +237,37 @@ async function saveOffline (navData, opts = {}) {
  client.release();
}

async function getCandidates (navData) {

  const configs = await getAllProjectConfigs();

  // We just get the bits of interest: pattern and schema
  const candidates = configs.map(c => {
    if (!c?.data?.online?.line || c?.archived === true) {
      return null;
    }

    const p = c.data.online.line.pattern; // For short

    const rx = new RegExp(p.regex, p.flags);
    const matches = navData.lineName.match(rx);

    if (!matches || ((matches.length+1) < p.captures.length)) {
      return null;
    }

    matches.shift(); // Get rid of the full matched text
    const obj = Object.assign({}, navData, {schema: c.schema});
    p.captures.forEach( (k, i) => {
      obj[k] = matches[i];
    });
    return obj;
  }).filter(c => !!c);
  // DEBUG("Candidates: %j", candidates.map(c => c.schema));

  return candidates;
}

async function save (navData, opts = {}) {

  const hasLatLon = ("latitude" in navData && "longitude" in navData);
@@ -222,50 +275,27 @@ async function save (navData, opts = {}) {
  const hasLinePoint = ("lineName" in navData && "point" in navData);
  if (!(hasLinePoint || hasLatLon || hasEastNorth)) {
    // This is of no interest to us
    console.warning("Ignoring data without useful values", navData);
    NOTICE("Ignoring data without useful values", navData);
    return;
  }

  // DEBUG("navData", navData);

  if (navData.online === true) {

    // So we have a lineName, see which projects match the line pattern.
    // For this we need to get all the project configs
    const configs = await getAllProjectConfigs();

    // We just get the bits of interest: pattern and schema
    const candidates = configs.map(c => {
      if (!(c && c.online && c.online.line)) {
        return null;
      }

      const p = c.online.line.pattern; // For short

      const rx = new RegExp(p.regex, p.flags);
      const matches = navData.lineName.match(rx);

      if (!matches || ((matches.length+1) < p.captures.length)) {
        return null;
      }

      matches.shift(); // Get rid of the full matched text
      const obj = Object.assign({}, navData, {schema: c.schema});
      p.captures.forEach( (k, i) => {
        obj[k] = matches[i];
      });
      return obj;
    }).filter(c => !!c);
    DEBUG("Candidates: %j", candidates);
    // console.log("CANDIDATES", candidates);
    const candidates = await getCandidates(navData);

    if (candidates.length == 0) {
      // This is probably a test line, so we treat it as offline
      console.log("No match");
      WARNING("No match");
    } else {
      if (candidates.length == 1) {
        // Only one candidate, associate with it
        // console.log("Save into schema", candidates[0].match.schema);
        await saveOnline(candidates);
        navData.payload._schema = candidates[0].match.schema;
        navData.payload._schema = candidates[0].schema;
      } else {
        // More than one candidate, go for the closest. If more than one active
        // project with the same preplots, highest numbered schema.
@@ -275,7 +305,7 @@ async function save (navData, opts = {}) {
        await saveOnline(candidates.filter(c => c.schema == destinationSchema), opts);
        navData.payload._schema = destinationSchema;
      } else {
        console.log("Nowhere to save to");
        WARNING("Nowhere to save to");
      }
    }

@@ -286,17 +316,18 @@ async function save (navData, opts = {}) {
  }
} else {
  // We are offline. We only assign _schema once every save_interval seconds at most
  // unless there is gun data present.
  if (opts.offline_survey_heuristics == "nearest_preplot") {
    const now = Date.now();
    const do_save = !opts.offline_survey_detect_interval ||
      (now - last_tstamp) >= opts.offline_survey_detect_interval;

    if (do_save) {
    if (do_save || "guns" in navData?.payload) {
      const configs = await getAllProjectConfigs();
      const candidates = configs.map(c => Object.assign({}, navData, {_schema: c.schema}));
      const candidates = await getCandidates(navData);
      const bestCandidate = await getNearestOfflinePreplot(candidates);
      if (bestCandidate) {
        navData.payload._schema = bestCandidate._schema;
        navData.payload._schema = bestCandidate.schema;
        last_tstamp = now;
      }
    }
@@ -304,6 +335,7 @@ async function save (navData, opts = {}) {
  }

  await saveOffline(navData, opts);
  DEBUG("Saved");
}

module.exports = save;
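The capture mechanism in getCandidates() is easiest to see on a made-up configuration: each project declares a regex whose capture groups are named by `captures`, and a line name is attributed to the project when the regex matches. The pattern below is invented (the real ones live in the project configurations, not in this diff), though it does match the planner CSV example further down:

// Hypothetical line-name pattern, for illustration only.
const pattern = {
  regex: "^20(\\d{4})1(\\d{3})S",
  flags: "",
  captures: ["line", "sequence"]
};

const lineName = "2051621081S00000";
const matches = lineName.match(new RegExp(pattern.regex, pattern.flags));
matches.shift(); // drop the full match, keep the capture groups

const obj = {};
pattern.captures.forEach((k, i) => obj[k] = matches[i]);
// obj => { line: "5162", sequence: "081" }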
@@ -1,5 +1,43 @@
const { makeSubscriber } = require('./connection');
const { makeSubscriber, pool } = require('./connection');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

async function purge () {
  DEBUG("Purging old notifications");
  const client = await pool.connect();
  try {
    await client.query("CALL purge_notifications();");
  } catch (err) {
    ERROR(err);
  } finally {
    client.release();
  }
}

async function fullPayload (payload) {

  if (!payload.payload_id) {
    return payload;
  } else {
    let client, res;
    try {
      client = await pool.connect();
      const text = `SELECT payload FROM notify_payloads WHERE id = $1;`;
      const values = [ payload.payload_id ];
      res = await client.query(text, values);
      res = res?.rows[0]?.payload;
      DEBUG(`Oversize notification payload retrieved with id ${payload.payload_id} and size ${res.length}`);
      // DEBUG(res);
      res = JSON.parse(res);
    } catch (err) {
      ERROR(err);
    } finally {
      if (client) {
        client.release();
      }
    }
    return res;
  }
}

async function listen (addChannels, callback) {

@@ -18,11 +56,11 @@ async function listen (addChannels, callback) {

  for (const channel of addChannels) {
    await client.listenTo(channel);
    client.notifications.on(channel, (payload) => {
    client.notifications.on(channel, async (payload) => {
      const data = {
        channel,
        _received: new Date(),
        payload
        payload: await fullPayload(payload)
      };
      callback(data);
    });
@@ -32,5 +70,6 @@ async function listen (addChannels, callback) {
}

module.exports = {
  listen
  listen,
  purge
};
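PostgreSQL caps NOTIFY payloads at roughly 8 kB, which is presumably why oversize payloads are parked in notify_payloads and dereferenced by fullPayload() on receipt. From a consumer's point of view nothing changes; a sketch (channel names invented for illustration):

// data.payload arrives already resolved: either the inline payload or
// the row fetched from notify_payloads when only a payload_id was sent.
const { listen } = require('./lib/db/notify');

listen(["project", "event"], (data) => {
  console.log(data.channel, data._received, data.payload);
});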
@@ -36,6 +36,9 @@ async function patch (projectId, payload, opts = {}) {
    }
  }

  // We do not allow users to change the schema
  delete payload.schema;

  const dest = removeNulls(deepMerge(source, payload));
  await modify(projectId, dest);
  return dest;
@@ -7,10 +7,11 @@ const { INFO, DEBUG, WARNING, ERROR } = require('DOUGAL_ROOT/debug')(__filename)


function checkSyntax (value, type = "project") {
  var requiredFields = {};

  switch (type) {
    case "project":
      var requiredFields = {
      requiredFields = {
        id: "string",
        name: "string",
        epsg: "number",
@@ -18,7 +19,7 @@ function checkSyntax (value, type = "project") {
      };
      break;
    case "binning":
      var requiredFields = {
      requiredFields = {
        theta: "number",
        I_inc: "number",
        J_inc: "number",
@@ -28,23 +29,19 @@ function checkSyntax (value, type = "project") {
      }
      break
    case "origin":
      var requiredFields = {
      requiredFields = {
        easting: "number",
        northing: "number",
        I: "number",
        J: "number"
      }
      break;
      break;
    default:
      return typeof type == "function"
        ? type(value)
        : typeof value == type;
  }

  // return Object.entries(requiredFields).every( ([field, test]) => {
  //   return value.hasOwnProperty(field) && checkSyntax(value[field], test);
  // });

  for (const [field, test] of Object.entries(requiredFields)) {
    if (!value.hasOwnProperty(field)) {
      return `Missing required property: ${field}`;
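With requiredFields hoisted out of the switch, checkSyntax can be exercised directly; a made-up example (the field lists are abridged by this hunk, so the real required set may be larger):

// A string return value is an error message. Recursion happens when a
// field's test is itself a type name or a predicate function.
const err = checkSyntax({ id: "p1", name: "Test", epsg: 23031 }, "project");
if (typeof err === "string") {
  console.error(err); // e.g. "Missing required property: ..."
}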
@@ -1,14 +1,15 @@
const fs = require('fs');
const YAML = require('yaml');
const flattenQCDefinitions = require('../../../utils/flattenQCDefinitions');
const configuration = require('../../configuration'); // lib/db/configuration
const { translatePath } = require('../../../utils/logicalPath');
const project = require('../../project'); // lib/db/project


async function get (projectId, opts = {}) {
  const qcConfig = await configuration.get(projectId, "qc");
  const qcConfig = (await project.configuration.get(projectId))?.qc;
  if (qcConfig?.definitions) {
    try {
      const definitions = YAML.parse(fs.readFileSync(qcConfig.definitions).toString());
      const definitions = YAML.parse(fs.readFileSync(translatePath(qcConfig.definitions)).toString());

      return opts.flat ? flattenQCDefinitions(definitions) : definitions;
    } catch (err) {
@@ -1,7 +1,7 @@
const fs = require('fs/promises');
const Path = require('path');
const mime = require('./mime-types');
const { translatePath, logicalRoot } = require('./logical');
const { translatePath, logicalRoot } = require('../utils/logicalPath');
const systemCfg = require('../config');
const projectCfg = require('../db/configuration');
@@ -8,6 +8,7 @@ const { pool, setSurvey, transaction, fetchRow } = require('../db/connection')
const { project, sequence, configuration, info } = require('../db')
const flattenQCDefinitions = require('./flatten');
const { projectHash, sequenceHash } = require('./last-modified');
const { translatePath } = require('../utils/logicalPath');

const { runShotsQC, saveShotsQC } = require('./shots');
const { runSequenceQCs, saveSequenceQCs } = require('./sequences');
@@ -42,12 +43,12 @@ function forceQC (projectId, sequenceNumber) {

async function getProjectQCConfig (projectId) {
  console.log("getProjectQCConfig");
  const qcConfig = await configuration.get(projectId, "qc");
  const qcConfig = (await project.configuration.get(projectId))?.qc;
  console.log("qcConfig", qcConfig);
  if (qcConfig?.definitions && qcConfig?.parameters) {
    const definitions =
      flattenQCDefinitions(YAML.parse(fs.readFileSync(qcConfig.definitions).toString()));
    const parameters = YAML.parse(fs.readFileSync(qcConfig.parameters).toString());
      flattenQCDefinitions(YAML.parse(fs.readFileSync(translatePath(qcConfig.definitions)).toString()));
    const parameters = YAML.parse(fs.readFileSync(translatePath(qcConfig.parameters)).toString());

    return { definitions, parameters };
  }
@@ -57,11 +58,11 @@ async function main () {
  // Fetch list of projects
  console.log("GET PROJECTS");
  const projects = await project.get();
  console.log("PROJECTS", projects);

  for (const proj of projects) {
    const projectId = proj.pid;
  for (const {pid} of projects) {
    const projectId = pid;
    console.log("PROJECT ID", projectId);
    const proj = await project.configuration.get(projectId);

    if (!proj.archived) {
      const QCTstamp = new Date();
@@ -75,7 +76,7 @@ async function main () {
      console.log("currentQCHash != lastQCHash", projectId, currentQCHash, lastQCHash);

      // Fetch definitions and parameters
      const { definitions, parameters } = await getProjectQCConfig(projectId) ?? {};
      const { definitions, parameters } = await getProjectQCConfig(projectId, proj.qc) ?? {};

      if (definitions && parameters) {
        console.log("PROJECT ID", projectId);
52
lib/www/server/lib/queue/actions-queue.js
Normal file
@@ -0,0 +1,52 @@
const Queue = require('./queue');

// Inspired by:
// https://stackoverflow.com/questions/53540348/js-async-await-tasks-queue#53540586

class ActionsQueue extends Queue {

  constructor (items = []) {
    super(items);

    this.pending = false;
  }

  enqueue (action) {
    return new Promise ((resolve, reject) => {
      super.enqueue({ action, resolve, reject });
      this.dequeue();
    });
  }

  async dequeue () {

    if (this.pending) {
      return false;
    }

    const item = super.dequeue();

    if (!item) {
      return false;
    }

    try {

      this.pending = true;

      const result = await item.action(this);

      this.pending = false;
      item.resolve(result);
    } catch (err) {
      this.pending = false;
      item.reject(err);
    } finally {
      this.dequeue();
    }

  }

}

module.exports = ActionsQueue;
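A quick usage sketch of the new queue: actions are async functions executed strictly one at a time, and each enqueue() resolves with its own action's result.

const ActionsQueue = require('./lib/queue/actions-queue');

const q = new ActionsQueue();

// Both actions are enqueued at once but run sequentially; results
// propagate back through the promises returned by enqueue().
const a = q.enqueue(async () => 1);
const b = q.enqueue(async () => 2);
Promise.all([a, b]).then(([ra, rb]) => console.log(ra, rb)); // 1 2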
6
lib/www/server/lib/queue/index.js
Normal file
@@ -0,0 +1,6 @@

module.exports = {
  Queue: require('./queue'),
  ActionsQueue: require('./actions-queue')
};
22
lib/www/server/lib/queue/queue.js
Normal file
@@ -0,0 +1,22 @@

class Queue {

  constructor (items = []) {
    this.items = items;
  }

  enqueue (item) {
    this.items.push(item);
  }

  dequeue () {
    return this.items.shift();
  }

  length () {
    return this.items.length;
  }

}

module.exports = Queue;
@@ -5,5 +5,8 @@ module.exports = {
  replaceMarkers: require('./replaceMarkers'),
  flattenQCDefinitions: require('./flattenQCDefinitions'),
  deepMerge: require('./deepMerge'),
  removeNulls: require('./removeNulls')
  removeNulls: require('./removeNulls'),
  logicalPath: require('./logicalPath'),
  ranges: require('./ranges'),
  unique: require('./unique')
};
@@ -10,6 +10,7 @@ function translatePath (file) {
    return physicalPath;
  } else {
    // An attempt to break out of the logical path?
    console.warn("Attempting to break out of the logical path?", physicalPath, prefix);
    throw {
      status: 404,
      message: "Not found"
74
lib/www/server/lib/utils/ranges.js
Normal file
@@ -0,0 +1,74 @@

function parseRange (str) {
  const rx = /^[\[(].*,.*[)\]]$/;

  if (rx.test(str)) {
    const lower_inclusive = str[0] == '[';
    const upper_inclusive = str[str.length-1] == ']';
    const [ lower, upper ] = str.slice(1,-1).split(",");
    return {
      upper,
      lower,
      upper_inclusive,
      lower_inclusive
    };
  }
}

function parseValidity (str) {
  const range = parseRange(str);

  if (range) {
    const ts0 = range.lower ? new Date(range.lower) : null;
    const ts1 = range.upper ? new Date(range.upper) : null;

    return {
      ...range,
      lower: ts0,
      upper: ts1
    };
  }
}

function withinValidity (range, ts) {
  if (!ts) {
    ts = new Date();
  }

  if (typeof range === "string") {
    range = parseValidity(range);
  }

  if (range.lower) {
    if (range.lower_inclusive) {
      if (!(range.lower <= ts)) {
        return false;
      }
    } else {
      if (!(range.lower < ts)) {
        return false;
      }
    }
  }

  if (range.upper) {
    if (range.upper_inclusive) {
      if (!(range.upper >= ts)) {
        return false;
      }
    } else {
      if (!(range.upper > ts)) {
        return false;
      }
    }
  }

  return true;
}

module.exports = {
  parseRange,
  parseValidity,
  withinValidity
}
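These helpers parse PostgreSQL-style range literals. One caveat worth noting: bounds are handed straight to new Date(), so the sketch below assumes unquoted ISO bounds; quoted timestamps as emitted by tstzrange are unwrapped upstream (cf. parseValidity in event/changes.js).

const { parseValidity, withinValidity } = require('./lib/utils/ranges');

// Half-open validity interval.
const v = "[2023-10-22T11:09:24Z,2023-10-22T12:56:03Z)";

withinValidity(v, new Date("2023-10-22T12:00:00Z")); // true
withinValidity(v, new Date("2023-10-22T12:56:03Z")); // false: upper bound exclusive
parseValidity("[2023-10-22T11:09:24Z,)").upper;      // null, i.e. open-ended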
6
lib/www/server/lib/utils/unique.js
Normal file
@@ -0,0 +1,6 @@

function unique(array) {
  return [...new Set(array)];
}

module.exports = unique;
@@ -11,7 +11,7 @@
  "license": "UNLICENSED",
  "private": true,
  "config": {
    "db_schema": "^0.3.11",
    "db_schema": "^0.4.2",
    "api": "^0.4.0"
  },
  "engines": {
38
lib/www/server/periodic-tasks/index.js
Normal file
@@ -0,0 +1,38 @@
const tasks = require('./tasks');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

function init () {
  const iids = [];

  function start () {
    INFO("Initialising %d periodic tasks", tasks.length);
    for (let t of tasks) {
      const iid = setInterval(t.task, t.timeout);
      iids.push(iid);
    }
    return iids;
  }

  function stop () {
    INFO("Stopping %d periodic tasks", iids.length);
    for (let iid of iids) {
      clearInterval(iid);
    }
  }

  async function cleanup () {
    stop();
    DEBUG("Cleaning up %d periodic tasks", tasks.length);
    for (let t of tasks) {
      if (t.cleanup) {
        await t.cleanup();
      }
    }
  }

  return { start, stop, cleanup, iids };
}

module.exports = {
  init
};
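Each module under periodic-tasks/tasks/ follows the same small contract: export a task to run on an interval, a timeout in milliseconds, and an optional async cleanup for shutdown; the purge-notifications task below is the first instance. A hypothetical second task would look like:

// periodic-tasks/tasks/heartbeat.js (illustrative only, not in this diff;
// it would also need to be added to tasks/index.js to be picked up).
const { DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

const timeout = 60 * 1000; // run once a minute

function task () {
  DEBUG("heartbeat");
}

module.exports = { task, timeout }; // cleanup is optional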
4
lib/www/server/periodic-tasks/tasks/index.js
Normal file
@@ -0,0 +1,4 @@

module.exports = [
  require('./purge-notifications')
];
20
lib/www/server/periodic-tasks/tasks/purge-notifications.js
Normal file
@@ -0,0 +1,20 @@
const { purge } = require('../../lib/db/notify');
const { ALERT, ERROR, WARNING, NOTICE, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);

const timeout = 120*1000; // 2 minutes

function task () {
  DEBUG("Running task");
  purge();
}

async function cleanup () {
  DEBUG("Running cleanup");
  await purge();
}

module.exports = {
  task,
  timeout,
  cleanup
};
@@ -180,6 +180,16 @@ components:
      required: true
      example: 14707

    Since:
      description: Starting epoch
      name: since
      in: path
      schema:
        type: string
        format: date-time
      required: true
      example: 1970-01-01T00:00:00Z

    QueryLimit:
      description: Maximum number of results to return
      name: limit
@@ -206,6 +216,16 @@ components:
        pattern: "(([^\\s,;:]+)(\\s*[,;:\\s]\\s*)?)+"
      example: "line,point,tstamp"

    Unique:
      description: |
        Return unique results. Any value at all represents `true`.
      name: unique
      in: query
      schema:
        type: string
        pattern: ".+"
      example: "t"


  schemas:
    Duration:
@@ -602,14 +622,26 @@ components:
        Flag to indicate that this event is read-only. It cannot be edited by the user or deleted. Typically this concerns system-generated events such as QC results or midnight shots.
      additionalProperties: true

    EventIDAbstract:
      type: object
      properties:
        id:
          type: number
          description: Event ID.


    EventUIDAbstract:
      type: object
      properties:
        uid:
          type: number
          description: Event instance unique ID. When an event is modified, the new entry acquires a different `uid` while keeping the same `id` as the original event.


    EventAbstract:
      allOf:
        -
          type: object
          properties:
            id:
              type: number
              description: Event ID.
          $ref: "#/components/schemas/EventIDAbstract"
        -
          $ref: "#/components/schemas/EventNew"

@@ -659,6 +691,47 @@ components:
        * The third element is either an ISO-8601 timestamp or `null`. The latter indicates +∞. These are the events returned by endpoints that do not concern themselves with event history.
        * The fourth element is one of `]` or `)`. As before, it indicates either an open or closed interval.

    EventChangesIsDeletedAbstract:
      type: object
      properties:
        is_deleted:
          type: boolean
          description: >
            Flag to indicate whether this event or event instance (depending on the presence of a `uid` attribute) has been deleted.


    EventChangesModified:
      description: An event modification.
      allOf:
        -
          $ref: "#/components/schemas/EventAbstract"
        -
          $ref: "#/components/schemas/EventChangesIsDeletedAbstract"

    EventChangesDeleted:
      description: |
        Identification of a deleted event or event instance.

        **Note:** the details of the deleted event are not included, only its `id` and `uid`.
      allOf:
        -
          $ref: "#/components/schemas/EventIDAbstract"
        -
          $ref: "#/components/schemas/EventUIDAbstract"
        -
          $ref: "#/components/schemas/EventChangesIsDeletedAbstract"

    EventChanges:
      description: List of event changes since the given epoch.
      type: array
      items:
        anyOf:
          -
            $ref: "#/components/schemas/EventChangesDeleted"
          -
            $ref: "#/components/schemas/EventChangesModified"


    SeisExportEntryFSP:
      type: object
      properties:
@@ -1159,9 +1232,55 @@ paths:
        content:
          application/json:
            schema:
              type: array
              items:
                $ref: "#/components/schemas/PlannedSequence"
              type: object
              properties:
                remarks:
                  type: string
                  description: Planner remarks
                sequences:
                  type: array
                  items:
                    $ref: "#/components/schemas/PlannedSequence"
          text/csv:
            schema:
              type: string
              format: csv
            description: |
              Returns a CSV response containing one row for each planned sequence, with the following columns:

              * `sequence`: Sequence number
              * `line`: Line number
              * `fsp`: First shotpoint
              * `lsp`: Last shotpoint
              * `ts0`: Estimated timestamp of the first shotpoint
              * `ts1`: Estimated timestamp of the last shotpoint
              * `name`: Line name
              * `remarks`: Arbitrary comments
              * `num_points`: Number of shotpoints
              * `duration`: Estimated duration in seconds
              * `length`: Line length in metres
              * `azimuth`: Line azimuth
              * `lon0`: Longitude of the first shotpoint
              * `lat0`: Latitude of the first shotpoint
              * `lon1`: Longitude of the last shotpoint
              * `lat1`: Latitude of the last shotpoint
            example: |
              "sequence","line","fsp","lsp","ts0","ts1","name","remarks","num_points","duration","length","azimuth","lon0","lat0","lon1","lat1"
              81,5162,2422,1158,"2023-10-22T11:09:24.912Z","2023-10-22T12:56:03.395Z","2051621081S00000","",633,6398,15799.988472147348,26.4703415983101,2.474872,59.086695,2.596266,59.214146
              82,5178,2444,1146,"2023-10-22T12:56:03.000Z","2023-10-22T14:45:33.607Z","2051781082S00000","",650,6570,16225.02094944685,26.470137885560813,2.469632,59.085264,2.594277,59.216147
          text/html:
            schema:
              type: string
              format: html
            description: |
              An HTML representation of the plan.
          application/pdf:
            schema:
              type: string
              contentMediaType: application/pdf
            description: |
              A PDF representation of the plan.


    post:
      description: Add a new sequence to the plan.
@@ -1382,6 +1501,31 @@ paths:
          $ref: "#/components/responses/401"


  /project/{project}/changes/{since}:
    get:
      summary: Get event change history since epoch.
      tags: [ "log" ]
      security:
        - BearerAuthGuest: []
        - CookieAuthGuest: []
      parameters:
        - $ref: "#/components/parameters/Project"
        - $ref: "#/components/parameters/Since"
        - $ref: "#/components/parameters/Unique"
      responses:
        "200":
          description: List of project event changes. If `unique` is given, only the latest version of each event will be returned, otherwise the entire modification history is given, potentially including the same event `id` multiple times.
          content:
            application/json:
              schema:
                type: array
                items:
                  $ref: "#/components/schemas/EventChanges"

        "401":
          $ref: "#/components/responses/401"

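The new endpoint composes the Since and Unique parameters defined earlier. A hypothetical client call (project id and token invented; /api is the default HTTP_PATH):

// Deduplicated change history since the Unix epoch.
const since = "1970-01-01T00:00:00Z";
fetch(`/api/project/myproject/changes/${since}?unique=t`, {
  headers: { Authorization: "Bearer <token>" } // or the session cookie
})
  .then(res => res.json())
  .then(changes => console.log(changes)); // modified and deleted entries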
  /project/{project}/label:
    get:
      summary: Get project labels.
@@ -95,7 +95,8 @@ for (const header of (cfg._("global.navigation.headers") || []).filter(h => h.ty
const server = dgram.createSocket('udp4');

server.on('error', (err) => {
  console.error(`server error:\n${err.stack}`);
  ERROR(err);
  // console.error(`server error:\n${err.stack}`);
  maybeSendError(err, {title: "UDP listener error on port "+header.port});
  // server.close();
});
@@ -1,71 +0,0 @@
const { pool } = require('../lib/db/connection');

var client;

const channels = {};

async function notify (data) {

  if (data.channel in channels) {
    data._received = new Date();
    try {
      const json = JSON.parse(data.payload);
      data.payload = json;
    } catch {
      // Ignore the error
    }
    for (const listener of channels[data.channel]) {
      await listener(JSON.parse(JSON.stringify(data)));
    }
  }
}

function reconnect () {
  console.log("Reconnecting");
  // No need to provide parameters, channels should already be populated.
  listen();
}

async function listen (addChannels, callback) {
  if (!client) {
    try {
      client = await pool.connect();
    } catch (err) {
      console.error("Error connecting to DB", err);
      console.log("Will try again in 15 seconds");
      setImmediate(() => client = null);
      setTimeout(() => {
        listen(addChannels, callback);
      }, 15000);
      return;
    }
    client.on('notification', notify);
    console.log("Websocket client connected", Object.keys(channels));
    client.on('error', (err) => console.error("Events client error: ", err));
    client.on('end', () => {
      console.warn("Websocket events client disconnected. Will attempt to reconnect in five seconds");
      setImmediate(() => client = null);
      setTimeout(reconnect, 5000);
    });
  }

  if (addChannels) {
    if (!Array.isArray(addChannels)) {
      addChannels = [addChannels];
    }

    for (const channel of addChannels) {
      if (!(channel in channels)) {
        await client.query("LISTEN "+channel);
        channels[channel] = [];
        console.log("Listening to ", channel);
      }

      channels[channel].push(callback);
    }
  }
}

module.exports = {
  listen
}
@@ -1,6 +1,6 @@
const ws = require('ws');
const URL = require('url');
const db = require('./db');
const { listen } = require('../lib/db/notify');
const channels = require('../lib/db/channels');

function start (server, pingInterval=30000) {
@@ -22,7 +22,7 @@ function start (server, pingInterval=30000) {
  }
});

db.listen(channels, (data) => {
listen(channels, (data) => {
  wsServer.clients.forEach( (socket) => {
    socket.send(JSON.stringify(data));
  })
@@ -16,7 +16,12 @@ OUTPATH="$OUTDIR/$OUTNAME"
# 30000/UDP: Navigation system headers
# Not all inputs will be present in all systems.
#
EXPR="udp and (port 4461 or port 4462 or port 30000)"
# NOTE: $INS_HOST must be defined and point to the
# navigation server. The reason we don't use a port
# filter for this data is that it doesn't work
# with fragmented UDP packets.
#
EXPR="udp and (port 4461 or port 4462 or src host $INS_HOST)"

if [[ ! -d "$OUTDIR" ]]; then
  mkdir "$OUTDIR"
42
sbin/rewrite-captures.sh
Executable file
@@ -0,0 +1,42 @@
#!/bin/bash
#
# Rewrite packet captures in order to be able to replay them.
#
# SINET: Rewrite all packets with this source IP address
# SETHER: Rewrite all packets with this source MAC address
#
# DINET: Rewrite all packets with this destination IP address
# DETHER: Rewrite all packets with this destination MAC address
#
# The resulting files have the original name with "-rewritten.pcap"
# appended as a suffix. Those packets may then be replayed from a
# different computer or virtual container, for instance with:
#
#   sudo bittwist -i 1 -v -m10 capture-rewritten.pcap
#
# Where -i n is the interface name (use bittwist -d to list available
# interfaces), -v is the verbose flag and -m10 replays at 10× speed.
#

SINET=${SINET:-$(ip -o -4 addr |grep -v " lo " |head -n 1 |sed -r 's/^.*inet\s([0-9.]+).*$/\1/')}
SETHER=${SETHER:-$(ip -o link |grep -v " lo" |head -n 1 |sed -r 's/^.*ether\s([0-9a-fA-F:]+).*$/\1/')}

DINET=${DINET:-$(ip -o -4 addr |grep -v " lo " |head -n 1 |sed -r 's/^.*inet\s([0-9.]+).*$/\1/')}
DETHER=${DETHER:-$(ip -o link |grep -v " lo" |head -n 1 |sed -r 's/^.*ether\s([0-9a-fA-F:]+).*$/\1/')}

for f in $*; do

  OUTFNAME=$f-rewritten.pcap
  echo $f → $OUTFNAME
  if [[ -n "$SINET" && -n "$SETHER" ]]; then
    tcprewrite -S 0.0.0.0/0:$SINET --enet-smac=$SETHER \
               -D 0.0.0.0/0:$DINET --enet-dmac=$DETHER \
               --infile "$f" \
               --outfile "$OUTFNAME"
  else
    tcprewrite -D 0.0.0.0/0:$DINET --enet-dmac=$DETHER \
               --infile "$f" \
               --outfile "$OUTFNAME"
  fi

done