mirror of
https://gitlab.com/wgp/dougal/software.git
synced 2025-12-06 08:07:08 +00:00
Compare commits
48 Commits
215-flag-u
...
v3
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
992205da4a | ||
|
|
f5e08c68af | ||
|
|
105fee0623 | ||
|
|
aff974c03f | ||
|
|
bada6dc2e2 | ||
|
|
d5aac5e84d | ||
|
|
3577a2ba4a | ||
|
|
04df9f41cc | ||
|
|
fdb5e0cbab | ||
|
|
4b832babfd | ||
|
|
cc3a9b4e5c | ||
|
|
da5a708760 | ||
|
|
9834e85eb9 | ||
|
|
e19601218a | ||
|
|
15c56d3f64 | ||
|
|
632dd1ee75 | ||
|
|
aeff5a491d | ||
|
|
9179c9332d | ||
|
|
bb5de9a00e | ||
|
|
d6b985fcd2 | ||
|
|
3ed8339aa3 | ||
|
|
1b925502bc | ||
|
|
7cea79a9be | ||
|
|
69f565f357 | ||
|
|
23de4d00d7 | ||
|
|
1992efe914 | ||
|
|
c7f3f565cd | ||
|
|
1da02738b0 | ||
|
|
732d8e9be6 | ||
|
|
a2bd614b17 | ||
|
|
003c833293 | ||
|
|
a4c458dc16 | ||
|
|
f7b6ca3f79 | ||
|
|
a7cce69c81 | ||
|
|
2b20a5d69f | ||
|
|
4fc5d1deda | ||
|
|
df13343063 | ||
|
|
a5603cf243 | ||
|
|
b6d4236325 | ||
|
|
7e8f00d9f2 | ||
|
|
721cfb36d1 | ||
|
|
222c951e49 | ||
|
|
45d2e56ed1 | ||
|
|
c5b6c87278 | ||
|
|
fd37e8b8d6 | ||
|
|
ce0310d0b0 | ||
|
|
546bc45861 | ||
|
|
602f2c0a34 |
28
bin/daily_tasks.py
Executable file
28
bin/daily_tasks.py
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/python3

"""
Do daily housekeeping on the database.

This is meant to run shortly after midnight every day.
"""

import configuration
from datastore import Datastore


def _main():
    """Run the once-a-day database tasks for every configured survey."""
    print("Reading configuration")
    surveys = configuration.surveys()

    print("Connecting to database")
    db = Datastore()

    print("Reading surveys")
    for survey in surveys:
        print(f'Survey: {survey["id"]} ({survey["schema"]})')
        # Point the datastore at this survey's schema before running tasks.
        db.set_survey(survey["schema"])

        print("Daily tasks")
        db.run_daily_tasks()

    print("Done")


if __name__ == '__main__':
    _main()
|
||||
@@ -412,7 +412,11 @@ class Datastore:
|
||||
qry = """
|
||||
INSERT INTO raw_lines (sequence, line, remarks, ntbp, incr, meta)
|
||||
VALUES (%s, %s, '', %s, %s, %s)
|
||||
ON CONFLICT DO NOTHING;
|
||||
ON CONFLICT (sequence) DO UPDATE SET
|
||||
line = EXCLUDED.line,
|
||||
ntbp = EXCLUDED.ntbp,
|
||||
incr = EXCLUDED.incr,
|
||||
meta = EXCLUDED.meta;
|
||||
"""
|
||||
|
||||
cursor.execute(qry, (fileinfo["sequence"], fileinfo["line"], ntbp, incr, json.dumps(fileinfo["meta"])))
|
||||
@@ -462,7 +466,9 @@ class Datastore:
|
||||
qry = """
|
||||
INSERT INTO final_lines (sequence, line, remarks, meta)
|
||||
VALUES (%s, %s, '', %s)
|
||||
ON CONFLICT DO NOTHING;
|
||||
ON CONFLICT (sequence) DO UPDATE SET
|
||||
line = EXCLUDED.line,
|
||||
meta = EXCLUDED.meta;
|
||||
"""
|
||||
|
||||
cursor.execute(qry, (fileinfo["sequence"], fileinfo["line"], json.dumps(fileinfo["meta"])))
|
||||
@@ -706,6 +712,27 @@ class Datastore:
|
||||
|
||||
qry = "CALL augment_event_data();"
|
||||
cur.execute(qry)
|
||||
|
||||
qry = "CALL scan_placeholders();"
|
||||
cur.execute(qry)
|
||||
|
||||
if cursor is None:
|
||||
self.maybe_commit()
|
||||
# We do not commit if we've been passed a cursor, instead
|
||||
# we assume that we are in the middle of a transaction
|
||||
|
||||
def run_daily_tasks(self, cursor = None):
    """
    Run once-a-day tasks

    If `cursor` is None a fresh cursor is taken from our connection and
    the work is committed (via maybe_commit) when done.  If a cursor is
    supplied we use it as-is and do NOT commit, on the assumption that
    the caller is in the middle of a larger transaction.
    """
    own_transaction = cursor is None
    cur = self.conn.cursor() if own_transaction else cursor

    cur.execute("CALL log_midnight_shots();")

    if own_transaction:
        self.maybe_commit()
    # We do not commit if we've been passed a cursor, instead
    # we assume that we are in the middle of a transaction
|
||||
|
||||
@@ -20,7 +20,9 @@ if __name__ == '__main__':
|
||||
print(f'Survey: {survey["id"]} ({survey["schema"]})')
|
||||
db.set_survey(survey["schema"])
|
||||
|
||||
print("Planner adjustment")
|
||||
db.adjust_planner()
|
||||
print("Event log housekeeping")
|
||||
db.housekeep_event_log()
|
||||
|
||||
print("Done")
|
||||
|
||||
@@ -38,11 +38,11 @@ if __name__ == '__main__':
|
||||
|
||||
message = " ".join(args["remarks"])
|
||||
|
||||
print("new event:", schema, tstamp, message)
|
||||
print("new event:", schema, tstamp, message, args["label"])
|
||||
|
||||
if schema and tstamp and message:
|
||||
db.set_survey(schema)
|
||||
with db.conn.cursor() as cursor:
|
||||
qry = "INSERT INTO events_timed (tstamp, remarks) VALUES (%s, %s);"
|
||||
cursor.execute(qry, (tstamp, message))
|
||||
qry = "INSERT INTO event_log (tstamp, remarks, labels) VALUES (%s, replace_placeholders(%s, %s, NULL, NULL), %s);"
|
||||
cursor.execute(qry, (tstamp, message, tstamp, args["label"]))
|
||||
db.maybe_commit()
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
|
||||
DOUGAL_ROOT=${DOUGAL_ROOT:-$(dirname "$0")/..}
|
||||
|
||||
BINDIR="$DOUGAL_ROOT/bin"
|
||||
@@ -8,6 +9,20 @@ LOCKFILE=${LOCKFILE:-$VARDIR/runner.lock}
|
||||
|
||||
[ -f ~/.profile ] && . ~/.profile
|
||||
|
||||
DOUGAL_LOG_TAG="dougal.runner[$$]"
|
||||
|
||||
# Only send output to the logger if we have the appropriate
|
||||
# configuration set.
|
||||
if [[ -n "$DOUGAL_LOG_TAG" && -n "$DOUGAL_LOG_FACILITY" ]]; then
|
||||
function _logger () {
|
||||
logger $*
|
||||
}
|
||||
else
|
||||
function _logger () {
|
||||
: # This is the Bash null command
|
||||
}
|
||||
fi
|
||||
|
||||
function tstamp () {
|
||||
date -u +%Y-%m-%dT%H:%M:%SZ
|
||||
}
|
||||
@@ -18,26 +33,44 @@ function prefix () {
|
||||
|
||||
function print_log () {
|
||||
printf "$(prefix)\033[36m%s\033[0m\n" "$*"
|
||||
_logger -t "$DOUGAL_LOG_TAG" -p "$DOUGAL_LOG_FACILITY.info" "$*"
|
||||
}
|
||||
|
||||
function print_info () {
|
||||
printf "$(prefix)\033[0m%s\n" "$*"
|
||||
_logger -t "$DOUGAL_LOG_TAG" -p "$DOUGAL_LOG_FACILITY.debug" "$*"
|
||||
}
|
||||
|
||||
function print_warning () {
|
||||
printf "$(prefix)\033[33;1m%s\033[0m\n" "$*"
|
||||
_logger -t "$DOUGAL_LOG_TAG" -p "$DOUGAL_LOG_FACILITY.warning" "$*"
|
||||
}
|
||||
|
||||
function print_error () {
|
||||
printf "$(prefix)\033[31m%s\033[0m\n" "$*"
|
||||
_logger -t "$DOUGAL_LOG_TAG" -p "$DOUGAL_LOG_FACILITY.error" "$*"
|
||||
}
|
||||
|
||||
function run () {
|
||||
PROGNAME=$(basename "$1")
|
||||
PROGNAME=${PROGNAME:-$(basename "$1")}
|
||||
|
||||
STDOUTLOG="$VARDIR/$PROGNAME.out"
|
||||
STDERRLOG="$VARDIR/$PROGNAME.err"
|
||||
|
||||
"$1" >"$STDOUTLOG" 2>"$STDERRLOG" || {
|
||||
# What follows runs the command that we have been given (with any arguments passed)
|
||||
# and logs:
|
||||
# * stdout to $STDOUTLOG (a temporary file) and possibly to syslog, if enabled.
|
||||
# * stderr to $STDERRLOG (a temporary file) and possibly to syslog, if enabled.
|
||||
#
|
||||
# When logging to syslog, stdout goes as debug level and stderr as warning (not error)
|
||||
#
|
||||
# The temporary file is used in case the command fails, at which point we try to log
|
||||
# a warning in GitLab's alerts facility.
|
||||
|
||||
$* \
|
||||
> >(tee $STDOUTLOG |_logger -t "dougal.runner.$PROGNAME[$$]" -p "$DOUGAL_LOG_FACILITY.debug") \
|
||||
2> >(tee $STDERRLOG |_logger -t "dougal.runner.$PROGNAME[$$]" -p "$DOUGAL_LOG_FACILITY.warning") || {
|
||||
|
||||
print_error "Failed: $PROGNAME"
|
||||
cat $STDOUTLOG
|
||||
cat $STDERRLOG
|
||||
@@ -52,8 +85,8 @@ function run () {
|
||||
|
||||
exit 2
|
||||
}
|
||||
# cat $STDOUTLOG
|
||||
|
||||
unset PROGNAME
|
||||
rm $STDOUTLOG $STDERRLOG
|
||||
}
|
||||
|
||||
@@ -117,13 +150,13 @@ run $BINDIR/import_smsrc.py
|
||||
print_log "Process ASAQC queue"
|
||||
# Run insecure in test mode:
|
||||
# export NODE_TLS_REJECT_UNAUTHORIZED=0
|
||||
run $DOUGAL_ROOT/lib/www/server/queues/asaqc/index.js
|
||||
PROGNAME=asaqc_queue run $DOUGAL_ROOT/lib/www/server/queues/asaqc/index.js
|
||||
|
||||
print_log "Run database housekeeping actions"
|
||||
run $BINDIR/housekeep_database.py
|
||||
|
||||
print_log "Run QCs"
|
||||
run $DOUGAL_ROOT/lib/www/server/lib/qc/index.js
|
||||
PROGNAME=run_qc run $DOUGAL_ROOT/lib/www/server/lib/qc/index.js
|
||||
|
||||
|
||||
rm "$LOCKFILE"
|
||||
|
||||
@@ -20,7 +20,7 @@ SET row_security = off;
|
||||
-- Name: dougal; Type: DATABASE; Schema: -; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE DATABASE dougal WITH TEMPLATE = template0 ENCODING = 'UTF8' LOCALE = 'en_GB.UTF-8';
|
||||
CREATE DATABASE dougal WITH TEMPLATE = template0 ENCODING = 'UTF8' LC_COLLATE = 'C' LC_CTYPE = 'en_GB.UTF-8';
|
||||
|
||||
|
||||
ALTER DATABASE dougal OWNER TO postgres;
|
||||
@@ -144,6 +144,107 @@ CREATE TYPE public.queue_item_status AS ENUM (
|
||||
|
||||
ALTER TYPE public.queue_item_status OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: event_meta(timestamp with time zone); Type: FUNCTION; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE FUNCTION public.event_meta(tstamp timestamp with time zone) RETURNS jsonb
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
BEGIN
|
||||
RETURN event_meta(tstamp, NULL, NULL);
|
||||
END;
|
||||
$$;
|
||||
|
||||
|
||||
ALTER FUNCTION public.event_meta(tstamp timestamp with time zone) OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: FUNCTION event_meta(tstamp timestamp with time zone); Type: COMMENT; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
COMMENT ON FUNCTION public.event_meta(tstamp timestamp with time zone) IS 'Overload of event_meta (timestamptz, integer, integer) for use when searching by timestamp.';
|
||||
|
||||
|
||||
--
|
||||
-- Name: event_meta(integer, integer); Type: FUNCTION; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE FUNCTION public.event_meta(sequence integer, point integer) RETURNS jsonb
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
BEGIN
|
||||
RETURN event_meta(NULL, sequence, point);
|
||||
END;
|
||||
$$;
|
||||
|
||||
|
||||
ALTER FUNCTION public.event_meta(sequence integer, point integer) OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: FUNCTION event_meta(sequence integer, point integer); Type: COMMENT; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
COMMENT ON FUNCTION public.event_meta(sequence integer, point integer) IS 'Overload of event_meta (timestamptz, integer, integer) for use when searching by sequence / point.';
|
||||
|
||||
|
||||
--
|
||||
-- Name: event_meta(timestamp with time zone, integer, integer); Type: FUNCTION; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
-- Look up the raw real-time metadata blob (JSONB) associated with an event.
-- Callers supply EITHER a timestamp (preferred, faster) OR a
-- sequence / point pair; the other arguments are passed as NULL by the
-- two-argument overloads defined alongside this function.
CREATE FUNCTION public.event_meta(tstamp timestamp with time zone, sequence integer, point integer) RETURNS jsonb
    LANGUAGE plpgsql
    AS $$
DECLARE
  result jsonb;
  -- Tolerance is hard-coded, at least until a need to expose arises.
  tolerance numeric;
BEGIN
  tolerance := 3; -- seconds

  -- We search by timestamp if we can, as that's a lot quicker
  IF tstamp IS NOT NULL THEN

    -- Nearest row within ±tolerance seconds of the requested timestamp;
    -- ORDER BY absolute time difference so the closest match wins.
    SELECT meta
    INTO result
    FROM real_time_inputs rti
    WHERE
      rti.tstamp BETWEEN (event_meta.tstamp - tolerance * interval '1 second') AND (event_meta.tstamp + tolerance * interval '1 second')
    ORDER BY abs(extract('epoch' FROM rti.tstamp - event_meta.tstamp ))
    LIMIT 1;

  ELSE

    -- Fall back to matching the sequence / point recorded inside the
    -- meta JSONB itself; take the most recent row if several match.
    SELECT meta
    INTO result
    FROM real_time_inputs rti
    WHERE
      (meta->>'_sequence')::integer = event_meta.sequence AND
      (meta->>'_point')::integer = event_meta.point
    ORDER BY rti.tstamp DESC
    LIMIT 1;

  END IF;

  -- NULL when nothing matched (SELECT INTO leaves result unset).
  RETURN result;

END;
$$;
|
||||
|
||||
|
||||
ALTER FUNCTION public.event_meta(tstamp timestamp with time zone, sequence integer, point integer) OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: FUNCTION event_meta(tstamp timestamp with time zone, sequence integer, point integer); Type: COMMENT; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
COMMENT ON FUNCTION public.event_meta(tstamp timestamp with time zone, sequence integer, point integer) IS 'Return the real-time event metadata associated with a sequence / point in the current project or
|
||||
with a given timestamp. Timestamp that is first searched for in the shot tables
|
||||
of the current prospect or, if not found, in the real-time data.
|
||||
|
||||
Returns a JSONB object.';
|
||||
|
||||
|
||||
--
|
||||
-- Name: geometry_from_tstamp(timestamp with time zone, numeric); Type: FUNCTION; Schema: public; Owner: postgres
|
||||
--
|
||||
@@ -153,12 +254,12 @@ CREATE FUNCTION public.geometry_from_tstamp(ts timestamp with time zone, toleran
|
||||
AS $$
|
||||
SELECT
|
||||
geometry,
|
||||
extract('epoch' FROM (meta->>'tstamp')::timestamptz - ts ) AS delta
|
||||
extract('epoch' FROM tstamp - ts ) AS delta
|
||||
FROM real_time_inputs
|
||||
WHERE
|
||||
geometry IS NOT NULL AND
|
||||
abs(extract('epoch' FROM (meta->>'tstamp')::timestamptz - ts )) < tolerance
|
||||
ORDER BY abs(extract('epoch' FROM (meta->>'tstamp')::timestamptz - ts ))
|
||||
tstamp BETWEEN (ts - tolerance * interval '1 second') AND (ts + tolerance * interval '1 second')
|
||||
ORDER BY abs(extract('epoch' FROM tstamp - ts ))
|
||||
LIMIT 1;
|
||||
$$;
|
||||
|
||||
@@ -172,6 +273,78 @@ ALTER FUNCTION public.geometry_from_tstamp(ts timestamp with time zone, toleranc
|
||||
COMMENT ON FUNCTION public.geometry_from_tstamp(ts timestamp with time zone, tolerance numeric, OUT geometry public.geometry, OUT delta numeric) IS 'Get geometry from timestamp';
|
||||
|
||||
|
||||
--
|
||||
-- Name: interpolate_geometry_from_tstamp(timestamp with time zone, numeric); Type: FUNCTION; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
-- Interpolate a position for timestamp `ts` by linear interpolation
-- between the nearest real-time fixes on either side of it, provided
-- those fixes are no more than `maxspan` seconds apart.
-- Returns NULL (with a NOTICE) when interpolation is not possible.
CREATE FUNCTION public.interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric) RETURNS public.geometry
    LANGUAGE plpgsql
    AS $$
DECLARE
  ts0 timestamptz;    -- timestamp of the fix at or before ts
  ts1 timestamptz;    -- timestamp of the fix at or after ts
  geom0 geometry;     -- geometry of the earlier fix
  geom1 geometry;     -- geometry of the later fix
  span numeric;       -- seconds between the two fixes
  fraction numeric;   -- position of ts within [ts0, ts1], 0..1
BEGIN

  -- Latest fix at or before the requested timestamp.
  SELECT tstamp, geometry
  INTO ts0, geom0
  FROM real_time_inputs
  WHERE tstamp <= ts
  ORDER BY tstamp DESC
  LIMIT 1;

  -- Earliest fix at or after the requested timestamp.
  SELECT tstamp, geometry
  INTO ts1, geom1
  FROM real_time_inputs
  WHERE tstamp >= ts
  ORDER BY tstamp ASC
  LIMIT 1;

  IF geom0 IS NULL OR geom1 IS NULL THEN
    RAISE NOTICE 'Interpolation failed (no straddling data)';
    RETURN NULL;
  END IF;

  -- See if we got an exact match
  IF ts0 = ts THEN
    RETURN geom0;
  ELSIF ts1 = ts THEN
    RETURN geom1;
  END IF;

  span := extract('epoch' FROM ts1 - ts0);

  IF span > maxspan THEN
    RAISE NOTICE 'Interpolation timespan % outside maximum requested (%)', span, maxspan;
    RETURN NULL;
  END IF;

  -- span cannot be zero here: ts0 = ts1 would imply ts0 = ts = ts1,
  -- which was handled by the exact-match branch above.
  fraction := extract('epoch' FROM ts - ts0) / span;

  IF fraction < 0 OR fraction > 1 THEN
    RAISE NOTICE 'Requested timestamp % outside of interpolation span (fraction: %)', ts, fraction;
    RETURN NULL;
  END IF;

  -- Linear interpolation along the segment between the two fixes.
  RETURN ST_LineInterpolatePoint(St_MakeLine(geom0, geom1), fraction);

END;
$$;
|
||||
|
||||
|
||||
ALTER FUNCTION public.interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric) OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: FUNCTION interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric); Type: COMMENT; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
COMMENT ON FUNCTION public.interpolate_geometry_from_tstamp(ts timestamp with time zone, maxspan numeric) IS 'Interpolate a position over a given maximum timespan (in seconds)
|
||||
based on real-time inputs. Returns a POINT geometry.';
|
||||
|
||||
|
||||
--
|
||||
-- Name: notify(); Type: FUNCTION; Schema: public; Owner: postgres
|
||||
--
|
||||
@@ -428,13 +601,6 @@ ALTER TABLE ONLY public.queue_items
|
||||
ADD CONSTRAINT queue_items_pkey PRIMARY KEY (item_id);
|
||||
|
||||
|
||||
--
|
||||
-- Name: meta_tstamp_idx; Type: INDEX; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE INDEX meta_tstamp_idx ON public.real_time_inputs USING btree (((meta ->> 'tstamp'::text)) DESC);
|
||||
|
||||
|
||||
--
|
||||
-- Name: tstamp_idx; Type: INDEX; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.5"}')
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.12"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.3.5"}' WHERE public.info.key = 'version';
|
||||
SET value = public.info.value || '{"db_schema": "0.3.12"}' WHERE public.info.key = 'version';
|
||||
|
||||
@@ -70,174 +70,174 @@ If the path matches that of an existing entry, delete that entry (which cascades
|
||||
CREATE PROCEDURE _SURVEY__TEMPLATE_.adjust_planner()
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
DECLARE
|
||||
_planner_config jsonb;
|
||||
_planned_line planned_lines%ROWTYPE;
|
||||
_lag interval;
|
||||
_last_sequence sequences_summary%ROWTYPE;
|
||||
_deltatime interval;
|
||||
_shotinterval interval;
|
||||
_tstamp timestamptz;
|
||||
_incr integer;
|
||||
BEGIN
|
||||
DECLARE
|
||||
_planner_config jsonb;
|
||||
_planned_line planned_lines%ROWTYPE;
|
||||
_lag interval;
|
||||
_last_sequence sequences_summary%ROWTYPE;
|
||||
_deltatime interval;
|
||||
_shotinterval interval;
|
||||
_tstamp timestamptz;
|
||||
_incr integer;
|
||||
BEGIN
|
||||
|
||||
SET CONSTRAINTS planned_lines_pkey DEFERRED;
|
||||
SET CONSTRAINTS planned_lines_pkey DEFERRED;
|
||||
|
||||
SELECT data->'planner'
|
||||
INTO _planner_config
|
||||
FROM file_data
|
||||
WHERE data ? 'planner';
|
||||
SELECT data->'planner'
|
||||
INTO _planner_config
|
||||
FROM file_data
|
||||
WHERE data ? 'planner';
|
||||
|
||||
SELECT *
|
||||
INTO _last_sequence
|
||||
FROM sequences_summary
|
||||
ORDER BY sequence DESC
|
||||
LIMIT 1;
|
||||
SELECT *
|
||||
INTO _last_sequence
|
||||
FROM sequences_summary
|
||||
ORDER BY sequence DESC
|
||||
LIMIT 1;
|
||||
|
||||
SELECT *
|
||||
INTO _planned_line
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
|
||||
SELECT *
|
||||
INTO _planned_line
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
|
||||
|
||||
SELECT
|
||||
COALESCE(
|
||||
((lead(ts0) OVER (ORDER BY sequence)) - ts1),
|
||||
make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer)
|
||||
)
|
||||
INTO _lag
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
|
||||
SELECT
|
||||
COALESCE(
|
||||
((lead(ts0) OVER (ORDER BY sequence)) - ts1),
|
||||
make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer)
|
||||
)
|
||||
INTO _lag
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
|
||||
|
||||
_incr = sign(_last_sequence.lsp - _last_sequence.fsp);
|
||||
_incr = sign(_last_sequence.lsp - _last_sequence.fsp);
|
||||
|
||||
RAISE NOTICE '_planner_config: %', _planner_config;
|
||||
RAISE NOTICE '_last_sequence: %', _last_sequence;
|
||||
RAISE NOTICE '_planned_line: %', _planned_line;
|
||||
RAISE NOTICE '_incr: %', _incr;
|
||||
RAISE NOTICE '_planner_config: %', _planner_config;
|
||||
RAISE NOTICE '_last_sequence: %', _last_sequence;
|
||||
RAISE NOTICE '_planned_line: %', _planned_line;
|
||||
RAISE NOTICE '_incr: %', _incr;
|
||||
|
||||
-- Does the latest sequence match a planned sequence?
|
||||
IF _planned_line IS NULL THEN -- No it doesn't
|
||||
RAISE NOTICE 'Latest sequence shot does not match a planned sequence';
|
||||
SELECT * INTO _planned_line FROM planned_lines ORDER BY sequence ASC LIMIT 1;
|
||||
RAISE NOTICE '_planned_line: %', _planned_line;
|
||||
-- Does the latest sequence match a planned sequence?
|
||||
IF _planned_line IS NULL THEN -- No it doesn't
|
||||
RAISE NOTICE 'Latest sequence shot does not match a planned sequence';
|
||||
SELECT * INTO _planned_line FROM planned_lines ORDER BY sequence ASC LIMIT 1;
|
||||
RAISE NOTICE '_planned_line: %', _planned_line;
|
||||
|
||||
IF _planned_line.sequence <= _last_sequence.sequence THEN
|
||||
RAISE NOTICE 'Renumbering the planned sequences starting from %', _planned_line.sequence + 1;
|
||||
-- Renumber the planned sequences starting from last shot sequence number + 1
|
||||
UPDATE planned_lines
|
||||
SET sequence = sequence + _last_sequence.sequence - _planned_line.sequence + 1;
|
||||
END IF;
|
||||
IF _planned_line.sequence <= _last_sequence.sequence THEN
|
||||
RAISE NOTICE 'Renumbering the planned sequences starting from %', _planned_line.sequence + 1;
|
||||
-- Renumber the planned sequences starting from last shot sequence number + 1
|
||||
UPDATE planned_lines
|
||||
SET sequence = sequence + _last_sequence.sequence - _planned_line.sequence + 1;
|
||||
END IF;
|
||||
|
||||
-- The correction to make to the first planned line's ts0 will be based on either the last
|
||||
-- sequence's EOL + default line change time or the current time, whichever is later.
|
||||
_deltatime := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1) + make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer), current_timestamp) - _planned_line.ts0;
|
||||
-- The correction to make to the first planned line's ts0 will be based on either the last
|
||||
-- sequence's EOL + default line change time or the current time, whichever is later.
|
||||
_deltatime := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1) + make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer), current_timestamp) - _planned_line.ts0;
|
||||
|
||||
-- Is the first of the planned lines start time in the past? (±5 mins)
|
||||
IF _planned_line.ts0 < (current_timestamp - make_interval(mins => 5)) THEN
|
||||
RAISE NOTICE 'First planned line is in the past. Adjusting times by %', _deltatime;
|
||||
-- Adjust the start / end time of the planned lines by assuming that we are at
|
||||
-- `defaultLineChangeDuration` minutes away from SOL of the first planned line.
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
ts0 = ts0 + _deltatime,
|
||||
ts1 = ts1 + _deltatime;
|
||||
END IF;
|
||||
-- Is the first of the planned lines start time in the past? (±5 mins)
|
||||
IF _planned_line.ts0 < (current_timestamp - make_interval(mins => 5)) THEN
|
||||
RAISE NOTICE 'First planned line is in the past. Adjusting times by %', _deltatime;
|
||||
-- Adjust the start / end time of the planned lines by assuming that we are at
|
||||
-- `defaultLineChangeDuration` minutes away from SOL of the first planned line.
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
ts0 = ts0 + _deltatime,
|
||||
ts1 = ts1 + _deltatime;
|
||||
END IF;
|
||||
|
||||
ELSE -- Yes it does
|
||||
RAISE NOTICE 'Latest sequence does match a planned sequence: %, %', _planned_line.sequence, _planned_line.line;
|
||||
ELSE -- Yes it does
|
||||
RAISE NOTICE 'Latest sequence does match a planned sequence: %, %', _planned_line.sequence, _planned_line.line;
|
||||
|
||||
-- Is it online?
|
||||
IF EXISTS(SELECT 1 FROM raw_lines_files WHERE sequence = _last_sequence.sequence AND hash = '*online*') THEN
|
||||
-- Yes it is
|
||||
RAISE NOTICE 'Sequence % is online', _last_sequence.sequence;
|
||||
-- Is it online?
|
||||
IF EXISTS(SELECT 1 FROM raw_lines_files WHERE sequence = _last_sequence.sequence AND hash = '*online*') THEN
|
||||
-- Yes it is
|
||||
RAISE NOTICE 'Sequence % is online', _last_sequence.sequence;
|
||||
|
||||
-- Let us get the SOL from the events log if we can
|
||||
RAISE NOTICE 'Trying to set fsp, ts0 from events log FSP, FGSP';
|
||||
WITH e AS (
|
||||
SELECT * FROM events
|
||||
WHERE
|
||||
sequence = _last_sequence.sequence
|
||||
AND ('FSP' = ANY(labels) OR 'FGSP' = ANY(labels))
|
||||
ORDER BY tstamp LIMIT 1
|
||||
)
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
fsp = COALESCE(e.point, fsp),
|
||||
ts0 = COALESCE(e.tstamp, ts0)
|
||||
FROM e
|
||||
WHERE planned_lines.sequence = _last_sequence.sequence;
|
||||
-- Let us get the SOL from the events log if we can
|
||||
RAISE NOTICE 'Trying to set fsp, ts0 from events log FSP, FGSP';
|
||||
WITH e AS (
|
||||
SELECT * FROM event_log
|
||||
WHERE
|
||||
sequence = _last_sequence.sequence
|
||||
AND ('FSP' = ANY(labels) OR 'FGSP' = ANY(labels))
|
||||
ORDER BY tstamp LIMIT 1
|
||||
)
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
fsp = COALESCE(e.point, fsp),
|
||||
ts0 = COALESCE(e.tstamp, ts0)
|
||||
FROM e
|
||||
WHERE planned_lines.sequence = _last_sequence.sequence;
|
||||
|
||||
-- Shot interval
|
||||
_shotinterval := (_last_sequence.ts1 - _last_sequence.ts0) / abs(_last_sequence.lsp - _last_sequence.fsp);
|
||||
-- Shot interval
|
||||
_shotinterval := (_last_sequence.ts1 - _last_sequence.ts0) / abs(_last_sequence.lsp - _last_sequence.fsp);
|
||||
|
||||
RAISE NOTICE 'Estimating EOL from current shot interval: %', _shotinterval;
|
||||
RAISE NOTICE 'Estimating EOL from current shot interval: %', _shotinterval;
|
||||
|
||||
SELECT (abs(lsp-fsp) * _shotinterval + ts0) - ts1
|
||||
INTO _deltatime
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence;
|
||||
SELECT (abs(lsp-fsp) * _shotinterval + ts0) - ts1
|
||||
INTO _deltatime
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence;
|
||||
|
||||
---- Set ts1 for the current sequence
|
||||
--UPDATE planned_lines
|
||||
--SET
|
||||
--ts1 = (abs(lsp-fsp) * _shotinterval) + ts0
|
||||
--WHERE sequence = _last_sequence.sequence;
|
||||
---- Set ts1 for the current sequence
|
||||
--UPDATE planned_lines
|
||||
--SET
|
||||
--ts1 = (abs(lsp-fsp) * _shotinterval) + ts0
|
||||
--WHERE sequence = _last_sequence.sequence;
|
||||
|
||||
RAISE NOTICE 'Adjustment is %', _deltatime;
|
||||
RAISE NOTICE 'Adjustment is %', _deltatime;
|
||||
|
||||
IF abs(EXTRACT(EPOCH FROM _deltatime)) < 8 THEN
|
||||
RAISE NOTICE 'Adjustment too small (< 8 s), so not applying it';
|
||||
RETURN;
|
||||
END IF;
|
||||
IF abs(EXTRACT(EPOCH FROM _deltatime)) < 8 THEN
|
||||
RAISE NOTICE 'Adjustment too small (< 8 s), so not applying it';
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
-- Adjust ts1 for the current sequence
|
||||
UPDATE planned_lines
|
||||
SET ts1 = ts1 + _deltatime
|
||||
WHERE sequence = _last_sequence.sequence;
|
||||
-- Adjust ts1 for the current sequence
|
||||
UPDATE planned_lines
|
||||
SET ts1 = ts1 + _deltatime
|
||||
WHERE sequence = _last_sequence.sequence;
|
||||
|
||||
-- Now shift all sequences after
|
||||
UPDATE planned_lines
|
||||
SET ts0 = ts0 + _deltatime, ts1 = ts1 + _deltatime
|
||||
WHERE sequence > _last_sequence.sequence;
|
||||
-- Now shift all sequences after
|
||||
UPDATE planned_lines
|
||||
SET ts0 = ts0 + _deltatime, ts1 = ts1 + _deltatime
|
||||
WHERE sequence > _last_sequence.sequence;
|
||||
|
||||
RAISE NOTICE 'Deleting planned sequences before %', _planned_line.sequence;
|
||||
-- Remove all previous planner entries.
|
||||
DELETE
|
||||
FROM planned_lines
|
||||
WHERE sequence < _last_sequence.sequence;
|
||||
RAISE NOTICE 'Deleting planned sequences before %', _planned_line.sequence;
|
||||
-- Remove all previous planner entries.
|
||||
DELETE
|
||||
FROM planned_lines
|
||||
WHERE sequence < _last_sequence.sequence;
|
||||
|
||||
ELSE
|
||||
-- No it isn't
|
||||
RAISE NOTICE 'Sequence % is offline', _last_sequence.sequence;
|
||||
ELSE
|
||||
-- No it isn't
|
||||
RAISE NOTICE 'Sequence % is offline', _last_sequence.sequence;
|
||||
|
||||
-- We were supposed to finish at _planned_line.ts1 but we finished at:
|
||||
_tstamp := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1), current_timestamp);
|
||||
-- WARNING Next line is for testing only
|
||||
--_tstamp := COALESCE(_last_sequence.ts1_final, _last_sequence.ts1);
|
||||
-- So we need to adjust timestamps by:
|
||||
_deltatime := _tstamp - _planned_line.ts1;
|
||||
-- We were supposed to finish at _planned_line.ts1 but we finished at:
|
||||
_tstamp := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1), current_timestamp);
|
||||
-- WARNING Next line is for testing only
|
||||
--_tstamp := COALESCE(_last_sequence.ts1_final, _last_sequence.ts1);
|
||||
-- So we need to adjust timestamps by:
|
||||
_deltatime := _tstamp - _planned_line.ts1;
|
||||
|
||||
RAISE NOTICE 'Planned end: %, actual end: % (%, %)', _planned_line.ts1, _tstamp, _planned_line.sequence, _last_sequence.sequence;
|
||||
RAISE NOTICE 'Shifting times by % for sequences > %', _deltatime, _planned_line.sequence;
|
||||
-- NOTE: This won't work if sequences are not, err… sequential.
|
||||
-- NOTE: This has been known to happen in 2020.
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
ts0 = ts0 + _deltatime,
|
||||
ts1 = ts1 + _deltatime
|
||||
WHERE sequence > _planned_line.sequence;
|
||||
RAISE NOTICE 'Planned end: %, actual end: % (%, %)', _planned_line.ts1, _tstamp, _planned_line.sequence, _last_sequence.sequence;
|
||||
RAISE NOTICE 'Shifting times by % for sequences > %', _deltatime, _planned_line.sequence;
|
||||
-- NOTE: This won't work if sequences are not, err… sequential.
|
||||
-- NOTE: This has been known to happen in 2020.
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
ts0 = ts0 + _deltatime,
|
||||
ts1 = ts1 + _deltatime
|
||||
WHERE sequence > _planned_line.sequence;
|
||||
|
||||
RAISE NOTICE 'Deleting planned sequences up to %', _planned_line.sequence;
|
||||
-- Remove all previous planner entries.
|
||||
DELETE
|
||||
FROM planned_lines
|
||||
WHERE sequence <= _last_sequence.sequence;
|
||||
RAISE NOTICE 'Deleting planned sequences up to %', _planned_line.sequence;
|
||||
-- Remove all previous planner entries.
|
||||
DELETE
|
||||
FROM planned_lines
|
||||
WHERE sequence <= _last_sequence.sequence;
|
||||
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
|
||||
|
||||
ALTER PROCEDURE _SURVEY__TEMPLATE_.adjust_planner() OWNER TO postgres;
|
||||
@@ -279,6 +279,26 @@ ALTER FUNCTION _SURVEY__TEMPLATE_.assoc_tstamp() OWNER TO postgres;
|
||||
--
|
||||
|
||||
CREATE PROCEDURE _SURVEY__TEMPLATE_.augment_event_data()
|
||||
LANGUAGE sql
|
||||
AS $$
|
||||
CALL augment_event_data(600);
|
||||
$$;
|
||||
|
||||
|
||||
ALTER PROCEDURE _SURVEY__TEMPLATE_.augment_event_data() OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: PROCEDURE augment_event_data(); Type: COMMENT; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
COMMENT ON PROCEDURE _SURVEY__TEMPLATE_.augment_event_data() IS 'Overload of augment_event_data(maxspan numeric) with a maxspan value of 600 seconds.';
|
||||
|
||||
|
||||
--
|
||||
-- Name: augment_event_data(numeric); Type: PROCEDURE; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE PROCEDURE _SURVEY__TEMPLATE_.augment_event_data(IN maxspan numeric)
|
||||
LANGUAGE sql
|
||||
AS $$
|
||||
-- Populate the timestamp of sequence / point events
|
||||
@@ -308,7 +328,7 @@ CREATE PROCEDURE _SURVEY__TEMPLATE_.augment_event_data()
|
||||
SET
|
||||
meta = meta || jsonb_build_object('geometry',
|
||||
(SELECT st_transform(g.geometry, 4326)::jsonb
|
||||
FROM geometry_from_tstamp(e.tstamp, 3) g))
|
||||
FROM interpolate_geometry_from_tstamp(e.tstamp, maxspan) g))
|
||||
WHERE
|
||||
tstamp IS NOT NULL AND
|
||||
sequence IS NULL AND point IS NULL AND
|
||||
@@ -331,13 +351,13 @@ CREATE PROCEDURE _SURVEY__TEMPLATE_.augment_event_data()
|
||||
$$;
|
||||
|
||||
|
||||
ALTER PROCEDURE _SURVEY__TEMPLATE_.augment_event_data() OWNER TO postgres;
|
||||
ALTER PROCEDURE _SURVEY__TEMPLATE_.augment_event_data(IN maxspan numeric) OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: PROCEDURE augment_event_data(); Type: COMMENT; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
-- Name: PROCEDURE augment_event_data(IN maxspan numeric); Type: COMMENT; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
COMMENT ON PROCEDURE _SURVEY__TEMPLATE_.augment_event_data() IS 'Populate missing timestamps and geometries in event_log_full';
|
||||
COMMENT ON PROCEDURE _SURVEY__TEMPLATE_.augment_event_data(IN maxspan numeric) IS 'Populate missing timestamps and geometries in event_log_full';
|
||||
|
||||
|
||||
--
|
||||
@@ -496,6 +516,144 @@ CREATE FUNCTION _SURVEY__TEMPLATE_.event_log_update() RETURNS trigger
|
||||
|
||||
ALTER FUNCTION _SURVEY__TEMPLATE_.event_log_update() OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: event_position(timestamp with time zone); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE FUNCTION _SURVEY__TEMPLATE_.event_position(tstamp timestamp with time zone) RETURNS public.geometry
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
BEGIN
|
||||
RETURN event_position(tstamp, NULL, NULL);
|
||||
END;
|
||||
$$;
|
||||
|
||||
|
||||
ALTER FUNCTION _SURVEY__TEMPLATE_.event_position(tstamp timestamp with time zone) OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: FUNCTION event_position(tstamp timestamp with time zone); Type: COMMENT; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
COMMENT ON FUNCTION _SURVEY__TEMPLATE_.event_position(tstamp timestamp with time zone) IS 'Overload of event_position (timestamptz, integer, integer) for use when searching by timestamp.';
|
||||
|
||||
|
||||
--
|
||||
-- Name: event_position(integer, integer); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE FUNCTION _SURVEY__TEMPLATE_.event_position(sequence integer, point integer) RETURNS public.geometry
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
BEGIN
|
||||
RETURN event_position(NULL, sequence, point);
|
||||
END;
|
||||
$$;
|
||||
|
||||
|
||||
ALTER FUNCTION _SURVEY__TEMPLATE_.event_position(sequence integer, point integer) OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: FUNCTION event_position(sequence integer, point integer); Type: COMMENT; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
COMMENT ON FUNCTION _SURVEY__TEMPLATE_.event_position(sequence integer, point integer) IS 'Overload of event_position (timestamptz, integer, integer) for use when searching by sequence / point.';
|
||||
|
||||
|
||||
--
|
||||
-- Name: event_position(timestamp with time zone, integer, integer); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE FUNCTION _SURVEY__TEMPLATE_.event_position(tstamp timestamp with time zone, sequence integer, point integer) RETURNS public.geometry
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
BEGIN
|
||||
RETURN event_position(tstamp, sequence, point, 3);
|
||||
END;
|
||||
$$;
|
||||
|
||||
|
||||
ALTER FUNCTION _SURVEY__TEMPLATE_.event_position(tstamp timestamp with time zone, sequence integer, point integer) OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: FUNCTION event_position(tstamp timestamp with time zone, sequence integer, point integer); Type: COMMENT; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
COMMENT ON FUNCTION _SURVEY__TEMPLATE_.event_position(tstamp timestamp with time zone, sequence integer, point integer) IS 'Overload of event_position with a default tolerance of three seconds.';
|
||||
|
||||
|
||||
--
|
||||
-- Name: event_position(timestamp with time zone, integer, integer, numeric); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE FUNCTION _SURVEY__TEMPLATE_.event_position(tstamp timestamp with time zone, sequence integer, point integer, tolerance numeric) RETURNS public.geometry
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
DECLARE
|
||||
position geometry;
|
||||
BEGIN
|
||||
|
||||
-- Try and get position by sequence / point first
|
||||
IF sequence IS NOT NULL AND point IS NOT NULL THEN
|
||||
-- Try and get the position from final_shots or raw_shots
|
||||
SELECT COALESCE(f.geometry, r.geometry) geometry
|
||||
INTO position
|
||||
FROM raw_shots r LEFT JOIN final_shots f USING (sequence, point)
|
||||
WHERE r.sequence = event_position.sequence AND r.point = event_position.point;
|
||||
|
||||
IF position IS NOT NULL THEN
|
||||
RETURN position;
|
||||
ELSIF tstamp IS NULL THEN
|
||||
-- Get the timestamp for the sequence / point, if we can.
|
||||
-- It will be used later in the function as we fall back
|
||||
-- to timestamp based search.
|
||||
-- We also adjust the tolerance as we're now dealing with
|
||||
-- an exact timestamp.
|
||||
SELECT COALESCE(f.tstamp, r.tstamp) tstamp, 0.002 tolerance
|
||||
INTO tstamp, tolerance
|
||||
FROM raw_shots r LEFT JOIN final_shots f USING (sequence, point)
|
||||
WHERE r.sequence = event_position.sequence AND r.point = event_position.point;
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
-- If we got here, we better have a timestamp
|
||||
-- First attempt, get a position from final_shots, raw_shots. This may
|
||||
-- be redundant if we got here from the position of having a sequence /
|
||||
-- point without a position, but never mind.
|
||||
SELECT COALESCE(f.geometry, r.geometry) geometry
|
||||
INTO position
|
||||
FROM raw_shots r LEFT JOIN final_shots f USING (sequence, point)
|
||||
WHERE r.tstamp = event_position.tstamp OR f.tstamp = event_position.tstamp
|
||||
LIMIT 1; -- Just to be sure
|
||||
|
||||
IF position IS NULL THEN
|
||||
-- Ok, so everything else so far has failed, let's try and get this
|
||||
-- from real time data. We skip the search via sequence / point and
|
||||
-- go directly for timestamp.
|
||||
SELECT geometry
|
||||
INTO position
|
||||
FROM geometry_from_tstamp(tstamp, tolerance);
|
||||
END IF;
|
||||
|
||||
RETURN position;
|
||||
|
||||
END;
|
||||
$$;
|
||||
|
||||
|
||||
ALTER FUNCTION _SURVEY__TEMPLATE_.event_position(tstamp timestamp with time zone, sequence integer, point integer, tolerance numeric) OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: FUNCTION event_position(tstamp timestamp with time zone, sequence integer, point integer, tolerance numeric); Type: COMMENT; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
COMMENT ON FUNCTION _SURVEY__TEMPLATE_.event_position(tstamp timestamp with time zone, sequence integer, point integer, tolerance numeric) IS 'Return the position associated with a sequence / point in the current project or
|
||||
with a given timestamp. Timestamp that is first searched for in the shot tables
|
||||
of the current prospect or, if not found, in the real-time data.
|
||||
|
||||
Returns a geometry.';
|
||||
|
||||
|
||||
--
|
||||
-- Name: events_seq_labels_single(); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
@@ -513,7 +671,7 @@ BEGIN
|
||||
id <> NEW.id
|
||||
AND label = NEW.label
|
||||
AND id IN (SELECT id FROM events_seq WHERE sequence = _sequence);
|
||||
|
||||
|
||||
DELETE
|
||||
FROM events_timed_labels
|
||||
WHERE
|
||||
@@ -611,15 +769,15 @@ ALTER PROCEDURE _SURVEY__TEMPLATE_.events_timed_seq_update_all() OWNER TO postgr
|
||||
CREATE PROCEDURE _SURVEY__TEMPLATE_.final_line_post_import(IN _seq integer)
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
BEGIN
|
||||
BEGIN
|
||||
|
||||
CALL handle_final_line_events(_seq, 'FSP', 'fsp');
|
||||
CALL handle_final_line_events(_seq, 'FGSP', 'fsp');
|
||||
CALL handle_final_line_events(_seq, 'LGSP', 'lsp');
|
||||
CALL handle_final_line_events(_seq, 'LSP', 'lsp');
|
||||
CALL handle_final_line_events(_seq, 'FSP', 'fsp');
|
||||
CALL handle_final_line_events(_seq, 'FGSP', 'fsp');
|
||||
CALL handle_final_line_events(_seq, 'LGSP', 'lsp');
|
||||
CALL handle_final_line_events(_seq, 'LSP', 'lsp');
|
||||
|
||||
END;
|
||||
$$;
|
||||
END;
|
||||
$$;
|
||||
|
||||
|
||||
ALTER PROCEDURE _SURVEY__TEMPLATE_.final_line_post_import(IN _seq integer) OWNER TO postgres;
|
||||
@@ -632,56 +790,56 @@ CREATE PROCEDURE _SURVEY__TEMPLATE_.handle_final_line_events(IN _seq integer, IN
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
|
||||
DECLARE
|
||||
_line final_lines_summary%ROWTYPE;
|
||||
_column_value integer;
|
||||
_tg_name text := 'final_line';
|
||||
_event event_log%ROWTYPE;
|
||||
event_id integer;
|
||||
BEGIN
|
||||
DECLARE
|
||||
_line final_lines_summary%ROWTYPE;
|
||||
_column_value integer;
|
||||
_tg_name text := 'final_line';
|
||||
_event event_log%ROWTYPE;
|
||||
event_id integer;
|
||||
BEGIN
|
||||
|
||||
SELECT * INTO _line FROM final_lines_summary WHERE sequence = _seq;
|
||||
_event := label_in_sequence(_seq, _label);
|
||||
_column_value := row_to_json(_line)->>_column;
|
||||
SELECT * INTO _line FROM final_lines_summary WHERE sequence = _seq;
|
||||
_event := label_in_sequence(_seq, _label);
|
||||
_column_value := row_to_json(_line)->>_column;
|
||||
|
||||
--RAISE NOTICE '% is %', _label, _event;
|
||||
--RAISE NOTICE 'Line is %', _line;
|
||||
--RAISE NOTICE '% is % (%)', _column, _column_value, _label;
|
||||
--RAISE NOTICE '% is %', _label, _event;
|
||||
--RAISE NOTICE 'Line is %', _line;
|
||||
--RAISE NOTICE '% is % (%)', _column, _column_value, _label;
|
||||
|
||||
IF _event IS NULL THEN
|
||||
--RAISE NOTICE 'We will populate the event log from the sequence data';
|
||||
IF _event IS NULL THEN
|
||||
--RAISE NOTICE 'We will populate the event log from the sequence data';
|
||||
|
||||
INSERT INTO event_log (sequence, point, remarks, labels, meta)
|
||||
VALUES (
|
||||
-- The sequence
|
||||
_seq,
|
||||
-- The shotpoint
|
||||
_column_value,
|
||||
-- Remark. Something like "FSP <linename>"
|
||||
format('%s %s', _label, (SELECT meta->>'lineName' FROM final_lines WHERE sequence = _seq)),
|
||||
-- Label
|
||||
ARRAY[_label],
|
||||
-- Meta. Something like {"auto" : {"FSP" : "final_line"}}
|
||||
json_build_object('auto', json_build_object(_label, _tg_name))
|
||||
);
|
||||
INSERT INTO event_log (sequence, point, remarks, labels, meta)
|
||||
VALUES (
|
||||
-- The sequence
|
||||
_seq,
|
||||
-- The shotpoint
|
||||
_column_value,
|
||||
-- Remark. Something like "FSP <linename>"
|
||||
format('%s %s', _label, (SELECT meta->>'lineName' FROM final_lines WHERE sequence = _seq)),
|
||||
-- Label
|
||||
ARRAY[_label],
|
||||
-- Meta. Something like {"auto" : {"FSP" : "final_line"}}
|
||||
json_build_object('auto', json_build_object(_label, _tg_name))
|
||||
);
|
||||
|
||||
ELSE
|
||||
--RAISE NOTICE 'We may populate the sequence meta from the event log';
|
||||
--RAISE NOTICE 'Unless the event log was populated by us previously';
|
||||
--RAISE NOTICE 'Populated by us previously? %', _event.meta->'auto'->>_label = _tg_name;
|
||||
ELSE
|
||||
--RAISE NOTICE 'We may populate the sequence meta from the event log';
|
||||
--RAISE NOTICE 'Unless the event log was populated by us previously';
|
||||
--RAISE NOTICE 'Populated by us previously? %', _event.meta->'auto'->>_label = _tg_name;
|
||||
|
||||
IF _event.meta->'auto'->>_label IS DISTINCT FROM _tg_name THEN
|
||||
IF _event.meta->'auto'->>_label IS DISTINCT FROM _tg_name THEN
|
||||
|
||||
--RAISE NOTICE 'Adding % found in events log to final_line meta', _label;
|
||||
UPDATE final_lines
|
||||
SET meta = jsonb_set(meta, ARRAY[_label], to_jsonb(_event.point))
|
||||
WHERE sequence = _seq;
|
||||
--RAISE NOTICE 'Adding % found in events log to final_line meta', _label;
|
||||
UPDATE final_lines
|
||||
SET meta = jsonb_set(meta, ARRAY[_label], to_jsonb(_event.point))
|
||||
WHERE sequence = _seq;
|
||||
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
|
||||
|
||||
ALTER PROCEDURE _SURVEY__TEMPLATE_.handle_final_line_events(IN _seq integer, IN _label text, IN _column text) OWNER TO postgres;
|
||||
@@ -696,7 +854,7 @@ CREATE FUNCTION _SURVEY__TEMPLATE_.ij_error(line double precision, point double
|
||||
DECLARE
|
||||
bp jsonb := binning_parameters();
|
||||
ij public.geometry := to_binning_grid(geom, bp);
|
||||
|
||||
|
||||
theta numeric := (bp->>'theta')::numeric * pi() / 180;
|
||||
I_inc numeric DEFAULT 1;
|
||||
J_inc numeric DEFAULT 1;
|
||||
@@ -711,13 +869,13 @@ DECLARE
|
||||
yoff numeric := (bp->'origin'->>'J')::numeric;
|
||||
E0 numeric := (bp->'origin'->>'easting')::numeric;
|
||||
N0 numeric := (bp->'origin'->>'northing')::numeric;
|
||||
|
||||
|
||||
error_i double precision;
|
||||
error_j double precision;
|
||||
BEGIN
|
||||
error_i := (public.st_x(ij) - line) * I_width;
|
||||
error_j := (public.st_y(ij) - point) * J_width;
|
||||
|
||||
|
||||
RETURN public.ST_MakePoint(error_i, error_j);
|
||||
END
|
||||
$$;
|
||||
@@ -792,12 +950,258 @@ ALTER TABLE _SURVEY__TEMPLATE_.event_log OWNER TO postgres;
|
||||
CREATE FUNCTION _SURVEY__TEMPLATE_.label_in_sequence(_sequence integer, _label text) RETURNS _SURVEY__TEMPLATE_.event_log
|
||||
LANGUAGE sql
|
||||
AS $$
|
||||
SELECT * FROM event_log WHERE sequence = _sequence AND _label = ANY(labels);
|
||||
$$;
|
||||
SELECT * FROM event_log WHERE sequence = _sequence AND _label = ANY(labels);
|
||||
$$;
|
||||
|
||||
|
||||
ALTER FUNCTION _SURVEY__TEMPLATE_.label_in_sequence(_sequence integer, _label text) OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: log_midnight_shots(); Type: PROCEDURE; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE PROCEDURE _SURVEY__TEMPLATE_.log_midnight_shots()
|
||||
LANGUAGE sql
|
||||
AS $$
|
||||
CALL log_midnight_shots(NULL, NULL);
|
||||
$$;
|
||||
|
||||
|
||||
ALTER PROCEDURE _SURVEY__TEMPLATE_.log_midnight_shots() OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: PROCEDURE log_midnight_shots(); Type: COMMENT; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
COMMENT ON PROCEDURE _SURVEY__TEMPLATE_.log_midnight_shots() IS 'Overload taking no arguments (adds all missing events).';
|
||||
|
||||
|
||||
--
|
||||
-- Name: log_midnight_shots(date); Type: PROCEDURE; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE PROCEDURE _SURVEY__TEMPLATE_.log_midnight_shots(IN dt0 date)
|
||||
LANGUAGE sql
|
||||
AS $$
|
||||
CALL log_midnight_shots(dt0, NULL);
|
||||
$$;
|
||||
|
||||
|
||||
ALTER PROCEDURE _SURVEY__TEMPLATE_.log_midnight_shots(IN dt0 date) OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: PROCEDURE log_midnight_shots(IN dt0 date); Type: COMMENT; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
COMMENT ON PROCEDURE _SURVEY__TEMPLATE_.log_midnight_shots(IN dt0 date) IS 'Overload taking only a dt0 (adds events on that date or after).';
|
||||
|
||||
|
||||
--
|
||||
-- Name: log_midnight_shots(date, date); Type: PROCEDURE; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE PROCEDURE _SURVEY__TEMPLATE_.log_midnight_shots(IN dt0 date, IN dt1 date)
|
||||
LANGUAGE sql
|
||||
AS $$
|
||||
INSERT INTO event_log (sequence, point, remarks, labels, meta)
|
||||
SELECT
|
||||
sequence, point, remarks, labels,
|
||||
'{"auto": true, "insertedBy": "log_midnight_shots"}'::jsonb
|
||||
FROM midnight_shots ms
|
||||
WHERE
|
||||
(dt0 IS NULL OR ms.tstamp >= dt0) AND
|
||||
(dt1 IS NULL OR ms.tstamp <= dt1) AND
|
||||
NOT EXISTS (
|
||||
SELECT 1
|
||||
FROM event_log el
|
||||
WHERE ms.sequence = el.sequence AND ms.point = el.point AND el.labels @> ms.labels
|
||||
);
|
||||
|
||||
-- Delete any midnight shots that might have been inserted in the log
|
||||
-- but are no longer relevant according to the final_shots data.
|
||||
-- We operate on event_log, so the deletion is traceable.
|
||||
DELETE
|
||||
FROM event_log
|
||||
WHERE id IN (
|
||||
SELECT id
|
||||
FROM event_log el
|
||||
LEFT JOIN midnight_shots ms USING (sequence, point)
|
||||
WHERE
|
||||
'{LDSP,FDSP}'::text[] && el.labels -- &&: Do the arrays overlap?
|
||||
AND ms.sequence IS NULL
|
||||
);
|
||||
$$;
|
||||
|
||||
|
||||
ALTER PROCEDURE _SURVEY__TEMPLATE_.log_midnight_shots(IN dt0 date, IN dt1 date) OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: PROCEDURE log_midnight_shots(IN dt0 date, IN dt1 date); Type: COMMENT; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
COMMENT ON PROCEDURE _SURVEY__TEMPLATE_.log_midnight_shots(IN dt0 date, IN dt1 date) IS 'Add midnight shots between two dates dt0 and dt1 to the event_log, unless the events already exist.';
|
||||
|
||||
|
||||
--
|
||||
-- Name: replace_placeholders(text, timestamp with time zone, integer, integer); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE FUNCTION _SURVEY__TEMPLATE_.replace_placeholders(text_in text, tstamp timestamp with time zone, sequence integer, point integer) RETURNS text
|
||||
LANGUAGE plpgsql
|
||||
AS $_$
|
||||
DECLARE
|
||||
position geometry;
|
||||
metadata jsonb;
|
||||
text_out text;
|
||||
|
||||
json_query text;
|
||||
json_result jsonb;
|
||||
expect_recursion boolean := false;
|
||||
BEGIN
|
||||
|
||||
text_out := text_in;
|
||||
|
||||
-- We only get a position if we are going to need it…
|
||||
IF regexp_match(text_out, '@DMS@|@POS@|@DEG@') IS NOT NULL THEN
|
||||
position := ST_Transform(event_position(tstamp, sequence, point), 4326);
|
||||
END IF;
|
||||
|
||||
-- …and likewise with the metadata.
|
||||
IF regexp_match(text_out, '@BSP@|@WD@|@CMG@|@EN@|@GRID@|@(\$\..*?)@@') IS NOT NULL THEN
|
||||
metadata := event_meta(tstamp, sequence, point);
|
||||
END IF;
|
||||
|
||||
-- We shortcut the evaluation if neither of the above regexps matched
|
||||
IF position IS NULL AND metadata IS NULL THEN
|
||||
RETURN text_out;
|
||||
END IF;
|
||||
|
||||
IF position('@DMS@' IN text_out) != 0 THEN
|
||||
text_out := replace(text_out, '@DMS@', ST_AsLatLonText(position));
|
||||
END IF;
|
||||
|
||||
IF position('@POS@' IN text_out) != 0 THEN
|
||||
text_out := replace(text_out, '@POS@', replace(ST_AsLatLonText(position, 'D.DDDDDD'), ' ', ', '));
|
||||
END IF;
|
||||
|
||||
IF position('@DEG@' IN text_out) != 0 THEN
|
||||
text_out := replace(text_out, '@DEG@', replace(ST_AsLatLonText(position, 'D.DDDDDD'), ' ', ', '));
|
||||
END IF;
|
||||
|
||||
IF position('@EN@' IN text_out) != 0 THEN
|
||||
IF metadata ? 'easting' AND metadata ? 'northing' THEN
|
||||
text_out := replace(text_out, '@EN@', (metadata->>'easting') || ', ' || (metadata->>'northing'));
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
IF position('@GRID@' IN text_out) != 0 THEN
|
||||
IF metadata ? 'easting' AND metadata ? 'northing' THEN
|
||||
text_out := replace(text_out, '@GRID@', (metadata->>'easting') || ', ' || (metadata->>'northing'));
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
IF position('@CMG@' IN text_out) != 0 THEN
|
||||
IF metadata ? 'bearing' THEN
|
||||
text_out := replace(text_out, '@CMG@', metadata->>'bearing');
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
IF position('@BSP@' IN text_out) != 0 THEN
|
||||
IF metadata ? 'speed' THEN
|
||||
text_out := replace(text_out, '@BSP@', round((metadata->>'speed')::numeric * 3600 / 1852, 1)::text);
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
IF position('@WD@' IN text_out) != 0 THEN
|
||||
IF metadata ? 'waterDepth' THEN
|
||||
text_out := replace(text_out, '@WD@', metadata->>'waterDepth');
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
json_query := (regexp_match(text_out, '@(\$\..*?)@@'))[1];
|
||||
IF json_query IS NOT NULL THEN
|
||||
json_result := jsonb_path_query_array(metadata, json_query::jsonpath);
|
||||
IF jsonb_array_length(json_result) = 1 THEN
|
||||
text_out := replace(text_out, '@'||json_query||'@@', json_result->>0);
|
||||
ELSE
|
||||
text_out := replace(text_out, '@'||json_query||'@@', json_result::text);
|
||||
END IF;
|
||||
-- There might be multiple JSONPath queries, so we may have to recurse
|
||||
expect_recursion := true;
|
||||
END IF;
|
||||
|
||||
IF expect_recursion IS TRUE AND text_in != text_out THEN
|
||||
--RAISE NOTICE 'Recursing %', text_out;
|
||||
-- We don't know if we have found all the JSONPath expression
|
||||
-- so we do another pass.
|
||||
RETURN replace_placeholders(text_out, tstamp, sequence, point);
|
||||
ELSE
|
||||
RETURN text_out;
|
||||
END IF;
|
||||
|
||||
END;
|
||||
$_$;
|
||||
|
||||
|
||||
ALTER FUNCTION _SURVEY__TEMPLATE_.replace_placeholders(text_in text, tstamp timestamp with time zone, sequence integer, point integer) OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: FUNCTION replace_placeholders(text_in text, tstamp timestamp with time zone, sequence integer, point integer); Type: COMMENT; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
COMMENT ON FUNCTION _SURVEY__TEMPLATE_.replace_placeholders(text_in text, tstamp timestamp with time zone, sequence integer, point integer) IS 'Replace certain placeholder strings in the input text with data obtained from shot or real-time data.';
|
||||
|
||||
|
||||
--
|
||||
-- Name: scan_placeholders(); Type: PROCEDURE; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE PROCEDURE _SURVEY__TEMPLATE_.scan_placeholders()
|
||||
LANGUAGE sql
|
||||
AS $_$
|
||||
-- We update non read-only events via the event_log view to leave a trace
|
||||
-- of the fact that placeholders were replaced (and when).
|
||||
-- Note that this will not replace placeholders of old edits.
|
||||
UPDATE event_log
|
||||
SET remarks = replace_placeholders(remarks, tstamp, sequence, point)
|
||||
FROM (
|
||||
SELECT id
|
||||
FROM event_log e
|
||||
WHERE
|
||||
(meta->'readonly')::boolean IS NOT TRUE AND (
|
||||
regexp_match(remarks, '@DMS@|@POS@|@DEG@') IS NOT NULL OR
|
||||
regexp_match(remarks, '@BSP@|@WD@|@CMG@|@EN@|@GRID@|@(\$\..*?)@@') IS NOT NULL
|
||||
)
|
||||
) t
|
||||
WHERE event_log.id = t.id;
|
||||
|
||||
-- And then we update read-only events directly on the event_log_full table
|
||||
-- (as of this version of the schema we're prevented from updating read-only
|
||||
-- events via event_log anyway).
|
||||
UPDATE event_log_full
|
||||
SET remarks = replace_placeholders(remarks, tstamp, sequence, point)
|
||||
FROM (
|
||||
SELECT uid
|
||||
FROM event_log_full e
|
||||
WHERE
|
||||
(meta->'readonly')::boolean IS TRUE AND (
|
||||
regexp_match(remarks, '@DMS@|@POS@|@DEG@') IS NOT NULL OR
|
||||
regexp_match(remarks, '@BSP@|@WD@|@CMG@|@EN@|@GRID@|@(\$\..*?)@@') IS NOT NULL
|
||||
)
|
||||
) t
|
||||
WHERE event_log_full.uid = t.uid;
|
||||
$_$;
|
||||
|
||||
|
||||
ALTER PROCEDURE _SURVEY__TEMPLATE_.scan_placeholders() OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: PROCEDURE scan_placeholders(); Type: COMMENT; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
COMMENT ON PROCEDURE _SURVEY__TEMPLATE_.scan_placeholders() IS 'Run replace_placeholders() on the entire event log.';
|
||||
|
||||
|
||||
--
|
||||
-- Name: to_binning_grid(public.geometry); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
@@ -1211,6 +1615,58 @@ ALTER TABLE _SURVEY__TEMPLATE_.labels OWNER TO postgres;
|
||||
COMMENT ON TABLE _SURVEY__TEMPLATE_.labels IS 'Labels to attach to events, shots, or anything else really. Each level consists of a (unique) name and a JSON object with arbitrary label properties (intended to be used for label descriptions, colours, etc.)';
|
||||
|
||||
|
||||
--
|
||||
-- Name: midnight_shots; Type: VIEW; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE VIEW _SURVEY__TEMPLATE_.midnight_shots AS
|
||||
WITH straddlers AS (
|
||||
SELECT final_shots.sequence
|
||||
FROM _SURVEY__TEMPLATE_.final_shots
|
||||
GROUP BY final_shots.sequence
|
||||
HAVING (min(date(final_shots.tstamp)) <> max(date(final_shots.tstamp)))
|
||||
), ts AS (
|
||||
SELECT fs.sequence,
|
||||
min(fs.tstamp) AS ts0,
|
||||
max(fs.tstamp) AS ts1
|
||||
FROM (_SURVEY__TEMPLATE_.final_shots fs
|
||||
JOIN straddlers USING (sequence))
|
||||
GROUP BY fs.sequence, (date(fs.tstamp))
|
||||
ORDER BY fs.sequence, (date(fs.tstamp))
|
||||
), spts AS (
|
||||
SELECT DISTINCT ts.sequence,
|
||||
min(ts.ts1) OVER (PARTITION BY ts.sequence) AS ldsp,
|
||||
max(ts.ts0) OVER (PARTITION BY ts.sequence) AS fdsp
|
||||
FROM ts
|
||||
ORDER BY ts.sequence
|
||||
), evt AS (
|
||||
SELECT fs.tstamp,
|
||||
fs.sequence,
|
||||
fs.point,
|
||||
'Last shotpoint of the day'::text AS remarks,
|
||||
'{LDSP}'::text[] AS labels
|
||||
FROM (_SURVEY__TEMPLATE_.final_shots fs
|
||||
JOIN spts ON (((fs.sequence = spts.sequence) AND (fs.tstamp = spts.ldsp))))
|
||||
UNION
|
||||
SELECT fs.tstamp,
|
||||
fs.sequence,
|
||||
fs.point,
|
||||
'First shotpoint of the day'::text AS remarks,
|
||||
'{FDSP}'::text[] AS labels
|
||||
FROM (_SURVEY__TEMPLATE_.final_shots fs
|
||||
JOIN spts ON (((fs.sequence = spts.sequence) AND (fs.tstamp = spts.fdsp))))
|
||||
ORDER BY 1
|
||||
)
|
||||
SELECT evt.tstamp,
|
||||
evt.sequence,
|
||||
evt.point,
|
||||
evt.remarks,
|
||||
evt.labels
|
||||
FROM evt;
|
||||
|
||||
|
||||
ALTER TABLE _SURVEY__TEMPLATE_.midnight_shots OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: preplot_lines; Type: TABLE; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
162
etc/db/upgrades/upgrade19-v0.3.6-optimise-geometry-functions.sql
Normal file
162
etc/db/upgrades/upgrade19-v0.3.6-optimise-geometry-functions.sql
Normal file
@@ -0,0 +1,162 @@
|
||||
-- Fix not being able to edit a time-based event.
|
||||
--
|
||||
-- New schema version: 0.3.6
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This optimises geometry_from_tstamp() by many orders of magnitude
|
||||
-- (issue #241). The redefinition of geometry_from_tstamp() necessitates
|
||||
-- redefining dependent functions.
|
||||
--
|
||||
-- We also drop the index on real_time_inputs.meta->'tstamp' as it is no
|
||||
-- longer used.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', schema_name;
|
||||
-- We need to set the search path because some of the trigger
|
||||
-- functions reference other tables in survey schemas assuming
|
||||
-- they are in the search path.
|
||||
EXECUTE format('SET search_path TO %I,public', schema_name);
|
||||
|
||||
CREATE OR REPLACE PROCEDURE augment_event_data ()
|
||||
LANGUAGE sql
|
||||
AS $inner$
|
||||
-- Populate the timestamp of sequence / point events
|
||||
UPDATE event_log_full
|
||||
SET tstamp = tstamp_from_sequence_shot(sequence, point)
|
||||
WHERE
|
||||
tstamp IS NULL AND sequence IS NOT NULL AND point IS NOT NULL;
|
||||
|
||||
-- Populate the geometry of sequence / point events for which
|
||||
-- there is raw_shots data.
|
||||
UPDATE event_log_full
|
||||
SET meta = meta ||
|
||||
jsonb_build_object(
|
||||
'geometry',
|
||||
(
|
||||
SELECT st_transform(geometry, 4326)::jsonb
|
||||
FROM raw_shots rs
|
||||
WHERE rs.sequence = event_log_full.sequence AND rs.point = event_log_full.point
|
||||
)
|
||||
)
|
||||
WHERE
|
||||
sequence IS NOT NULL AND point IS NOT NULL AND
|
||||
NOT meta ? 'geometry';
|
||||
|
||||
-- Populate the geometry of time-based events
|
||||
UPDATE event_log_full e
|
||||
SET
|
||||
meta = meta || jsonb_build_object('geometry',
|
||||
(SELECT st_transform(g.geometry, 4326)::jsonb
|
||||
FROM geometry_from_tstamp(e.tstamp, 3) g))
|
||||
WHERE
|
||||
tstamp IS NOT NULL AND
|
||||
sequence IS NULL AND point IS NULL AND
|
||||
NOT meta ? 'geometry';
|
||||
|
||||
-- Get rid of null geometries
|
||||
UPDATE event_log_full
|
||||
SET
|
||||
meta = meta - 'geometry'
|
||||
WHERE
|
||||
jsonb_typeof(meta->'geometry') = 'null';
|
||||
|
||||
-- Simplify the GeoJSON when the CRS is EPSG:4326
|
||||
UPDATE event_log_full
|
||||
SET
|
||||
meta = meta #- '{geometry, crs}'
|
||||
WHERE
|
||||
meta->'geometry'->'crs'->'properties'->>'name' = 'EPSG:4326';
|
||||
|
||||
$inner$;
|
||||
|
||||
COMMENT ON PROCEDURE augment_event_data()
|
||||
IS 'Populate missing timestamps and geometries in event_log_full';
|
||||
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
BEGIN
|
||||
|
||||
CALL show_notice('Dropping index from real_time_inputs.meta->tstamp');
|
||||
DROP INDEX IF EXISTS meta_tstamp_idx;
|
||||
|
||||
CALL show_notice('Creating function geometry_from_tstamp');
|
||||
CREATE OR REPLACE FUNCTION public.geometry_from_tstamp(
|
||||
IN ts timestamptz,
|
||||
IN tolerance numeric,
|
||||
OUT "geometry" geometry,
|
||||
OUT "delta" numeric)
|
||||
AS $inner$
|
||||
SELECT
|
||||
geometry,
|
||||
extract('epoch' FROM tstamp - ts ) AS delta
|
||||
FROM real_time_inputs
|
||||
WHERE
|
||||
geometry IS NOT NULL AND
|
||||
tstamp BETWEEN (ts - tolerance * interval '1 second') AND (ts + tolerance * interval '1 second')
|
||||
ORDER BY abs(extract('epoch' FROM tstamp - ts ))
|
||||
LIMIT 1;
|
||||
$inner$ LANGUAGE SQL;
|
||||
|
||||
COMMENT ON FUNCTION public.geometry_from_tstamp(timestamptz, numeric)
|
||||
IS 'Get geometry from timestamp';
|
||||
|
||||
FOR row IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'survey_%'
|
||||
ORDER BY schema_name
|
||||
LOOP
|
||||
CALL pg_temp.upgrade_survey_schema(row.schema_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.6"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.3.6"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
@@ -0,0 +1,254 @@
|
||||
-- Update adjust_planner() for the new events schema (the `event` view was replaced by `event_log`).
|
||||
--
|
||||
-- New schema version: 0.3.7
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This updates the adjust_planner() procedure to take into account the
|
||||
-- new events schema (the `event` view has been replaced by `event_log`).
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;

-- Temporary helper used to emit progress messages while the upgrade runs.
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE USING MESSAGE = notice;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Per-schema upgrade: replaces adjust_planner() in the given survey schema.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CALL pg_temp.show_notice('Replacing adjust_planner() procedure');
  -- adjust_planner() re-aligns the line planner (planned_lines) with reality
  -- after the latest acquired sequence: it renumbers, time-shifts and prunes
  -- planned lines based on the most recent row in sequences_summary.
  CREATE OR REPLACE PROCEDURE adjust_planner()
  LANGUAGE plpgsql
  AS $$
  DECLARE
    _planner_config jsonb;                      -- the 'planner' object from file_data
    _planned_line planned_lines%ROWTYPE;        -- planned line matching the last shot sequence
    _lag interval;                              -- NOTE(review): computed but not used below — confirm intentional
    _last_sequence sequences_summary%ROWTYPE;   -- most recently shot sequence
    _deltatime interval;                        -- correction applied to planned timestamps
    _shotinterval interval;                     -- observed time between shots
    _tstamp timestamptz;                        -- actual end-of-line time
    _incr integer;                              -- NOTE(review): computed but not used below — confirm intentional
  BEGIN

    SET CONSTRAINTS planned_lines_pkey DEFERRED;

    SELECT data->'planner'
    INTO _planner_config
    FROM file_data
    WHERE data ? 'planner';

    -- Latest sequence actually shot.
    SELECT *
    INTO _last_sequence
    FROM sequences_summary
    ORDER BY sequence DESC
    LIMIT 1;

    -- Planned line (if any) matching that sequence.
    SELECT *
    INTO _planned_line
    FROM planned_lines
    WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;

    -- Line change time to the next planned line; falls back to the configured
    -- default. NOTE(review): lead() over a single-row filtered set is always
    -- NULL here, so the COALESCE default always applies — confirm intended.
    SELECT
      COALESCE(
        ((lead(ts0) OVER (ORDER BY sequence)) - ts1),
        make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer)
      )
    INTO _lag
    FROM planned_lines
    WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;

    -- Shooting direction: sign of (last SP - first SP).
    _incr = sign(_last_sequence.lsp - _last_sequence.fsp);

    RAISE NOTICE '_planner_config: %', _planner_config;
    RAISE NOTICE '_last_sequence: %', _last_sequence;
    RAISE NOTICE '_planned_line: %', _planned_line;
    RAISE NOTICE '_incr: %', _incr;

    -- Does the latest sequence match a planned sequence?
    IF _planned_line IS NULL THEN -- No it doesn't
      RAISE NOTICE 'Latest sequence shot does not match a planned sequence';
      -- Use the earliest planned line as the reference instead.
      SELECT * INTO _planned_line FROM planned_lines ORDER BY sequence ASC LIMIT 1;
      RAISE NOTICE '_planned_line: %', _planned_line;

      IF _planned_line.sequence <= _last_sequence.sequence THEN
        RAISE NOTICE 'Renumbering the planned sequences starting from %', _planned_line.sequence + 1;
        -- Renumber the planned sequences starting from last shot sequence number + 1
        UPDATE planned_lines
        SET sequence = sequence + _last_sequence.sequence - _planned_line.sequence + 1;
      END IF;

      -- The correction to make to the first planned line's ts0 will be based on either the last
      -- sequence's EOL + default line change time or the current time, whichever is later.
      _deltatime := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1) + make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer), current_timestamp) - _planned_line.ts0;

      -- Is the first of the planned lines start time in the past? (±5 mins)
      IF _planned_line.ts0 < (current_timestamp - make_interval(mins => 5)) THEN
        RAISE NOTICE 'First planned line is in the past. Adjusting times by %', _deltatime;
        -- Adjust the start / end time of the planned lines by assuming that we are at
        -- `defaultLineChangeDuration` minutes away from SOL of the first planned line.
        UPDATE planned_lines
        SET
          ts0 = ts0 + _deltatime,
          ts1 = ts1 + _deltatime;
      END IF;

    ELSE -- Yes it does
      RAISE NOTICE 'Latest sequence does match a planned sequence: %, %', _planned_line.sequence, _planned_line.line;

      -- Is it online?
      IF EXISTS(SELECT 1 FROM raw_lines_files WHERE sequence = _last_sequence.sequence AND hash = '*online*') THEN
        -- Yes it is
        RAISE NOTICE 'Sequence % is online', _last_sequence.sequence;

        -- Let us get the SOL from the events log if we can
        RAISE NOTICE 'Trying to set fsp, ts0 from events log FSP, FGSP';
        WITH e AS (
          SELECT * FROM event_log
          WHERE
            sequence = _last_sequence.sequence
            AND ('FSP' = ANY(labels) OR 'FGSP' = ANY(labels))
          ORDER BY tstamp LIMIT 1
        )
        UPDATE planned_lines
        SET
          fsp = COALESCE(e.point, fsp),
          ts0 = COALESCE(e.tstamp, ts0)
        FROM e
        WHERE planned_lines.sequence = _last_sequence.sequence;

        -- Shot interval
        _shotinterval := (_last_sequence.ts1 - _last_sequence.ts0) / abs(_last_sequence.lsp - _last_sequence.fsp);

        RAISE NOTICE 'Estimating EOL from current shot interval: %', _shotinterval;

        -- Difference between the EOL predicted from the shot interval and
        -- the currently planned EOL.
        SELECT (abs(lsp-fsp) * _shotinterval + ts0) - ts1
        INTO _deltatime
        FROM planned_lines
        WHERE sequence = _last_sequence.sequence;

        ---- Set ts1 for the current sequence
        --UPDATE planned_lines
        --SET
        --ts1 = (abs(lsp-fsp) * _shotinterval) + ts0
        --WHERE sequence = _last_sequence.sequence;

        RAISE NOTICE 'Adjustment is %', _deltatime;

        IF abs(EXTRACT(EPOCH FROM _deltatime)) < 8 THEN
          RAISE NOTICE 'Adjustment too small (< 8 s), so not applying it';
          RETURN;
        END IF;

        -- Adjust ts1 for the current sequence
        UPDATE planned_lines
        SET ts1 = ts1 + _deltatime
        WHERE sequence = _last_sequence.sequence;

        -- Now shift all sequences after
        UPDATE planned_lines
        SET ts0 = ts0 + _deltatime, ts1 = ts1 + _deltatime
        WHERE sequence > _last_sequence.sequence;

        RAISE NOTICE 'Deleting planned sequences before %', _planned_line.sequence;
        -- Remove all previous planner entries.
        DELETE
        FROM planned_lines
        WHERE sequence < _last_sequence.sequence;

      ELSE
        -- No it isn't
        RAISE NOTICE 'Sequence % is offline', _last_sequence.sequence;

        -- We were supposed to finish at _planned_line.ts1 but we finished at:
        _tstamp := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1), current_timestamp);
        -- WARNING Next line is for testing only
        --_tstamp := COALESCE(_last_sequence.ts1_final, _last_sequence.ts1);
        -- So we need to adjust timestamps by:
        _deltatime := _tstamp - _planned_line.ts1;

        RAISE NOTICE 'Planned end: %, actual end: % (%, %)', _planned_line.ts1, _tstamp, _planned_line.sequence, _last_sequence.sequence;
        RAISE NOTICE 'Shifting times by % for sequences > %', _deltatime, _planned_line.sequence;
        -- NOTE: This won't work if sequences are not, err… sequential.
        -- NOTE: This has been known to happen in 2020.
        UPDATE planned_lines
        SET
          ts0 = ts0 + _deltatime,
          ts1 = ts1 + _deltatime
        WHERE sequence > _planned_line.sequence;

        RAISE NOTICE 'Deleting planned sequences up to %', _planned_line.sequence;
        -- Remove all previous planner entries.
        DELETE
        FROM planned_lines
        WHERE sequence <= _last_sequence.sequence;

      END IF;

    END IF;
  END;
  $$;

END;
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
-- Driver: apply the per-schema upgrade to every survey schema in turn.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  sch RECORD;
BEGIN
  FOR sch IN
    SELECT schema_name
      FROM information_schema.schemata
     WHERE schema_name LIKE 'survey_%'
     ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(sch.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
-- Run the upgrade across all survey schemas.
CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
-- Drop the temporary helper procedures now that the upgrade has run.
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
-- Record the new schema version, merging it into any existing version info.
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.7"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.7"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
267
etc/db/upgrades/upgrade21-v0.3.8-add-event-data-functions.sql
Normal file
267
etc/db/upgrades/upgrade21-v0.3.8-add-event-data-functions.sql
Normal file
@@ -0,0 +1,267 @@
|
||||
-- Add event_position() and event_meta() functions for retrieving event positions and metadata (#229).
|
||||
--
|
||||
-- New schema version: 0.3.8
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This adds event_position() and event_meta() functions which are used
|
||||
-- to retrieve position or metadata, respectively, given either a timestamp
|
||||
-- or a sequence / point pair. Intended to be used in the context of #229.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;

-- Helper used to emit progress messages while the upgrade runs.
-- NOTE(review): unlike other upgrade scripts this is not created in pg_temp,
-- so it lives in the regular schema until dropped at the end — confirm intended.
CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE USING MESSAGE = notice;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Per-schema upgrade: installs event_position() and its overloads in the
-- given survey schema.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  --
  -- event_position(): Fetch event position
  --
  -- Resolution order: final_shots/raw_shots by sequence+point, then shot
  -- tables by timestamp, then real-time data (geometry_from_tstamp).
  CREATE OR REPLACE FUNCTION event_position (
    tstamp timestamptz, sequence integer, point integer, tolerance numeric
  )
  RETURNS geometry
  AS $$
  DECLARE
    position geometry;
  BEGIN

    -- Try and get position by sequence / point first
    IF sequence IS NOT NULL AND point IS NOT NULL THEN
      -- Try and get the position from final_shots or raw_shots
      SELECT COALESCE(f.geometry, r.geometry) geometry
      INTO position
      FROM raw_shots r LEFT JOIN final_shots f USING (sequence, point)
      WHERE r.sequence = event_position.sequence AND r.point = event_position.point;

      IF position IS NOT NULL THEN
        RETURN position;
      ELSIF tstamp IS NULL THEN
        -- Get the timestamp for the sequence / point, if we can.
        -- It will be used later in the function as we fall back
        -- to timestamp based search.
        -- We also adjust the tolerance as we're now dealing with
        -- an exact timestamp.
        SELECT COALESCE(f.tstamp, r.tstamp) tstamp, 0.002 tolerance
        INTO tstamp, tolerance
        FROM raw_shots r LEFT JOIN final_shots f USING (sequence, point)
        WHERE r.sequence = event_position.sequence AND r.point = event_position.point;
      END IF;
    END IF;

    -- If we got here, we better have a timestamp
    -- First attempt, get a position from final_shots, raw_shots. This may
    -- be redundant if we got here from the position of having a sequence /
    -- point without a position, but never mind.
    SELECT COALESCE(f.geometry, r.geometry) geometry
    INTO position
    FROM raw_shots r LEFT JOIN final_shots f USING (sequence, point)
    WHERE r.tstamp = event_position.tstamp OR f.tstamp = event_position.tstamp
    LIMIT 1; -- Just to be sure

    IF position IS NULL THEN
      -- Ok, so everything else so far has failed, let's try and get this
      -- from real time data. We skip the search via sequence / point and
      -- go directly for timestamp.
      SELECT geometry
      INTO position
      FROM geometry_from_tstamp(tstamp, tolerance);
    END IF;

    RETURN position;

  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_position (timestamptz, integer, integer, numeric) IS
'Return the position associated with a sequence / point in the current project or
with a given timestamp. Timestamp that is first searched for in the shot tables
of the current prospect or, if not found, in the real-time data.

Returns a geometry.';

  -- Overload: default tolerance.
  CREATE OR REPLACE FUNCTION event_position (
    tstamp timestamptz, sequence integer, point integer
  )
  RETURNS geometry
  AS $$
  BEGIN
    RETURN event_position(tstamp, sequence, point, 3);
  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_position (timestamptz, integer, integer) IS
'Overload of event_position with a default tolerance of three seconds.';


  -- Overload: timestamp-only lookup.
  CREATE OR REPLACE FUNCTION event_position (
    tstamp timestamptz
  )
  RETURNS geometry
  AS $$
  BEGIN
    RETURN event_position(tstamp, NULL, NULL);
  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_position (timestamptz) IS
'Overload of event_position (timestamptz, integer, integer) for use when searching by timestamp.';

  -- Overload: sequence / point lookup.
  CREATE OR REPLACE FUNCTION event_position (
    sequence integer, point integer
  )
  RETURNS geometry
  AS $$
  BEGIN
    RETURN event_position(NULL, sequence, point);
  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_position (integer, integer) IS
'Overload of event_position (timestamptz, integer, integer) for use when searching by sequence / point.';


END;
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
-- Driver: installs event_meta() and then applies the per-schema upgrade to
-- every survey schema.
-- NOTE(review): event_meta() is created here, before the per-schema loop, so
-- unlike event_position() it is created only once in the session's current
-- search_path rather than once per survey schema — confirm intentional.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
BEGIN

  --
  -- event_meta(): Fetch event metadata
  --
  CREATE OR REPLACE FUNCTION event_meta (
    tstamp timestamptz, sequence integer, point integer
  )
  RETURNS jsonb
  AS $$
  DECLARE
    result jsonb;
    -- Tolerance is hard-coded, at least until a need to expose arises.
    tolerance numeric;
  BEGIN
    tolerance := 3; -- seconds

    -- We search by timestamp if we can, as that's a lot quicker
    IF tstamp IS NOT NULL THEN

      -- Nearest real-time record within ±tolerance seconds of the timestamp.
      SELECT meta
      INTO result
      FROM real_time_inputs rti
      WHERE
        rti.tstamp BETWEEN (event_meta.tstamp - tolerance * interval '1 second') AND (event_meta.tstamp + tolerance * interval '1 second')
      ORDER BY abs(extract('epoch' FROM rti.tstamp - event_meta.tstamp ))
      LIMIT 1;

    ELSE

      -- Latest real-time record tagged with the given sequence / point.
      SELECT meta
      INTO result
      FROM real_time_inputs rti
      WHERE
        (meta->>'_sequence')::integer = event_meta.sequence AND
        (meta->>'_point')::integer = event_meta.point
      ORDER BY rti.tstamp DESC
      LIMIT 1;

    END IF;

    RETURN result;

  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_meta (timestamptz, integer, integer) IS
'Return the real-time event metadata associated with a sequence / point in the current project or
with a given timestamp. Timestamp that is first searched for in the shot tables
of the current prospect or, if not found, in the real-time data.

Returns a JSONB object.';


  -- Overload: timestamp-only lookup.
  CREATE OR REPLACE FUNCTION event_meta (
    tstamp timestamptz
  )
  RETURNS jsonb
  AS $$
  BEGIN
    RETURN event_meta(tstamp, NULL, NULL);
  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_meta (timestamptz) IS
'Overload of event_meta (timestamptz, integer, integer) for use when searching by timestamp.';

  -- Overload: sequence / point lookup.
  CREATE OR REPLACE FUNCTION event_meta (
    sequence integer, point integer
  )
  RETURNS jsonb
  AS $$
  BEGIN
    RETURN event_meta(NULL, sequence, point);
  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION event_meta (integer, integer) IS
'Overload of event_meta (timestamptz, integer, integer) for use when searching by sequence / point.';

  -- Apply the per-schema upgrade (event_position) to every survey schema.
  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
-- Run the upgrade across all survey schemas.
CALL pg_temp.upgrade();

CALL show_notice('Cleaning up');
-- Drop the temporary helper procedures now that the upgrade has run.
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL show_notice('Updating db_schema version');
-- Record the new schema version, merging it into any existing version info.
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.8"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.8"}' WHERE public.info.key = 'version';


CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
-- show_notice was created in the regular (non-temp) schema, so drop it here.
DROP PROCEDURE show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
@@ -0,0 +1,229 @@
|
||||
-- Add replace_placeholders() and scan_placeholders() for expanding placeholder tokens in event remarks.
|
||||
--
|
||||
-- New schema version: 0.3.9
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This defines a replace_placeholders() function, taking as arguments
|
||||
-- a text string and either a timestamp or a sequence / point pair. It
|
||||
-- uses the latter arguments to find metadata from which it can extract
|
||||
-- relevant information and replace it into the text string wherever the
|
||||
-- appropriate placeholders appear. For instance, given a call such as
|
||||
-- replace_placeholders('The position is @POS@', NULL, 11, 2600) it will
|
||||
-- replace '@POS@' with the position of point 2600 in sequence 11, if it
|
||||
-- exists (or leave the placeholder untouched otherwise).
|
||||
--
|
||||
-- A scan_placeholders() procedure is also defined, which calls the above
|
||||
-- function on the entire event log.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;

-- Helper used to emit progress messages while the upgrade runs.
-- NOTE(review): created in the regular schema (not pg_temp) and dropped at
-- the end of the script — confirm intended.
CREATE OR REPLACE PROCEDURE show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE USING MESSAGE = notice;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Per-schema upgrade: installs replace_placeholders() and scan_placeholders()
-- in the given survey schema.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  -- Replace placeholder tokens (@DMS@, @POS@, @DEG@, @EN@, @GRID@, @CMG@,
  -- @BSP@, @WD@ and @$.jsonpath@@ queries) in text_in with values derived
  -- from event_position() / event_meta() for the given timestamp or
  -- sequence / point. Unresolvable placeholders are left untouched.
  CREATE OR REPLACE FUNCTION replace_placeholders (
    text_in text, tstamp timestamptz, sequence integer, point integer
  )
  RETURNS text
  AS $$
  DECLARE
    -- NOTE(review): this variable shares its name with the SQL position()
    -- function used below; PL/pgSQL resolves the call syntax to the
    -- function, but the shadowing is fragile.
    position geometry;
    metadata jsonb;
    text_out text;

    json_query text;
    json_result jsonb;
    expect_recursion boolean := false;
  BEGIN

    text_out := text_in;

    -- We only get a position if we are going to need it…
    IF regexp_match(text_out, '@DMS@|@POS@|@DEG@') IS NOT NULL THEN
      position := ST_Transform(event_position(tstamp, sequence, point), 4326);
    END IF;

    -- …and likewise with the metadata.
    IF regexp_match(text_out, '@BSP@|@WD@|@CMG@|@EN@|@GRID@|@(\$\..*?)@@') IS NOT NULL THEN
      metadata := event_meta(tstamp, sequence, point);
    END IF;

    -- We shortcut the evaluation if neither of the above regexps matched
    IF position IS NULL AND metadata IS NULL THEN
      RETURN text_out;
    END IF;

    IF position('@DMS@' IN text_out) != 0 THEN
      text_out := replace(text_out, '@DMS@', ST_AsLatLonText(position));
    END IF;

    IF position('@POS@' IN text_out) != 0 THEN
      text_out := replace(text_out, '@POS@', replace(ST_AsLatLonText(position, 'D.DDDDDD'), ' ', ', '));
    END IF;

    -- NOTE(review): @DEG@ is replaced identically to @POS@ — confirm whether
    -- a different format was intended.
    IF position('@DEG@' IN text_out) != 0 THEN
      text_out := replace(text_out, '@DEG@', replace(ST_AsLatLonText(position, 'D.DDDDDD'), ' ', ', '));
    END IF;

    IF position('@EN@' IN text_out) != 0 THEN
      IF metadata ? 'easting' AND metadata ? 'northing' THEN
        text_out := replace(text_out, '@EN@', (metadata->>'easting') || ', ' || (metadata->>'northing'));
      END IF;
    END IF;

    -- @GRID@ is an alias producing the same easting / northing output.
    IF position('@GRID@' IN text_out) != 0 THEN
      IF metadata ? 'easting' AND metadata ? 'northing' THEN
        text_out := replace(text_out, '@GRID@', (metadata->>'easting') || ', ' || (metadata->>'northing'));
      END IF;
    END IF;

    IF position('@CMG@' IN text_out) != 0 THEN
      IF metadata ? 'bearing' THEN
        text_out := replace(text_out, '@CMG@', metadata->>'bearing');
      END IF;
    END IF;

    -- Speed converted from m/s to knots (× 3600 / 1852), one decimal place.
    IF position('@BSP@' IN text_out) != 0 THEN
      IF metadata ? 'speed' THEN
        text_out := replace(text_out, '@BSP@', round((metadata->>'speed')::numeric * 3600 / 1852, 1)::text);
      END IF;
    END IF;

    IF position('@WD@' IN text_out) != 0 THEN
      IF metadata ? 'waterDepth' THEN
        text_out := replace(text_out, '@WD@', metadata->>'waterDepth');
      END IF;
    END IF;

    -- Arbitrary JSONPath queries against the metadata: @$.path@@ tokens.
    json_query := (regexp_match(text_out, '@(\$\..*?)@@'))[1];
    IF json_query IS NOT NULL THEN
      json_result := jsonb_path_query_array(metadata, json_query::jsonpath);
      IF jsonb_array_length(json_result) = 1 THEN
        text_out := replace(text_out, '@'||json_query||'@@', json_result->>0);
      ELSE
        text_out := replace(text_out, '@'||json_query||'@@', json_result::text);
      END IF;
      -- There might be multiple JSONPath queries, so we may have to recurse
      expect_recursion := true;
    END IF;

    IF expect_recursion IS TRUE AND text_in != text_out THEN
      --RAISE NOTICE 'Recursing %', text_out;
      -- We don't know if we have found all the JSONPath expression
      -- so we do another pass.
      RETURN replace_placeholders(text_out, tstamp, sequence, point);
    ELSE
      RETURN text_out;
    END IF;

  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION replace_placeholders (text, timestamptz, integer, integer) IS
'Replace certain placeholder strings in the input text with data obtained from shot or real-time data.';


  -- Run replace_placeholders() over every remark in the event log that
  -- contains at least one placeholder token.
  CREATE OR REPLACE PROCEDURE scan_placeholders ()
  LANGUAGE sql
  AS $$
    -- We update non read-only events via the event_log view to leave a trace
    -- of the fact that placeholders were replaced (and when).
    -- Note that this will not replace placeholders of old edits.
    UPDATE event_log
    SET remarks = replace_placeholders(remarks, tstamp, sequence, point)
    FROM (
      SELECT id
      FROM event_log e
      WHERE
        (meta->'readonly')::boolean IS NOT TRUE AND (
          regexp_match(remarks, '@DMS@|@POS@|@DEG@') IS NOT NULL OR
          regexp_match(remarks, '@BSP@|@WD@|@CMG@|@EN@|@GRID@|@(\$\..*?)@@') IS NOT NULL
        )
    ) t
    WHERE event_log.id = t.id;

    -- And then we update read-only events directly on the event_log_full table
    -- (as of this version of the schema we're prevented from updating read-only
    -- events via event_log anyway).
    UPDATE event_log_full
    SET remarks = replace_placeholders(remarks, tstamp, sequence, point)
    FROM (
      SELECT uid
      FROM event_log_full e
      WHERE
        (meta->'readonly')::boolean IS TRUE AND (
          regexp_match(remarks, '@DMS@|@POS@|@DEG@') IS NOT NULL OR
          regexp_match(remarks, '@BSP@|@WD@|@CMG@|@EN@|@GRID@|@(\$\..*?)@@') IS NOT NULL
        )
    ) t
    WHERE event_log_full.uid = t.uid;
  $$;

  COMMENT ON PROCEDURE scan_placeholders () IS
'Run replace_placeholders() on the entire event log.';

END;
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
-- Driver: apply the per-schema upgrade to every survey schema in turn.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  sch RECORD;
BEGIN
  FOR sch IN
    SELECT schema_name
      FROM information_schema.schemata
     WHERE schema_name LIKE 'survey_%'
     ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(sch.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
-- Run the upgrade across all survey schemas.
CALL pg_temp.upgrade();

CALL show_notice('Cleaning up');
-- Drop the temporary helper procedures now that the upgrade has run.
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL show_notice('Updating db_schema version');
-- Record the new schema version, merging it into any existing version info.
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.9"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.9"}' WHERE public.info.key = 'version';


CALL show_notice('All done. You may now run "COMMIT;" to persist the changes');
-- show_notice was created in the regular (non-temp) schema, so drop it here.
DROP PROCEDURE show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
@@ -0,0 +1,127 @@
|
||||
-- Add interpolate_geometry_from_tstamp() for interpolating positions from real-time inputs.
|
||||
--
|
||||
-- New schema version: 0.3.10
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects only the public schema.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This defines a interpolate_geometry_from_tstamp(), taking a timestamp
|
||||
-- and a maximum timespan in seconds. It will then interpolate a position
|
||||
-- at the exact timestamp based on data from real_time_inputs, provided
|
||||
-- that the effective interpolation timespan does not exceed the maximum
|
||||
-- requested.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;

-- Temporary helper used to emit progress messages while the upgrade runs.
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE USING MESSAGE = notice;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Installs interpolate_geometry_from_tstamp() in the public schema.
-- (This upgrade affects only the public schema — no per-schema loop.)
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
BEGIN

  CALL pg_temp.show_notice('Defining interpolate_geometry_from_tstamp()');

  -- Linearly interpolate a position at the exact timestamp ts from the two
  -- real_time_inputs records straddling it, provided the straddling records
  -- are no more than maxspan seconds apart. Returns NULL on failure.
  CREATE OR REPLACE FUNCTION public.interpolate_geometry_from_tstamp(
    IN ts timestamptz,
    IN maxspan numeric
  )
  RETURNS geometry
  AS $$
  DECLARE
    ts0 timestamptz;      -- timestamp of the record at or before ts
    ts1 timestamptz;      -- timestamp of the record at or after ts
    geom0 geometry;
    geom1 geometry;
    span numeric;         -- seconds between ts0 and ts1
    fraction numeric;     -- position of ts within [ts0, ts1], 0..1
  BEGIN

    -- Nearest record at or before ts.
    SELECT tstamp, geometry
    INTO ts0, geom0
    FROM real_time_inputs
    WHERE tstamp <= ts
    ORDER BY tstamp DESC
    LIMIT 1;

    -- Nearest record at or after ts.
    SELECT tstamp, geometry
    INTO ts1, geom1
    FROM real_time_inputs
    WHERE tstamp >= ts
    ORDER BY tstamp ASC
    LIMIT 1;

    IF geom0 IS NULL OR geom1 IS NULL THEN
      RAISE NOTICE 'Interpolation failed (no straddling data)';
      RETURN NULL;
    END IF;

    -- See if we got an exact match
    -- (this also guards against span being zero further down)
    IF ts0 = ts THEN
      RETURN geom0;
    ELSIF ts1 = ts THEN
      RETURN geom1;
    END IF;

    span := extract('epoch' FROM ts1 - ts0);

    IF span > maxspan THEN
      RAISE NOTICE 'Interpolation timespan % outside maximum requested (%)', span, maxspan;
      RETURN NULL;
    END IF;

    fraction := extract('epoch' FROM ts - ts0) / span;

    IF fraction < 0 OR fraction > 1 THEN
      RAISE NOTICE 'Requested timestamp % outside of interpolation span (fraction: %)', ts, fraction;
      RETURN NULL;
    END IF;

    -- Interpolate along the straight line between the two fixes.
    RETURN ST_LineInterpolatePoint(St_MakeLine(geom0, geom1), fraction);

  END;
  $$ LANGUAGE plpgsql;

  COMMENT ON FUNCTION public.interpolate_geometry_from_tstamp(timestamptz, numeric) IS
'Interpolate a position over a given maximum timespan (in seconds)
based on real-time inputs. Returns a POINT geometry.';


END;
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
-- Run the upgrade (public schema only).
CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
-- Drop the temporary helper procedure now that the upgrade has run.
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
-- Record the new schema version, merging it into any existing version info.
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.10"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.10"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
@@ -0,0 +1,149 @@
|
||||
-- Fix not being able to edit a time-based event.
|
||||
--
|
||||
-- New schema version: 0.3.11
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This redefines augment_event_data() to use interpolation rather than
|
||||
-- nearest neighbour. It now takes an argument indicating the maximum
|
||||
-- allowed interpolation timespan. An overload with a default of ten
|
||||
-- minutes is also provided, as an in situ replacement for the previous
|
||||
-- version.
|
||||
--
|
||||
-- The ten minute default is based on Triggerfish headers behaviour seen
|
||||
-- on crew 248 during soft starts.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', schema_name;
|
||||
-- We need to set the search path because some of the trigger
|
||||
-- functions reference other tables in survey schemas assuming
|
||||
-- they are in the search path.
|
||||
EXECUTE format('SET search_path TO %I,public', schema_name);
|
||||
|
||||
CREATE OR REPLACE PROCEDURE augment_event_data (maxspan numeric)
|
||||
LANGUAGE sql
|
||||
AS $$
|
||||
-- Populate the timestamp of sequence / point events
|
||||
UPDATE event_log_full
|
||||
SET tstamp = tstamp_from_sequence_shot(sequence, point)
|
||||
WHERE
|
||||
tstamp IS NULL AND sequence IS NOT NULL AND point IS NOT NULL;
|
||||
|
||||
-- Populate the geometry of sequence / point events for which
|
||||
-- there is raw_shots data.
|
||||
UPDATE event_log_full
|
||||
SET meta = meta ||
|
||||
jsonb_build_object(
|
||||
'geometry',
|
||||
(
|
||||
SELECT st_transform(geometry, 4326)::jsonb
|
||||
FROM raw_shots rs
|
||||
WHERE rs.sequence = event_log_full.sequence AND rs.point = event_log_full.point
|
||||
)
|
||||
)
|
||||
WHERE
|
||||
sequence IS NOT NULL AND point IS NOT NULL AND
|
||||
NOT meta ? 'geometry';
|
||||
|
||||
-- Populate the geometry of time-based events
|
||||
UPDATE event_log_full e
|
||||
SET
|
||||
meta = meta || jsonb_build_object('geometry',
|
||||
(SELECT st_transform(g.geometry, 4326)::jsonb
|
||||
FROM interpolate_geometry_from_tstamp(e.tstamp, maxspan) g))
|
||||
WHERE
|
||||
tstamp IS NOT NULL AND
|
||||
sequence IS NULL AND point IS NULL AND
|
||||
NOT meta ? 'geometry';
|
||||
|
||||
-- Get rid of null geometries
|
||||
UPDATE event_log_full
|
||||
SET
|
||||
meta = meta - 'geometry'
|
||||
WHERE
|
||||
jsonb_typeof(meta->'geometry') = 'null';
|
||||
|
||||
-- Simplify the GeoJSON when the CRS is EPSG:4326
|
||||
UPDATE event_log_full
|
||||
SET
|
||||
meta = meta #- '{geometry, crs}'
|
||||
WHERE
|
||||
meta->'geometry'->'crs'->'properties'->>'name' = 'EPSG:4326';
|
||||
|
||||
$$;
|
||||
|
||||
COMMENT ON PROCEDURE augment_event_data(numeric)
|
||||
IS 'Populate missing timestamps and geometries in event_log_full';
|
||||
|
||||
CREATE OR REPLACE PROCEDURE augment_event_data ()
|
||||
LANGUAGE sql
|
||||
AS $$
|
||||
CALL augment_event_data(600);
|
||||
$$;
|
||||
|
||||
COMMENT ON PROCEDURE augment_event_data()
|
||||
IS 'Overload of augment_event_data(maxspan numeric) with a maxspan value of 600 seconds.';
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
BEGIN
|
||||
|
||||
FOR row IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'survey_%'
|
||||
ORDER BY schema_name
|
||||
LOOP
|
||||
CALL pg_temp.upgrade_survey_schema(row.schema_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.11"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.3.11"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
@@ -0,0 +1,193 @@
|
||||
-- Fix not being able to edit a time-based event.
|
||||
--
|
||||
-- New schema version: 0.3.12
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This defines a midnight_shots view and a log_midnight_shots() procedure
|
||||
-- (with some overloads). The view returns all points straddling midnight
|
||||
-- UTC and belonging to the same sequence (so last shot of the day and
|
||||
-- first shot of the next day).
|
||||
--
|
||||
-- The procedure inserts the corresponding events (optionally constrained
|
||||
-- by an earliest and a latest date) in the event log, unless the events
|
||||
-- already exist.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', schema_name;
|
||||
-- We need to set the search path because some of the trigger
|
||||
-- functions reference other tables in survey schemas assuming
|
||||
-- they are in the search path.
|
||||
EXECUTE format('SET search_path TO %I,public', schema_name);
|
||||
|
||||
CREATE OR REPLACE VIEW midnight_shots AS
|
||||
WITH straddlers AS (
|
||||
-- Get sequence numbers straddling midnight UTC
|
||||
SELECT sequence
|
||||
FROM final_shots
|
||||
GROUP BY sequence
|
||||
HAVING min(date(tstamp)) != max(date(tstamp))
|
||||
),
|
||||
ts AS (
|
||||
-- Get earliest and latest timestamps for each day
|
||||
-- for each of the above sequences.
|
||||
-- This will return the timestamps for:
|
||||
-- FSP, LDSP, FDSP, LSP.
|
||||
SELECT
|
||||
fs.sequence,
|
||||
min(fs.tstamp) AS ts0,
|
||||
max(fs.tstamp) AS ts1
|
||||
FROM final_shots fs INNER JOIN straddlers USING (sequence)
|
||||
GROUP BY fs.sequence, (date(fs.tstamp))
|
||||
ORDER BY fs.sequence, date(fs.tstamp)
|
||||
),
|
||||
spts AS (
|
||||
-- Filter out FSP, LSP from the above.
|
||||
-- NOTE: This *should* in theory be able to cope with
|
||||
-- a sequence longer than 24 hours (so with more than
|
||||
-- one LDSP, FDSP) but that hasn't been tested.
|
||||
SELECT DISTINCT
|
||||
sequence,
|
||||
min(ts1) OVER (PARTITION BY sequence) ldsp,
|
||||
max(ts0) OVER (PARTITION BY sequence) fdsp
|
||||
FROM ts
|
||||
ORDER BY sequence
|
||||
), evt AS (
|
||||
SELECT
|
||||
fs.tstamp,
|
||||
fs.sequence,
|
||||
point,
|
||||
'Last shotpoint of the day' remarks,
|
||||
'{LDSP}'::text[] labels
|
||||
FROM final_shots fs
|
||||
INNER JOIN spts ON fs.sequence = spts.sequence AND fs.tstamp = spts.ldsp
|
||||
UNION SELECT
|
||||
fs.tstamp,
|
||||
fs.sequence,
|
||||
point,
|
||||
'First shotpoint of the day' remarks,
|
||||
'{FDSP}'::text[] labels
|
||||
FROM final_shots fs
|
||||
INNER JOIN spts ON fs.sequence = spts.sequence AND fs.tstamp = spts.fdsp
|
||||
ORDER BY tstamp
|
||||
)
|
||||
SELECT * FROM evt;
|
||||
|
||||
|
||||
CREATE OR REPLACE PROCEDURE log_midnight_shots (dt0 date, dt1 date)
|
||||
LANGUAGE sql
|
||||
AS $$
|
||||
INSERT INTO event_log (sequence, point, remarks, labels, meta)
|
||||
SELECT
|
||||
sequence, point, remarks, labels,
|
||||
'{"auto": true, "insertedBy": "log_midnight_shots"}'::jsonb
|
||||
FROM midnight_shots ms
|
||||
WHERE
|
||||
(dt0 IS NULL OR ms.tstamp >= dt0) AND
|
||||
(dt1 IS NULL OR ms.tstamp <= dt1) AND
|
||||
NOT EXISTS (
|
||||
SELECT 1
|
||||
FROM event_log el
|
||||
WHERE ms.sequence = el.sequence AND ms.point = el.point AND el.labels @> ms.labels
|
||||
);
|
||||
|
||||
-- Delete any midnight shots that might have been inserted in the log
|
||||
-- but are no longer relevant according to the final_shots data.
|
||||
-- We operate on event_log, so the deletion is traceable.
|
||||
DELETE
|
||||
FROM event_log
|
||||
WHERE id IN (
|
||||
SELECT id
|
||||
FROM event_log el
|
||||
LEFT JOIN midnight_shots ms USING (sequence, point)
|
||||
WHERE
|
||||
'{LDSP,FDSP}'::text[] && el.labels -- &&: Do the arrays overlap?
|
||||
AND ms.sequence IS NULL
|
||||
);
|
||||
$$;
|
||||
|
||||
COMMENT ON PROCEDURE log_midnight_shots (date, date)
|
||||
IS 'Add midnight shots between two dates dt0 and dt1 to the event_log, unless the events already exist.';
|
||||
|
||||
|
||||
CREATE OR REPLACE PROCEDURE log_midnight_shots (dt0 date)
|
||||
LANGUAGE sql
|
||||
AS $$
|
||||
CALL log_midnight_shots(dt0, NULL);
|
||||
$$;
|
||||
|
||||
COMMENT ON PROCEDURE log_midnight_shots (date)
|
||||
IS 'Overload taking only a dt0 (adds events on that date or after).';
|
||||
|
||||
CREATE OR REPLACE PROCEDURE log_midnight_shots ()
|
||||
LANGUAGE sql
|
||||
AS $$
|
||||
CALL log_midnight_shots(NULL, NULL);
|
||||
$$;
|
||||
|
||||
COMMENT ON PROCEDURE log_midnight_shots ()
|
||||
IS 'Overload taking no arguments (adds all missing events).';
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
BEGIN
|
||||
|
||||
FOR row IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'survey_%'
|
||||
ORDER BY schema_name
|
||||
LOOP
|
||||
CALL pg_temp.upgrade_survey_schema(row.schema_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.12"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.3.12"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
@@ -4,21 +4,21 @@
|
||||
<head>
|
||||
<title>{{DglProjectId}} Line Log Report – {%if Sequences.length > 1 %}Multiple sequences{% else %}Sequence {{Sequences[0].SequenceNumber}}{% endif %}</title>
|
||||
<style>
|
||||
|
||||
|
||||
@media print {
|
||||
body, html, table {
|
||||
font-size: 10px !important;
|
||||
}
|
||||
|
||||
|
||||
a {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
|
||||
tr.aside {
|
||||
font-size: 8px !important;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
html {
|
||||
font-size: 16px;
|
||||
text-rendering: optimizeLegibility;
|
||||
@@ -180,7 +180,7 @@ footer {
|
||||
font-size: smaller;
|
||||
border-top: thin solid;
|
||||
min-height: 25px;
|
||||
|
||||
|
||||
position: fixed;
|
||||
bottom: 0;
|
||||
background: white;
|
||||
@@ -246,13 +246,24 @@ footer {
|
||||
{% if Begin.Reshoot %}Reshoot{% endif -%}
|
||||
|
||||
<div class="comments">
|
||||
|
||||
{% if Sequence.DglSequenceComments %}
|
||||
{% if Sequence.DglSequenceComments[0] %}
|
||||
<h3>Acquisition Comments</h3>
|
||||
<div class="comment">
|
||||
{{ Sequence.DglSequenceComments[0] | markdown }}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if Sequence.DglSequenceComments[1] %}
|
||||
<h3>Processing Comments</h3>
|
||||
<div class="comment">
|
||||
{{ Sequence.DglSequenceComments[1] | markdown }}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% else %}
|
||||
<h3>Sequence comments</h3>
|
||||
|
||||
{% for Comment in Sequence.DglSequenceComments %}
|
||||
<div class="comment">{{ Comment | markdown }}</div>
|
||||
{% endfor %}
|
||||
|
||||
{% if not Sequence.DglSequenceComments %}<div class="nocomment">(Nil)</div>{% endif %}
|
||||
<div class="nocomment">(Nil)</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<div class="events">
|
||||
|
||||
2
lib/www/client/source/package-lock.json
generated
2
lib/www/client/source/package-lock.json
generated
@@ -33,7 +33,7 @@
|
||||
"@vue/cli-plugin-router": "~4.4.0",
|
||||
"@vue/cli-plugin-vuex": "~4.4.0",
|
||||
"@vue/cli-service": "^4.5.13",
|
||||
"sass": "1.32",
|
||||
"sass": "~1.32",
|
||||
"sass-loader": "^8.0.0",
|
||||
"stylus": "^0.54.8",
|
||||
"stylus-loader": "^3.0.2",
|
||||
|
||||
@@ -188,9 +188,9 @@ export default {
|
||||
labelToItem (k) {
|
||||
return {
|
||||
text: k,
|
||||
icon: this.labels[k].view?.icon,
|
||||
colour: this.labels[k].view?.colour,
|
||||
title: this.labels[k].view?.description
|
||||
icon: this.labels?.[k]?.view?.icon,
|
||||
colour: this.labels?.[k]?.view?.colour,
|
||||
title: this.labels?.[k]?.view?.description
|
||||
};
|
||||
},
|
||||
|
||||
|
||||
@@ -133,6 +133,7 @@
|
||||
item-text="text"
|
||||
return-object
|
||||
label="Remarks"
|
||||
hint="Placeholders: @DMS@, @DEG@, @EN@, @WD@, @BSP@, @CMG@, …"
|
||||
prepend-icon="mdi-text-box-outline"
|
||||
append-outer-icon="mdi-magnify"
|
||||
@click:append-outer="(e) => remarksMenu = e"
|
||||
|
||||
@@ -182,121 +182,31 @@
|
||||
<v-list-item-icon><v-icon>mdi-delete</v-icon></v-list-item-icon>
|
||||
<v-list-item-title class="error--text">Delete all comments</v-list-item-title>
|
||||
</v-list-item>
|
||||
|
||||
<!-- BEGIN This section only applies to QC events -->
|
||||
<template v-if="contextMenuItem.meta.qc_id">
|
||||
|
||||
<v-divider></v-divider>
|
||||
|
||||
<!-- Mark QC accepted -->
|
||||
<v-list-item @click="() => acceptQc(contextMenuItem)" v-if="!isAcceptedQc(contextMenuItem)">
|
||||
<v-list-item-icon><v-icon>mdi-check</v-icon></v-list-item-icon>
|
||||
<v-list-item-title>Mark QC accepted</v-list-item-title>
|
||||
</v-list-item>
|
||||
<!-- Unmark QC accepted -->
|
||||
<v-list-item @click="() => acceptQc(contextMenuItem, false)" v-else>
|
||||
<v-list-item-icon><v-icon>mdi-restore</v-icon></v-list-item-icon>
|
||||
<v-list-item-title>Unmark QC accepted</v-list-item-title>
|
||||
</v-list-item>
|
||||
|
||||
</template>
|
||||
<!-- END This section only applies to QC events -->
|
||||
|
||||
</v-list>
|
||||
|
||||
</v-menu>
|
||||
<!-- END Context menu for log entries -->
|
||||
|
||||
<v-container fluid class="pa-0 pb-2" v-if="sequenceData">
|
||||
<v-row no-gutters class="d-flex flex-column flex-sm-row">
|
||||
<v-col cols="6" class="d-flex flex-column">
|
||||
<v-card outlined tile class="flex-grow-1">
|
||||
<v-card-subtitle>
|
||||
Acquisition remarks
|
||||
<template v-if="writeaccess">
|
||||
<template v-if="edit && edit.sequence == sequenceData.sequence && edit.key == 'remarks'">
|
||||
<v-btn
|
||||
class="ml-3"
|
||||
icon
|
||||
small
|
||||
title="Cancel edit"
|
||||
:disabled="loading"
|
||||
@click="edit.value = sequenceData.remarks; edit = null"
|
||||
>
|
||||
<v-icon small>mdi-close</v-icon>
|
||||
</v-btn>
|
||||
<v-btn v-if="edit.value != sequenceData.remarks"
|
||||
icon
|
||||
small
|
||||
title="Save edits"
|
||||
:disabled="loading"
|
||||
@click="edit = null"
|
||||
>
|
||||
<v-icon small>mdi-content-save-edit-outline</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
<v-btn v-else-if="edit === null"
|
||||
class="ml-3"
|
||||
icon
|
||||
small
|
||||
title="Edit"
|
||||
:disabled="loading"
|
||||
@click="editItem(sequenceData, 'remarks')"
|
||||
>
|
||||
<v-icon small>mdi-square-edit-outline</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
</v-card-subtitle>
|
||||
<v-card-text v-if="edit && edit.sequence == sequenceData.sequence && edit.key == 'remarks'">
|
||||
<v-textarea
|
||||
class="markdown"
|
||||
autofocus
|
||||
placeholder="Enter your text here"
|
||||
:disabled="loading"
|
||||
v-model="edit.value"
|
||||
>
|
||||
</v-textarea>
|
||||
</v-card-text>
|
||||
<v-card-text v-else v-html="$options.filters.markdown(sequenceData.remarks || '')">
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</v-col>
|
||||
|
||||
<v-col cols="6" class="d-flex flex-column">
|
||||
<v-card outlined tile class="flex-grow-1">
|
||||
<v-card-subtitle>
|
||||
Processing remarks
|
||||
<template v-if="writeaccess">
|
||||
<template v-if="edit && edit.sequence == sequenceData.sequence && edit.key == 'remarks_final'">
|
||||
<v-btn
|
||||
class="ml-3"
|
||||
icon
|
||||
small
|
||||
title="Cancel edit"
|
||||
:disabled="loading"
|
||||
@click="edit.value = sequenceData.remarks_final; edit = null"
|
||||
>
|
||||
<v-icon small>mdi-close</v-icon>
|
||||
</v-btn>
|
||||
<v-btn v-if="edit.value != sequenceData.remarks_final"
|
||||
icon
|
||||
small
|
||||
title="Save edits"
|
||||
:disabled="loading"
|
||||
@click="edit = null"
|
||||
>
|
||||
<v-icon small>mdi-content-save-edit-outline</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
<v-btn v-else-if="edit === null"
|
||||
class="ml-3"
|
||||
icon
|
||||
small
|
||||
title="Edit"
|
||||
:disabled="loading"
|
||||
@click="editItem(sequenceData, 'remarks_final')"
|
||||
>
|
||||
<v-icon small>mdi-square-edit-outline</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
</v-card-subtitle>
|
||||
<v-card-text v-if="edit && edit.sequence == sequenceData.sequence && edit.key == 'remarks_final'">
|
||||
<v-textarea
|
||||
class="markdown"
|
||||
autofocus
|
||||
placeholder="Enter your text here"
|
||||
:disabled="loading"
|
||||
v-model="edit.value"
|
||||
>
|
||||
</v-textarea>
|
||||
</v-card-text>
|
||||
<v-card-text v-else v-html="$options.filters.markdown(sequenceData.remarks_final || '')">
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</v-col>
|
||||
</v-row>
|
||||
</v-container>
|
||||
|
||||
<v-data-table
|
||||
dense
|
||||
:headers="headers"
|
||||
@@ -310,8 +220,7 @@
|
||||
:custom-filter="searchTable"
|
||||
:loading="loading"
|
||||
fixed-header
|
||||
:footer-props='{itemsPerPageOptions: [ 10, 25, 50, 100, 500, -1 ]}'
|
||||
:show-first-last-page="true"
|
||||
:footer-props='{itemsPerPageOptions: [ 10, 25, 50, 100, 500, -1 ], showFirstLastPage: true}'
|
||||
@click:row="setActiveItem"
|
||||
@contextmenu:row="contextMenu"
|
||||
>
|
||||
@@ -359,6 +268,29 @@
|
||||
|
||||
</template>
|
||||
|
||||
<template v-slot:footer.prepend>
|
||||
<v-checkbox v-for="label in filterableLabels"
|
||||
:key="label"
|
||||
class="mr-3"
|
||||
v-model="shownLabels"
|
||||
:value="label"
|
||||
:title="`Show ${label} events`"
|
||||
dense
|
||||
hide-details
|
||||
>
|
||||
<template v-slot:label>
|
||||
<v-chip
|
||||
x-small
|
||||
:color="labels[label] && labels[label].view.colour"
|
||||
:title="labels[label] && labels[label].view.description"
|
||||
:dark="labels[label] && labels[label].view.dark"
|
||||
:light="labels[label] && labels[label].view.light"
|
||||
>{{label}}
|
||||
</v-chip>
|
||||
</template>
|
||||
</v-checkbox>
|
||||
</template>
|
||||
|
||||
</v-data-table>
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
@@ -453,6 +385,8 @@ export default {
|
||||
labels: {},
|
||||
options: {},
|
||||
filter: "",
|
||||
filterableLabels: [ "QC", "QCAccepted" ],
|
||||
shownLabels: [ "QC", "QCAccepted" ],
|
||||
eventCount: null,
|
||||
eventDialog: false,
|
||||
eventLabelsDialog: false,
|
||||
@@ -465,9 +399,6 @@ export default {
|
||||
queuedReload: false,
|
||||
itemsPerPage: 25,
|
||||
|
||||
sequenceData: null,
|
||||
edit: null, // { sequence, key, value }
|
||||
|
||||
// Row highlighter
|
||||
activeItem: null,
|
||||
|
||||
@@ -482,7 +413,16 @@ export default {
|
||||
computed: {
|
||||
rows () {
|
||||
const rows = {};
|
||||
this.items.forEach(i => {
|
||||
this.items
|
||||
.filter(i => {
|
||||
for (const label of this.filterableLabels) {
|
||||
if (!this.shownLabels.includes(label) && i.labels.includes(label)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
})
|
||||
.forEach(i => {
|
||||
const key = (i.sequence && i.point) ? (i.sequence+"@"+i.point) : i.tstamp;
|
||||
if (!rows[key]) {
|
||||
rows[key] = {
|
||||
@@ -537,26 +477,6 @@ export default {
|
||||
},
|
||||
|
||||
watch: {
|
||||
|
||||
async edit (newVal, oldVal) {
|
||||
if (newVal === null && oldVal !== null) {
|
||||
const item = oldVal.sequence == this.sequenceData.sequence
|
||||
? this.sequenceData
|
||||
: null;
|
||||
if (item && item[oldVal.key] != oldVal.value) {
|
||||
if (await this.saveItem(oldVal)) {
|
||||
item[oldVal.key] = oldVal.value;
|
||||
} else {
|
||||
this.edit = oldVal;
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
defaultSequence (sequenceNumber) {
|
||||
this.getSequenceData();
|
||||
},
|
||||
|
||||
options: {
|
||||
handler () {
|
||||
//this.getEvents();
|
||||
@@ -582,12 +502,6 @@ export default {
|
||||
} else {
|
||||
this.queuedReload = true;
|
||||
}
|
||||
} else if ((event.channel == "final_lines" || event.channel == "raw_lines") &&
|
||||
event.payload.schema == this.projectSchema &&
|
||||
this.sequenceData?.sequence &&
|
||||
(this.sequenceData.sequence == event.payload.old.sequence ||
|
||||
this.sequenceData.sequence == event.payload.new.sequence)) {
|
||||
this.getSequenceData();
|
||||
}
|
||||
},
|
||||
|
||||
@@ -703,44 +617,6 @@ export default {
|
||||
this.presetRemarks = await this.api([url]);
|
||||
},
|
||||
|
||||
async getSequenceData () {
|
||||
if (this.defaultSequence) {
|
||||
const url = `/project/${this.$route.params.project}/sequence?sequence=${this.defaultSequence}`;
|
||||
const res = await this.api([url]);
|
||||
this.sequenceData = res[0];
|
||||
} else {
|
||||
this.sequenceData = null;
|
||||
}
|
||||
},
|
||||
|
||||
editItem (item, key) {
|
||||
this.edit = {
|
||||
sequence: item.sequence,
|
||||
key,
|
||||
value: item[key]
|
||||
}
|
||||
},
|
||||
|
||||
async saveItem (edit) {
|
||||
if (!edit) return;
|
||||
|
||||
try {
|
||||
const url = `/project/${this.$route.params.project}/sequence/${edit.sequence}`;
|
||||
const init = {
|
||||
method: "PATCH",
|
||||
body: {
|
||||
[edit.key]: edit.value
|
||||
}
|
||||
};
|
||||
|
||||
let res;
|
||||
await this.api([url, init, (e, r) => res = r]);
|
||||
return res && res.ok;
|
||||
} catch (err) {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
|
||||
newItem (from = {}) {
|
||||
const type = (from.sequence && from.point) ? "sequence" : "timed";
|
||||
const tstamp = from.tstamp || (new Date).toISOString();
|
||||
@@ -949,6 +825,23 @@ export default {
|
||||
}
|
||||
},
|
||||
|
||||
isAcceptedQc (item) {
|
||||
return item.labels.includes('QCAccepted');
|
||||
},
|
||||
|
||||
async acceptQc (item, accept = true) {
|
||||
|
||||
const url = accept
|
||||
? `/project/${this.$route.params.project}/qc/results/accept`
|
||||
: `/project/${this.$route.params.project}/qc/results/unaccept`;
|
||||
|
||||
await this.api([url, {
|
||||
method: "POST",
|
||||
body: [ item.id ]
|
||||
}]);
|
||||
|
||||
},
|
||||
|
||||
setActiveItem (item) {
|
||||
// Disable setting the active item for now,
|
||||
// it's kind of annoying.
|
||||
@@ -969,7 +862,6 @@ export default {
|
||||
this.getEventCount();
|
||||
this.getEvents();
|
||||
this.getPresetRemarks();
|
||||
this.getSequenceData();
|
||||
|
||||
window.addEventListener('keyup', this.handleKeyboardEvent);
|
||||
},
|
||||
|
||||
@@ -374,6 +374,7 @@ export default {
|
||||
}
|
||||
}
|
||||
],
|
||||
labels: {},
|
||||
hashMarker: null
|
||||
};
|
||||
},
|
||||
@@ -438,17 +439,16 @@ export default {
|
||||
const data = await this.api([url, {headers: {"Accept": "application/geo+json"}}]);
|
||||
if (data) {
|
||||
|
||||
function colour(feature) {
|
||||
if (feature && feature.properties && feature.properties.type) {
|
||||
if (feature.properties.type == "qc") {
|
||||
return feature.properties.labels.includes("QCAccepted")
|
||||
? "lightgray"
|
||||
: "gray";
|
||||
} else if (feature.properties.type == "midnight shot") {
|
||||
return "cyan";
|
||||
} else {
|
||||
return "orange";
|
||||
}
|
||||
const colour = (feature) => {
|
||||
if (feature.properties.meta?.qc_id) {
|
||||
return feature.properties.labels.includes("QCAccepted")
|
||||
? "lightgray"
|
||||
: "green";
|
||||
} else if (feature.properties.type == "midnight shot") { // FIXME
|
||||
// The above will no longer work. See #223.
|
||||
return "cyan";
|
||||
} else if (feature.properties.labels?.length) {
|
||||
return this.labels?.[feature.properties.labels[0]]?.view?.colour ?? "orange";
|
||||
}
|
||||
return "brown";
|
||||
}
|
||||
@@ -671,6 +671,15 @@ export default {
|
||||
}
|
||||
},
|
||||
|
||||
async getLabelDefinitions () {
|
||||
const url = `/project/${this.$route.params.project}/label`;
|
||||
|
||||
const labelSet = {};
|
||||
const labels = await this.api([url]) || [];
|
||||
labels.forEach( l => labelSet[l.name] = l.data );
|
||||
this.labels = labelSet;
|
||||
},
|
||||
|
||||
...mapActions(["api"])
|
||||
|
||||
},
|
||||
@@ -704,8 +713,10 @@ export default {
|
||||
}
|
||||
};
|
||||
|
||||
layers["Events (QC)"] = L.realtime(this.getEvents(i => i.properties.type == "qc"), eventsOptions());
|
||||
layers["Events (Other)"] = L.realtime(this.getEvents(i => i.properties.type != "qc"), eventsOptions());
|
||||
this.getLabelDefinitions(); // No await
|
||||
|
||||
layers["Events (QC)"] = L.realtime(this.getEvents(i => i.properties.meta?.qc_id), eventsOptions());
|
||||
layers["Events (Other)"] = L.realtime(this.getEvents(i => !i.properties.meta?.qc_id), eventsOptions());
|
||||
|
||||
layers["Events (Other)"].on('update', function (e) {
|
||||
//console.log("Events (Other) update event", e);
|
||||
|
||||
@@ -329,6 +329,18 @@
|
||||
title="View the event log for this sequence">{{value}}</a>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.line="{value}">
|
||||
<b>{{value}}</b>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.fsp_final="{value}">
|
||||
<b v-if="value">{{value}}</b>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.lsp_final="{value}">
|
||||
<b v-if="value">{{value}}</b>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.status="{value, item}">
|
||||
<span :class="{'success--text': value=='final', 'warning--text': value=='raw', 'error--text': value=='ntbp'}">
|
||||
{{ value == "final" ? "Processed" : value == "raw" ? item.raw_files ? "Acquired" : "In acquisition" : value == "ntbp" ? "NTBP" : `Unknown (${status})` }}
|
||||
@@ -368,7 +380,7 @@
|
||||
</template>
|
||||
|
||||
<template v-slot:item.duration_final="{item: {duration_final: value}}">
|
||||
{{
|
||||
<b>{{
|
||||
value
|
||||
?
|
||||
"" +
|
||||
@@ -379,7 +391,7 @@
|
||||
":" + String(value.minutes || 0).padStart(2, "0") +
|
||||
":" + String(value.seconds || 0).padStart(2, "0")
|
||||
: "N/A"
|
||||
}}
|
||||
}}</b>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.ts0="{value}">
|
||||
@@ -395,15 +407,15 @@
|
||||
</template>
|
||||
|
||||
<template v-slot:item.ts0_final="{value}">
|
||||
<span v-if="value">
|
||||
<b v-if="value">
|
||||
{{ value.replace(/(.{10})T(.{8}).{4}Z$/, "$1 $2") }}
|
||||
</span>
|
||||
</b>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.ts1_final="{value}">
|
||||
<span v-if="value">
|
||||
<b v-if="value">
|
||||
{{ value.replace(/(.{10})T(.{8}).{4}Z$/, "$1 $2") }}
|
||||
</span>
|
||||
</b>
|
||||
</template>
|
||||
|
||||
<template v-slot:item.missing_shots="{value}">
|
||||
|
||||
@@ -6,6 +6,7 @@ const cookieParser = require('cookie-parser')
|
||||
const maybeSendAlert = require("../lib/alerts");
|
||||
const mw = require('./middleware');
|
||||
|
||||
const { ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
|
||||
const verbose = process.env.NODE_ENV != 'test';
|
||||
const app = express();
|
||||
app.locals.version = "0.3.1"; // API version
|
||||
@@ -22,7 +23,7 @@ app.map = function(a, route){
|
||||
} // else drop through
|
||||
// get: function(){ ... }
|
||||
case 'function':
|
||||
if (verbose) console.log('%s %s', key, route);
|
||||
if (verbose) INFO('%s %s', key, route);
|
||||
app[key](route, a[key]);
|
||||
break;
|
||||
}
|
||||
@@ -297,10 +298,12 @@ app.use(function (err, req, res, next) {
|
||||
const alert = {title, message, description, error: err};
|
||||
|
||||
console.log("Error:", err);
|
||||
ERROR("%O", err)
|
||||
|
||||
res.set("Content-Type", "application/json");
|
||||
if (err instanceof Error && err.name != "UnauthorizedError") {
|
||||
console.error(err.stack);
|
||||
// console.error(err.stack);
|
||||
ERROR(err.stack);
|
||||
res.set("Content-Type", "text/plain");
|
||||
res.status(500).send('General internal error');
|
||||
maybeSendAlert(alert);
|
||||
@@ -317,7 +320,7 @@ app.use(function (err, req, res, next) {
|
||||
|
||||
app.disable('x-powered-by');
|
||||
app.enable('trust proxy');
|
||||
console.log('trust proxy is ' + (app.get('trust proxy')? 'on' : 'off'));
|
||||
INFO('trust proxy is ' + (app.get('trust proxy')? 'on' : 'off'));
|
||||
|
||||
const addr = "127.0.0.1";
|
||||
|
||||
@@ -325,7 +328,7 @@ if (!module.parent) {
|
||||
var port = process.env.HTTP_PORT || 3000;
|
||||
var server = http.createServer(app).listen(port, addr);
|
||||
|
||||
console.log('API started on port ' + port);
|
||||
INFO('API started on port ' + port);
|
||||
} else {
|
||||
app.start = function (port = 3000, path) {
|
||||
|
||||
@@ -338,7 +341,8 @@ if (!module.parent) {
|
||||
|
||||
const server = http.createServer(root).listen(port, addr);
|
||||
if (server) {
|
||||
console.log(`API started on port ${port}, prefix: ${path || "/"}`);
|
||||
// console.log(`API started on port ${port}, prefix: ${path || "/"}`);
|
||||
INFO(`API started on port ${port}, prefix: ${path || "/"}`);
|
||||
}
|
||||
return server;
|
||||
}
|
||||
|
||||
@@ -4,8 +4,6 @@ const { qc } = require('../../../../lib/db');
|
||||
module.exports = async function (req, res, next) {
|
||||
|
||||
try {
|
||||
const payload = req.body;
|
||||
|
||||
await qc.results.delete(req.params.project, req.params.sequence);
|
||||
res.status(204).send();
|
||||
next();
|
||||
|
||||
48
lib/www/server/debug.js
Normal file
48
lib/www/server/debug.js
Normal file
@@ -0,0 +1,48 @@
|
||||
const path = require('path');
|
||||
|
||||
function debuggers (filename) {
|
||||
const ns = filename
|
||||
? "dougal:server:"+path.basename(path.relative(__dirname, filename).replace(/\//g, ':'), path.extname(filename)).replace(/:index$/, "")
|
||||
: "dougal:server";
|
||||
const debug = require('debug')(ns);
|
||||
return {
|
||||
debug,
|
||||
|
||||
// A "panic" condition affecting multiple applications, servers, or sites.
|
||||
// System is unusable. Notify all technical staff on call.
|
||||
EMERGENCY: debug.extend("EMERGENCY"),
|
||||
|
||||
// A condition requiring immediate correction or indicating a failure in a
|
||||
// primary system, for example, a loss of a primary ISP connection.
|
||||
// Fix CRITICAL issues before ALERT-level problems.
|
||||
CRITICAL: debug.extend("CRITICAL"),
|
||||
|
||||
// A condition requiring immediate correction, for example, the loss of a
|
||||
// backup ISP connection. Notify staff who can fix the problem.
|
||||
ALERT: debug.extend("ALERT"),
|
||||
|
||||
// Non-urgent failures. Notify developers or administrators as errors must
|
||||
// be resolved within a given time.
|
||||
ERROR: debug.extend("ERROR"),
|
||||
|
||||
// Warning messages are not errors, but they indicate that an error will occur
|
||||
// if required action is not taken. An example is a file system that is 85% full.
|
||||
// Each item must be resolved within a given time.
|
||||
WARNING: debug.extend("WARNING"),
|
||||
|
||||
// Events that are unusual but are not error conditions. These items might be
|
||||
// summarized in an email to developers or administrators to spot potential
|
||||
// problems. No immediate action is required.
|
||||
NOTICE: debug.extend("NOTICE"),
|
||||
|
||||
// Normal operational messages. These may be harvested for network maintenance
|
||||
// functions like reporting and throughput measurement. No action is required.
|
||||
INFO: debug.extend("INFO"),
|
||||
|
||||
// Information useful to developers for debugging an application. This information
|
||||
// is not useful during operations.
|
||||
DEBUG: debug.extend("DEBUG"),
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = debuggers;
|
||||
143
lib/www/server/events/handlers/detect-fdsp.js
Normal file
143
lib/www/server/events/handlers/detect-fdsp.js
Normal file
@@ -0,0 +1,143 @@
|
||||
const { schema2pid } = require('../../lib/db/connection');
|
||||
const { event } = require('../../lib/db');
|
||||
const { ALERT, ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
|
||||
|
||||
/** Midnight shot detection.
|
||||
*
|
||||
* This event handler checks if there is an UTC date jump between consecutive
|
||||
* shots. If a jump is detected, it sends to new entries to the event log, for
|
||||
* the last shot and first shot of the previous and current dates, respectively.
|
||||
*/
|
||||
class DetectFDSP {
|
||||
/* Data may come much faster than we can process it, so we put it
|
||||
* in a queue and process it at our own pace.
|
||||
*
|
||||
* The run() method fills the queue with the necessary data and then
|
||||
* calls processQueue().
|
||||
*
|
||||
* The processQueue() method looks at the first two elements in
|
||||
* the queue and processes them if they are not already being taken
|
||||
* care of by a previous processQueue() call – this will happen when
|
||||
* data is coming in faster than it can be processed.
|
||||
*
|
||||
* If the processQueue() call is the first to see the two bottommost
|
||||
* two elements, it will process them and, when finished, it will set
|
||||
* the `isPending` flag of the bottommost element to `false`, thus
|
||||
* letting the next call know that it has work to do.
|
||||
*
|
||||
* If the queue was empty, run() will set the `isPending` flag of its
|
||||
* first element to a falsy value, thus bootstrapping the process.
|
||||
*/
|
||||
static MAX_QUEUE_SIZE = 125000;
|
||||
|
||||
queue = [];
|
||||
|
||||
async processQueue () {
|
||||
DEBUG("Queue length", this.queue.length)
|
||||
while (this.queue.length > 1) {
|
||||
|
||||
if (this.queue[0].isPending) {
|
||||
setImmediate(() => this.processQueue());
|
||||
return;
|
||||
}
|
||||
|
||||
const prev = this.queue.shift();
|
||||
const cur = this.queue[0];
|
||||
|
||||
const sequence = Number(cur._sequence);
|
||||
|
||||
try {
|
||||
|
||||
if (prev.lineName == cur.lineName && prev._sequence == cur._sequence &&
|
||||
prev.lineStatus == "online" && cur.lineStatus == "online" && sequence) {
|
||||
|
||||
// DEBUG("Previous", prev);
|
||||
// DEBUG("Current", cur);
|
||||
|
||||
if (prev.time.substr(0, 10) != cur.time.substr(0, 10)) {
|
||||
// Possible a date change, but could also be a missing timestamp
|
||||
// or something else.
|
||||
|
||||
const ts0 = new Date(prev.time)
|
||||
const ts1 = new Date(cur.time);
|
||||
|
||||
if (!isNaN(ts0) && !isNaN(ts1) && ts0.getUTCDay() != ts1.getUTCDay()) {
|
||||
INFO("Sequence shot across midnight UTC detected", cur._sequence, cur.lineName);
|
||||
|
||||
const ldsp = {
|
||||
sequence: prev._sequence,
|
||||
point: prev._point,
|
||||
remarks: "Last shotpoint of the day",
|
||||
labels: ["LDSP", "Prod"],
|
||||
meta: {auto: true, insertedBy: this.constructor.name}
|
||||
};
|
||||
|
||||
const fdsp = {
|
||||
sequence: cur._sequence,
|
||||
point: cur._point,
|
||||
remarks: "First shotpoint of the day",
|
||||
labels: ["FDSP", "Prod"],
|
||||
meta: {auto: true, insertedBy: this.constructor.name}
|
||||
};
|
||||
|
||||
INFO("LDSP", ldsp);
|
||||
INFO("FDSP", fdsp);
|
||||
|
||||
const projectId = await schema2pid(prev._schema);
|
||||
|
||||
if (projectId) {
|
||||
await event.post(projectId, ldsp);
|
||||
await event.post(projectId, fdsp);
|
||||
} else {
|
||||
ERROR("projectId not found for", prev._schema);
|
||||
}
|
||||
} else {
|
||||
WARNING("False positive on these timestamps", prev.time, cur.time);
|
||||
WARNING("No events were created");
|
||||
}
|
||||
}
|
||||
}
|
||||
// Processing of this shot has already been completed.
|
||||
// The queue can now move forward.
|
||||
} catch (err) {
|
||||
ERROR(err);
|
||||
} finally {
|
||||
cur.isPending = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async run (data) {
|
||||
if (!data || data.channel !== "realtime") {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!(data.payload && data.payload.new && data.payload.new.meta)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const meta = data.payload.new.meta;
|
||||
|
||||
if (this.queue.length < DetectFDSP.MAX_QUEUE_SIZE) {
|
||||
|
||||
const event = {
|
||||
isPending: this.queue.length,
|
||||
_schema: meta._schema,
|
||||
time: meta.time,
|
||||
lineStatus: meta.lineStatus,
|
||||
_sequence: meta._sequence,
|
||||
_point: meta._point,
|
||||
lineName: meta.lineName
|
||||
};
|
||||
this.queue.push(event);
|
||||
// DEBUG("EVENT", event);
|
||||
|
||||
} else {
|
||||
ALERT("Queue full at", this.queue.length);
|
||||
}
|
||||
|
||||
this.processQueue();
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = DetectFDSP;
|
||||
@@ -1,5 +1,6 @@
|
||||
const Handlers = [
|
||||
require('./detect-soleol')
|
||||
require('./detect-soleol'),
|
||||
require('./detect-fdsp')
|
||||
];
|
||||
|
||||
function init () {
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
const { listen } = require('../ws/db');
|
||||
const channels = require('../lib/db/channels');
|
||||
const handlers = require('./handlers').init();
|
||||
const { ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
|
||||
|
||||
function start () {
|
||||
listen(channels, async function (data) {
|
||||
DEBUG("Incoming data", data);
|
||||
for (const handler of handlers) {
|
||||
// NOTE: We are intentionally passing the same instance
|
||||
// of the data to every handler. This means that earlier
|
||||
@@ -15,7 +17,7 @@ function start () {
|
||||
}
|
||||
});
|
||||
|
||||
console.log("Events manager started.", handlers.length, "active handlers");
|
||||
INFO("Events manager started.", handlers.length, "active handlers");
|
||||
}
|
||||
|
||||
module.exports = { start }
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
#!/usr/bin/node
|
||||
|
||||
const { INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
|
||||
|
||||
async function main () {
|
||||
// Check that we're running against the correct database version
|
||||
const version = require('./lib/version');
|
||||
console.log("Running version", await version.describe());
|
||||
INFO("Running version", await version.describe());
|
||||
version.compatible()
|
||||
.then( (versions) => {
|
||||
const api = require('./api');
|
||||
@@ -17,12 +19,13 @@ async function main () {
|
||||
const eventManagerPath = [__dirname, "events"].join("/");
|
||||
const eventManager = fork(eventManagerPath, /*{ stdio: 'ignore' }*/);
|
||||
|
||||
console.info("Versions", versions);
|
||||
INFO("Versions:", versions);
|
||||
|
||||
process.on('exit', () => eventManager.kill());
|
||||
})
|
||||
.catch( ({current, wanted, component}) => {
|
||||
console.error(`Fatal error: incompatible ${component} version ${current} (wanted: ${wanted})`);
|
||||
ERROR(`Fatal error: incompatible ${component} version ${current} (wanted: ${wanted})`);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -5,6 +5,8 @@ async function patch (projectId, eventId, payload, opts = {}) {
|
||||
const p = payload; // Shorter
|
||||
const client = await setSurvey(projectId);
|
||||
try {
|
||||
await transaction.begin(client);
|
||||
|
||||
// The order of attributes in an object is not defined, so
|
||||
// in theory we could get a different order if we made separate
|
||||
// calls to Object.keys() and Object.values(), unlikely as that
|
||||
@@ -22,10 +24,32 @@ async function patch (projectId, eventId, payload, opts = {}) {
|
||||
const values = [ eventId, ...v ];
|
||||
|
||||
await client.query(text, values);
|
||||
|
||||
// NOTE Horrible hack warning.
|
||||
// If we try to do the UPDATE on event_log, as you normally would,
|
||||
// we get a constraint violation on `event_log_full_validity_check`.
|
||||
// We would normally set that constraint as DEFERRABLE, except that
|
||||
// as of version 14, PostgreSQL does not support deferrable CHECK
|
||||
// constraints:
|
||||
// https://www.postgresql.org/docs/current/sql-createtable.html
|
||||
// So the options that I can think of are either not to use a transaction
|
||||
// or to apply this second update directly on the `event_log_full` table.
|
||||
|
||||
const text1 = `
|
||||
UPDATE event_log_full
|
||||
SET remarks = replace_placeholders(remarks, tstamp, sequence, point)
|
||||
WHERE id = $1 AND validity @> current_timestamp;
|
||||
`;
|
||||
const values1 = [ eventId ];
|
||||
|
||||
await client.query(text1, values1);
|
||||
|
||||
await transaction.commit(client);
|
||||
} catch (err) {
|
||||
err.origin = __filename;
|
||||
throw err;
|
||||
} finally {
|
||||
client.release();
|
||||
client.release(); // implies ROLLBACK;
|
||||
}
|
||||
|
||||
return;
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
const { DEBUG, ERROR } = require('DOUGAL_ROOT/debug')(__filename);
|
||||
const { setSurvey, transaction } = require('../connection');
|
||||
|
||||
async function post (projectId, payload, opts = {}) {
|
||||
@@ -9,12 +10,14 @@ async function post (projectId, payload, opts = {}) {
|
||||
const text = `
|
||||
INSERT
|
||||
INTO event_log (tstamp, sequence, point, remarks, labels)
|
||||
VALUES ($1, $2, $3, $4, $5);
|
||||
VALUES ($1, $2, $3, replace_placeholders($4, $1, $2, $3), $5);
|
||||
`;
|
||||
const values = [ p.tstamp, p.sequence, p.point, p.remarks, p.labels ];
|
||||
|
||||
DEBUG("Inserting new values: %O", values);
|
||||
await client.query(text, values);
|
||||
} catch (err) {
|
||||
err.origin = __filename;
|
||||
throw err;
|
||||
} finally {
|
||||
client.release();
|
||||
|
||||
@@ -12,7 +12,7 @@ async function put (projectId, eventId, payload, opts = {}) {
|
||||
tstamp = $1,
|
||||
sequence = $2,
|
||||
point = $3,
|
||||
remarks = $4,
|
||||
remarks = replace_placeholders($4, $1, $2, $3),
|
||||
labels = $5
|
||||
WHERE id = $6;
|
||||
`;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
// FIXME This code is in painful need of refactoring
|
||||
|
||||
const { DEBUG } = require("DOUGAL_ROOT/debug")(__filename);
|
||||
const { setSurvey, transaction, pool } = require('../connection');
|
||||
|
||||
let last_tstamp = 0;
|
||||
@@ -106,7 +107,7 @@ async function getNearestOfflinePreplot (candidates) {
|
||||
}
|
||||
}
|
||||
client.release();
|
||||
const _schema = results.sort( (a, b) => a.distance - b.distance).shift()._schema;
|
||||
const _schema = results.sort( (a, b) => a.distance - b.distance).shift()?._schema;
|
||||
return candidates.find(c => c._schema == _schema);
|
||||
}
|
||||
|
||||
@@ -253,6 +254,7 @@ async function save (navData, opts = {}) {
|
||||
});
|
||||
return obj;
|
||||
}).filter(c => !!c);
|
||||
DEBUG("Candidates: %j", candidates);
|
||||
// console.log("CANDIDATES", candidates);
|
||||
|
||||
if (candidates.length == 0) {
|
||||
|
||||
252
lib/www/server/package-lock.json
generated
252
lib/www/server/package-lock.json
generated
@@ -14,6 +14,7 @@
|
||||
],
|
||||
"dependencies": {
|
||||
"cookie-parser": "^1.4.5",
|
||||
"debug": "^4.3.4",
|
||||
"express": "^4.17.1",
|
||||
"express-jwt": "^6.0.0",
|
||||
"json2csv": "^5.0.6",
|
||||
@@ -165,22 +166,6 @@
|
||||
"node": ">= 6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/agent-base/node_modules/debug": {
|
||||
"version": "4.3.1",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz",
|
||||
"integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==",
|
||||
"dependencies": {
|
||||
"ms": "2.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/agent-base/node_modules/ms": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
},
|
||||
"node_modules/ansi-regex": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
|
||||
@@ -285,6 +270,19 @@
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/body-parser/node_modules/debug": {
|
||||
"version": "2.6.9",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||
"dependencies": {
|
||||
"ms": "2.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/body-parser/node_modules/ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.11",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||
@@ -511,11 +509,19 @@
|
||||
}
|
||||
},
|
||||
"node_modules/debug": {
|
||||
"version": "2.6.9",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||
"version": "4.3.4",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
|
||||
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
|
||||
"dependencies": {
|
||||
"ms": "2.0.0"
|
||||
"ms": "2.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"supports-color": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/decimal.js": {
|
||||
@@ -744,6 +750,19 @@
|
||||
"resolved": "https://registry.npmjs.org/express-unless/-/express-unless-1.0.0.tgz",
|
||||
"integrity": "sha512-zXSSClWBPfcSYjg0hcQNompkFN/MxQQ53eyrzm9BYgik2ut2I7PxAf2foVqBRMYCwWaZx/aWodi+uk76npdSAw=="
|
||||
},
|
||||
"node_modules/express/node_modules/debug": {
|
||||
"version": "2.6.9",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||
"dependencies": {
|
||||
"ms": "2.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/express/node_modules/ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
|
||||
},
|
||||
"node_modules/express/node_modules/safe-buffer": {
|
||||
"version": "5.2.1",
|
||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||
@@ -803,6 +822,19 @@
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/finalhandler/node_modules/debug": {
|
||||
"version": "2.6.9",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||
"dependencies": {
|
||||
"ms": "2.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/finalhandler/node_modules/ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
|
||||
},
|
||||
"node_modules/forever-agent": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
|
||||
@@ -1009,22 +1041,6 @@
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/http-proxy-agent/node_modules/debug": {
|
||||
"version": "4.3.1",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz",
|
||||
"integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==",
|
||||
"dependencies": {
|
||||
"ms": "2.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/http-proxy-agent/node_modules/ms": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
},
|
||||
"node_modules/http-signature": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
|
||||
@@ -1051,22 +1067,6 @@
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/https-proxy-agent/node_modules/debug": {
|
||||
"version": "4.3.1",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz",
|
||||
"integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==",
|
||||
"dependencies": {
|
||||
"ms": "2.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/https-proxy-agent/node_modules/ms": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
},
|
||||
"node_modules/iconv-lite": {
|
||||
"version": "0.4.24",
|
||||
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
|
||||
@@ -1234,11 +1234,6 @@
|
||||
"npm": ">=1.4.28"
|
||||
}
|
||||
},
|
||||
"node_modules/jsonwebtoken/node_modules/ms": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
},
|
||||
"node_modules/jsprim": {
|
||||
"version": "1.4.2",
|
||||
"resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz",
|
||||
@@ -1517,9 +1512,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
},
|
||||
"node_modules/nan": {
|
||||
"version": "2.14.2",
|
||||
@@ -5348,6 +5343,19 @@
|
||||
"node": ">= 0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/send/node_modules/debug": {
|
||||
"version": "2.6.9",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||
"dependencies": {
|
||||
"ms": "2.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/send/node_modules/debug/node_modules/ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
|
||||
},
|
||||
"node_modules/send/node_modules/ms": {
|
||||
"version": "2.1.3",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||
@@ -5924,21 +5932,6 @@
|
||||
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
|
||||
"requires": {
|
||||
"debug": "4"
|
||||
},
|
||||
"dependencies": {
|
||||
"debug": {
|
||||
"version": "4.3.1",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz",
|
||||
"integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==",
|
||||
"requires": {
|
||||
"ms": "2.1.2"
|
||||
}
|
||||
},
|
||||
"ms": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"ansi-regex": {
|
||||
@@ -6031,6 +6024,21 @@
|
||||
"qs": "6.9.7",
|
||||
"raw-body": "2.4.3",
|
||||
"type-is": "~1.6.18"
|
||||
},
|
||||
"dependencies": {
|
||||
"debug": {
|
||||
"version": "2.6.9",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||
"requires": {
|
||||
"ms": "2.0.0"
|
||||
}
|
||||
},
|
||||
"ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
|
||||
}
|
||||
}
|
||||
},
|
||||
"brace-expansion": {
|
||||
@@ -6205,11 +6213,11 @@
|
||||
}
|
||||
},
|
||||
"debug": {
|
||||
"version": "2.6.9",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||
"version": "4.3.4",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
|
||||
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
|
||||
"requires": {
|
||||
"ms": "2.0.0"
|
||||
"ms": "2.1.2"
|
||||
}
|
||||
},
|
||||
"decimal.js": {
|
||||
@@ -6371,6 +6379,19 @@
|
||||
"vary": "~1.1.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"debug": {
|
||||
"version": "2.6.9",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||
"requires": {
|
||||
"ms": "2.0.0"
|
||||
}
|
||||
},
|
||||
"ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
|
||||
},
|
||||
"safe-buffer": {
|
||||
"version": "5.2.1",
|
||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||
@@ -6426,6 +6447,21 @@
|
||||
"parseurl": "~1.3.3",
|
||||
"statuses": "~1.5.0",
|
||||
"unpipe": "~1.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"debug": {
|
||||
"version": "2.6.9",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||
"requires": {
|
||||
"ms": "2.0.0"
|
||||
}
|
||||
},
|
||||
"ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
|
||||
}
|
||||
}
|
||||
},
|
||||
"forever-agent": {
|
||||
@@ -6599,21 +6635,6 @@
|
||||
"@tootallnate/once": "1",
|
||||
"agent-base": "6",
|
||||
"debug": "4"
|
||||
},
|
||||
"dependencies": {
|
||||
"debug": {
|
||||
"version": "4.3.1",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz",
|
||||
"integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==",
|
||||
"requires": {
|
||||
"ms": "2.1.2"
|
||||
}
|
||||
},
|
||||
"ms": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"http-signature": {
|
||||
@@ -6633,21 +6654,6 @@
|
||||
"requires": {
|
||||
"agent-base": "6",
|
||||
"debug": "4"
|
||||
},
|
||||
"dependencies": {
|
||||
"debug": {
|
||||
"version": "4.3.1",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz",
|
||||
"integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==",
|
||||
"requires": {
|
||||
"ms": "2.1.2"
|
||||
}
|
||||
},
|
||||
"ms": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"iconv-lite": {
|
||||
@@ -6788,13 +6794,6 @@
|
||||
"lodash.once": "^4.0.0",
|
||||
"ms": "^2.1.1",
|
||||
"semver": "^5.6.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"ms": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"jsprim": {
|
||||
@@ -7021,9 +7020,9 @@
|
||||
"integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="
|
||||
},
|
||||
"ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
},
|
||||
"nan": {
|
||||
"version": "2.14.2",
|
||||
@@ -10195,6 +10194,21 @@
|
||||
"statuses": "~1.5.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"debug": {
|
||||
"version": "2.6.9",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||
"requires": {
|
||||
"ms": "2.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
|
||||
}
|
||||
}
|
||||
},
|
||||
"ms": {
|
||||
"version": "2.1.3",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||
|
||||
@@ -5,13 +5,13 @@
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"postinstall": "redoc-cli bundle ./spec/openapi.yaml -o ./spec/openapi.html"
|
||||
"postinstall": "[[ -e ./node_modules/DOUGAL_ROOT ]] || ln -s .. ./node_modules/DOUGAL_ROOT; redoc-cli bundle ./spec/openapi.yaml -o ./spec/openapi.html"
|
||||
},
|
||||
"author": "Aaltronav s.r.o.",
|
||||
"license": "UNLICENSED",
|
||||
"private": true,
|
||||
"config": {
|
||||
"db_schema": "^0.3.0",
|
||||
"db_schema": "^0.3.11",
|
||||
"api": "^0.3.0"
|
||||
},
|
||||
"engines": {
|
||||
@@ -22,6 +22,7 @@
|
||||
],
|
||||
"dependencies": {
|
||||
"cookie-parser": "^1.4.5",
|
||||
"debug": "^4.3.4",
|
||||
"express": "^4.17.1",
|
||||
"express-jwt": "^6.0.0",
|
||||
"json2csv": "^5.0.6",
|
||||
|
||||
@@ -5,6 +5,7 @@ const cfg = require("../lib/config");
|
||||
const { navdata } = require('../lib/db');
|
||||
const sendAlert = require("../lib/alerts");
|
||||
const headers = require('../lib/headers');
|
||||
const { ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
|
||||
|
||||
function maybeSendError(error, payload = {}) {
|
||||
const defaults = {
|
||||
@@ -74,8 +75,11 @@ function parseMessages (buffer, formats = [ "hydronav", "labo", "smartsource" ])
|
||||
}
|
||||
// else unknown
|
||||
}
|
||||
if (header.tstamp && header._received) {
|
||||
header._latency = header._received - header.tstamp;
|
||||
if (header.tstamp) {
|
||||
navData.tstamp = header.tstamp;
|
||||
if (header._received) {
|
||||
header._latency = header._received - header.tstamp;
|
||||
}
|
||||
}
|
||||
navData.payload.push(header);
|
||||
}
|
||||
@@ -97,22 +101,22 @@ for (const header of (cfg._("global.navigation.headers") || []).filter(h => h.ty
|
||||
});
|
||||
|
||||
server.on('message', (msg, rinfo) => {
|
||||
// console.log(`${header.type} :${header.port} ← ${msg.length} bytes from ${rinfo.address}:${rinfo.port}`);
|
||||
DEBUG(`${header.type} :${header.port} ← ${msg.length} bytes from ${rinfo.address}:${rinfo.port}`);
|
||||
|
||||
const navData = parseMessages(msg);
|
||||
|
||||
if (navData.payload.length) {
|
||||
navData.payload = navData.payload.reduce( (a, b) => Object.assign(a, b), {});
|
||||
delete navData.payload._type;
|
||||
// console.log("SAVE", JSON.stringify(navData, null, 4));
|
||||
// console.log("META", header.meta);
|
||||
// DEBUG("SAVE", JSON.stringify(navData, null, 4));
|
||||
// DEBUG("META", header.meta);
|
||||
navdata.save(navData, header.meta);
|
||||
}
|
||||
});
|
||||
|
||||
server.on('listening', () => {
|
||||
const address = server.address();
|
||||
console.log(`server listening ${address.address}:${address.port}`);
|
||||
INFO(`server listening ${address.address}:${address.port}`);
|
||||
});
|
||||
|
||||
server.bind(header.port);
|
||||
|
||||
37
sbin/packet-capture.sh
Executable file
37
sbin/packet-capture.sh
Executable file
@@ -0,0 +1,37 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Capture network packets for later replay on dev machines
|
||||
#
|
||||
# This should be run as root via a service.
|
||||
#
|
||||
|
||||
OUTDIR="$(realpath "$(dirname "$0")/..")/var/pcap"
|
||||
OUTNAME="capture-$(hostname)-$(date -u +%s)-$$-pcap"
|
||||
OUTPATH="$OUTDIR/$OUTNAME"
|
||||
|
||||
# Inputs:
|
||||
#
|
||||
# 4461/UDP: GPS NMEA
|
||||
# 4462/UDP: AIS NMEA
|
||||
# 30000/UDP: Navigation system headers
|
||||
# Not all inputs will be present in all systems.
|
||||
#
|
||||
EXPR="udp and (port 4461 or port 4462 or port 30000)"
|
||||
|
||||
if [[ ! -d "$OUTDIR" ]]; then
|
||||
mkdir "$OUTDIR"
|
||||
fi
|
||||
|
||||
# The size of each capture file is 50 MB (-C 50)
|
||||
# and it will use a ring of 1000 files (-W 1000).
|
||||
# The capture packet size is unlimited (-s 0).
|
||||
#
|
||||
# 50 MB (47.7 MiB) is about one day's worth of data
|
||||
# so in theory it shouldn't overwrite files even if
|
||||
# it was running continuously for over two years.
|
||||
# NOTE: The above figures do not include AIS data.
|
||||
|
||||
echo "Logging to: $OUTPATH"
|
||||
echo "Expression: $EXPR"
|
||||
|
||||
tcpdump -n -s 0 -W 1000 -C 50 -w "$OUTPATH" "$EXPR"
|
||||
Reference in New Issue
Block a user