Compare commits

38 Commits

Author SHA1 Message Date
D. Berge
ad3998d4c6 Add database upgrade file 2021-05-24 17:41:11 +02:00
D. Berge
bc5aef5144 Run post-import functions after final lines.
The reason why we need to do it like this instead of relying on a trigger
is that the entry in final_lines is created first and only then are the
final_shots populated. If we fire the trigger on final_lines it is not going
to find any shots; if we fire it as a row trigger on final_shots it
would try to label every point in the sequence as it is imported; finally, if
we fire it as a statement trigger on final_shots we have no idea which
sequence was imported.
2021-05-24 16:59:56 +02:00
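A minimal sketch of the import order this relies on (sequence number, line name and the shot loading step are hypothetical):

    INSERT INTO final_lines (sequence, line, remarks, meta)
    VALUES (101, 'L1234', '', '{}');
    -- ... final_shots rows for sequence 101 are bulk-inserted here ...
    CALL final_line_post_import(101);  -- fired explicitly, once all shots exist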
D. Berge
2b798c3ea3 Ignore attempts to put the same label twice on the same event 2021-05-24 16:59:20 +02:00
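The mechanism is an ON CONFLICT clause on the labels' primary key, e.g. (event id hypothetical):

    INSERT INTO events_seq_labels (id, label)
    VALUES (42, 'FSP')
    ON CONFLICT ON CONSTRAINT events_seq_labels_pkey DO NOTHING;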
D. Berge
4d97784829 Upgrade database project schema template.
Adds:

* label_in_sequence (_sequence integer, _label text):
  Returns events containing the specified label.

* handle_final_line_events (_seq integer, _label text, _column text):
  - If _label does not exist in the events for sequence _seq:
    it adds a new _label label at the shotpoint obtained from
    final_lines_summary[_column].
  - If _label does exist (and hasn't been auto-added by this function
    in a previous run), it will add information about it to the final
    line's metadata.

* final_line_post_import (_seq integer):
  Calls handle_final_line_events() on the given sequence to check
  for FSP, FGSP, LGSP and LSP labels.

* events_seq_labels_single ():
  Trigger function to ensure that labels that have the attribute
  `model.multiple` set to `false` occur at most once per
  sequence. If a new instance is added to a sequence, the previous
  instance is deleted.

* Trigger on events_seq_labels that calls events_seq_labels_single().

* Trigger on events_timed_labels that calls events_seq_labels_single().
2021-05-24 16:49:39 +02:00
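A usage sketch of the new routines, assuming a hypothetical sequence 101:

    SELECT (label_in_sequence(101, 'FSP')).point;      -- NULL when the label is absent
    CALL handle_final_line_events(101, 'FSP', 'fsp');  -- one label / column pair
    CALL final_line_post_import(101);                  -- checks FSP, FGSP, LGSP and LSP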
D. Berge
5af89050fb Refactor SOL/EOL real-time detection handler.
This also implements a generic handler mechanism that can be
reused for other purposes, such as sending email / XMPP notifications,
doing real-time QC checks and so on.

Fixes #113.
2021-05-24 13:48:53 +02:00
D. Berge
d40ceb8343 Refactor list of notification channels into its own file 2021-05-24 13:38:19 +02:00
D. Berge
56d1279584 Allow api action to make arbitrary HTTP(S) requests.
If the URL is an absolute HTTP(S) one, we use it as-is.
2021-05-24 13:35:36 +02:00
D. Berge
d02edb4e76 Force the argument into String prior to splitting 2021-05-24 13:32:03 +02:00
D. Berge
9875ae86f3 Record P1/11 line name in database on import 2021-05-24 13:30:25 +02:00
D. Berge
53f71f7005 Set primary key on events_seq_labels in schema template 2021-05-23 22:27:00 +02:00
D. Berge
5de64e6b45 Add meta column to events view in schema template 2021-05-23 22:26:00 +02:00
D. Berge
67af85eca9 Recognise PENDING status in sequence imports.
If a final sequence file or directory name matches a pattern
which is recognised to indicate a ‘pending acceptance’ status,
the final data (if any exists) for that sequence will be deleted
and a comment added to the effect that the sequence has been
marked as ‘pending’.

To accept the sequence, rename its final file or directory
accordingly.

Note: it is the *final* data that is searched for a matching
pattern, not the raw.

Closes #91.
2021-05-21 15:15:15 +02:00
D. Berge
779b28a331 Add info table to system dumps 2021-05-21 12:18:36 +02:00
D. Berge
b9a4d18ed9 Do not fail if no equipment has been defined.
Fixes #112.
2021-05-20 21:16:39 +02:00
D. Berge
0dc9ac2b3c Merge branch '71-add-equipment-info-to-the-logs' into 'devel'
Resolve "Add equipment info to the logs"

Closes #71

See merge request wgp/dougal/software!10
2021-05-20 19:05:35 +00:00
D. Berge
39d85a692b Use default Nunjucks template if necessary.
If the survey configuration does not itself have a template
we will use the one in etc/defaults/templates/sequence.html.njk.

The template is not likely to change all that often, and this avoids
issues when people forget to copy it across to a new survey, etc.
2021-05-20 20:38:39 +02:00
D. Berge
e7661bfd1c Do not fail if requested object does not exist 2021-05-20 20:38:08 +02:00
D. Berge
1649de6c68 Update default sequence HTML template 2021-05-20 20:37:37 +02:00
D. Berge
1089d1fe75 Add equipment configuration frontend user interface 2021-05-20 18:35:56 +02:00
D. Berge
fc58a4d435 Implement equipment frontend component 2021-05-20 18:35:56 +02:00
D. Berge
c832d8b107 Commit default template for sequences 2021-05-20 18:35:56 +02:00
D. Berge
4a9e61be78 Add unique filter to Nunjucks renderer 2021-05-20 18:35:56 +02:00
D. Berge
8cfd1a7fc9 Export equipment info to Seis+JSON files 2021-05-20 18:35:56 +02:00
D. Berge
315733eec0 Refactor events export middleware.
Uses the `prepare` method for better reusability.
2021-05-20 18:35:56 +02:00
D. Berge
ad422abe94 Add prepare method for Seis+JSON and related exports.
It retrieves the data necessary for a complete Seis+JSON
export, including equipment info.
2021-05-20 18:35:56 +02:00
D. Berge
92210378e1 Listen for and broadcast info notifications 2021-05-20 18:21:01 +02:00
D. Berge
8d3e665206 Expose new API endpoint: /info/:path(*).
Provides CRUD access to values (which may be deeply nested) from the
global `info` table.
2021-05-20 18:19:29 +02:00
D. Berge
4ee65ef284 Implement info/delete middleware 2021-05-20 18:18:26 +02:00
D. Berge
d048a19066 Implement info/put middleware 2021-05-20 18:18:13 +02:00
D. Berge
97ed9bcce4 Implement info/post middleware 2021-05-20 18:17:52 +02:00
D. Berge
316117cb83 Implement info.delete() database method.
It deletes a (possibly deeply nested) element in the
`info` table.
2021-05-20 18:16:26 +02:00
D. Berge
1d38f6526b Implement info.put() database method.
Replaces an existing element with a new one, or inserts it
if there is nothing to replace. The element may be deeply
nested inside a JSON object or array in the `info` table.

Works for both public.info and survey_?.info.
2021-05-20 18:14:43 +02:00
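A sketch of the replace-or-insert semantics using jsonb_set (the 'equipment' key, path and payload are examples):

    INSERT INTO info (key, value)
    VALUES ('equipment', jsonb_set('[]'::jsonb, '{0}', '{"kind": "gun"}'::jsonb))
    ON CONFLICT (key) DO UPDATE
    SET value = jsonb_set(info.value, '{0}', '{"kind": "gun"}'::jsonb);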
D. Berge
6feb7d49ee Implement info.post() database method.
It adds an element to a JSON array corresponding to a
key in the info table. Errors out if the value is not
an array.
2021-05-20 18:13:15 +02:00
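The append relies on jsonb_insert with a '{-1}' path and insert_after = true; the info/post middleware maps PostgreSQL error 22023 to a 400 "Cannot post to non-array" response. A sketch (key and payload are examples):

    INSERT INTO info (key, value)
    VALUES ('equipment', jsonb_insert('[]'::jsonb, '{0}', '{"kind": "streamer"}'::jsonb))
    ON CONFLICT (key) DO UPDATE
    SET value = jsonb_insert(info.value, '{-1}'::text[], '{"kind": "streamer"}'::jsonb, true);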
D. Berge
ac51f72180 Ignore empty path parts in info.get() 2021-05-20 18:10:51 +02:00
D. Berge
86d3323869 Remove logging statement 2021-05-20 18:10:27 +02:00
D. Berge
b181e4f424 Let the user set the search path to no survey.
This is so that we can access tables in the `public`
schema which are overloaded by survey tables, as is
the case with `info`.
2021-05-20 18:08:03 +02:00
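In practice (sketch):

    SET search_path TO public;  -- no survey selected
    SELECT * FROM info;         -- resolves to public.info even where survey_?.info exists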
D. Berge
7917eeeb0b Add table info to schema.
This one is independent of any projects so it goes
into `public`.
2021-05-20 18:07:05 +02:00
D. Berge
b18907fb05 Merge branch '53-mark-points-as-not-to-be-acquired-ntba' into 'devel'
Resolve "Mark points as ‘not to be acquired’ (NTBA)"

Closes #53

See merge request wgp/dougal/software!9
2021-05-17 18:34:46 +00:00
40 changed files with 1799 additions and 147 deletions

View File

@@ -406,12 +406,12 @@ class Datastore:
self.del_hash("*online*", cursor)
qry = """
INSERT INTO raw_lines (sequence, line, remarks, ntbp, incr)
VALUES (%s, %s, '', %s, %s)
INSERT INTO raw_lines (sequence, line, remarks, ntbp, incr, meta)
VALUES (%s, %s, '', %s, %s, %s)
ON CONFLICT DO NOTHING;
"""
cursor.execute(qry, (fileinfo["sequence"], fileinfo["line"], ntbp, incr))
cursor.execute(qry, (fileinfo["sequence"], fileinfo["line"], ntbp, incr, json.dumps(fileinfo["meta"])))
qry = """
INSERT INTO raw_lines_files (sequence, hash)
@@ -448,12 +448,12 @@ class Datastore:
hash = self.add_file(filepath, cursor)
qry = """
INSERT INTO final_lines (sequence, line, remarks)
VALUES (%s, %s, '')
INSERT INTO final_lines (sequence, line, remarks, meta)
VALUES (%s, %s, '', %s)
ON CONFLICT DO NOTHING;
"""
cursor.execute(qry, (fileinfo["sequence"], fileinfo["line"]))
cursor.execute(qry, (fileinfo["sequence"], fileinfo["line"], json.dumps(fileinfo["meta"])))
qry = """
INSERT INTO final_lines_files (sequence, hash)
@@ -479,6 +479,8 @@ class Datastore:
if filedata is not None:
self.save_file_data(filepath, json.dumps(filedata), cursor)
cursor.execute("CALL final_line_post_import(%s);", (fileinfo["sequence"],))
self.maybe_commit()
@@ -639,3 +641,21 @@ class Datastore:
self.maybe_commit()
# We do not commit if we've been passed a cursor, instead
# we assume that we are in the middle of a transaction
def del_sequence_final(self, sequence, cursor = None):
"""
Remove final data for a sequence.
"""
if cursor is None:
cur = self.conn.cursor()
else:
cur = cursor
qry = "DELETE FROM files WHERE hash = (SELECT hash FROM final_lines_files WHERE sequence = %s);"
cur.execute(qry, (sequence,))
if cursor is None:
self.maybe_commit()
# We do not commit if we've been passed a cursor, instead
# we assume that we are in the middle of a transaction

View File

@@ -17,6 +17,35 @@ import configuration
import p111
from datastore import Datastore
def add_pending_remark(db, sequence):
text = '<!-- @@DGL:PENDING@@ --><h4 style="color:red;cursor:help;" title="Edit the sequence file or directory name to import final data">Marked as <code>PENDING</code>.</h4><!-- @@/DGL:PENDING@@ -->\n'
with db.conn.cursor() as cursor:
qry = "SELECT remarks FROM raw_lines WHERE sequence = %s;"
cursor.execute(qry, (sequence,))
remarks = cursor.fetchone()[0]
rx = re.compile("^(<!-- @@DGL:PENDING@@ -->.*<!-- @@/DGL:PENDING@@ -->\n)")
m = rx.match(remarks)
if m is None:
remarks = text + remarks
qry = "UPDATE raw_lines SET remarks = %s WHERE sequence = %s;"
cursor.execute(qry, (remarks, sequence))
db.maybe_commit()
def del_pending_remark(db, sequence):
with db.conn.cursor() as cursor:
qry = "SELECT remarks FROM raw_lines WHERE sequence = %s;"
cursor.execute(qry, (sequence,))
remarks = cursor.fetchone()[0]
rx = re.compile("^(<!-- @@DGL:PENDING@@ -->.*<!-- @@/DGL:PENDING@@ -->\n)")
m = rx.match(remarks)
if m is not None:
remarks = rx.sub("",remarks)
qry = "UPDATE raw_lines SET remarks = %s WHERE sequence = %s;"
cursor.execute(qry, (remarks, sequence))
db.maybe_commit()
if __name__ == '__main__':
print("Reading configuration")
@@ -42,6 +71,9 @@ if __name__ == '__main__':
pattern = final_p111["pattern"]
rx = re.compile(pattern["regex"])
if "pending" in survey["final"]:
pendingRx = re.compile(survey["final"]["pending"]["pattern"]["regex"])
for fileprefix in final_p111["paths"]:
print(f"Path prefix: {fileprefix}")
@@ -50,6 +82,10 @@ if __name__ == '__main__':
filepath = str(filepath)
print(f"Found {filepath}")
pending = False
if pendingRx:
pending = pendingRx.search(filepath) is not None
if not db.file_in_db(filepath):
age = time.time() - os.path.getmtime(filepath)
@@ -67,16 +103,30 @@ if __name__ == '__main__':
continue
file_info = dict(zip(pattern["captures"], match.groups()))
file_info["meta"] = {}
if pending:
print("Skipping / removing final file because marked as PENDING", filepath)
db.del_sequence_final(file_info["sequence"])
add_pending_remark(db, file_info["sequence"])
continue
else:
del_pending_remark(db, file_info["sequence"])
p111_data = p111.from_file(filepath)
print("Saving")
p111_records = p111.p111_type("S", p111_data)
file_info["meta"]["lineName"] = p111.line_name(p111_data)
db.save_final_p111(p111_records, file_info, filepath, survey["epsg"])
else:
print("Already in DB")
if pending:
print("Removing from database because marked as PENDING")
db.del_sequence_final(file_info["sequence"])
add_pending_remark(db, file_info["sequence"])
print("Done")

View File

@@ -75,12 +75,14 @@ if __name__ == '__main__':
continue
file_info = dict(zip(pattern["captures"], match.groups()))
file_info["meta"] = {}
p111_data = p111.from_file(filepath)
print("Saving")
p111_records = p111.p111_type("S", p111_data)
file_info["meta"]["lineName"] = p111.line_name(p111_data)
db.save_raw_p111(p111_records, file_info, filepath, survey["epsg"], ntbp=ntbp)
else:

View File

@@ -153,6 +153,9 @@ def parse_line (string):
return None
def line_name(records):
return set([ r['Acquisition Line Name'] for r in p111_type("S", records) ]).pop()
def p111_type(type, records):
return [ r for r in records if r["type"] == type ]

View File

@@ -24,6 +24,7 @@ locals().update(configuration.vars())
exportables = {
"public": {
"projects": [ "meta" ],
"info": None,
"real_time_inputs": None
},
"survey": {

View File

@@ -40,6 +40,10 @@ if __name__ == '__main__':
continue
try:
for table in exportables:
path = os.path.join(pathPrefix, table)
if os.path.exists(path):
cursor.execute(f"DELETE FROM {table};")
for table in exportables:
path = os.path.join(pathPrefix, table)
print("", path, "", table)

View File

@@ -19,6 +19,7 @@ locals().update(configuration.vars())
exportables = {
"public": {
"projects": [ "meta" ],
"info": None,
"real_time_inputs": None
},
"survey": {

View File

@@ -226,6 +226,18 @@ CREATE TABLE public.real_time_inputs (
ALTER TABLE public.real_time_inputs OWNER TO postgres;
--
-- Name: info; Type: TABLE; Schema: public; Owner: postgres
--
CREATE TABLE public.info (
key text NOT NULL,
value jsonb
);
ALTER TABLE public.info OWNER TO postgres;
--
-- Name: projects projects_name_key; Type: CONSTRAINT; Schema: public; Owner: postgres
--
@@ -250,6 +262,16 @@ ALTER TABLE ONLY public.projects
ADD CONSTRAINT projects_schema_key UNIQUE (schema);
--
-- Name: info info_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.info
ADD CONSTRAINT info_pkey PRIMARY KEY (key);
--
-- Name: tstamp_idx; Type: INDEX; Schema: public; Owner: postgres
--
@@ -271,6 +293,13 @@ CREATE TRIGGER projects_tg AFTER INSERT OR DELETE OR UPDATE ON public.projects F
CREATE TRIGGER real_time_inputs_tg AFTER INSERT ON public.real_time_inputs FOR EACH ROW EXECUTE FUNCTION public.notify('realtime');
--
-- Name: info info_tg; Type: TRIGGER; Schema: public; Owner: postgres
--
CREATE TRIGGER info_tg AFTER INSERT OR DELETE OR UPDATE ON public.info FOR EACH ROW EXECUTE FUNCTION public.notify('info');
--
-- PostgreSQL database dump complete
--

View File

@@ -2,8 +2,8 @@
-- PostgreSQL database dump
--
-- Dumped from database version 12.4
-- Dumped by pg_dump version 12.4
-- Dumped from database version 12.6
-- Dumped by pg_dump version 12.7
SET statement_timeout = 0;
SET lock_timeout = 0;
@@ -136,6 +136,38 @@ $$;
ALTER FUNCTION _SURVEY__TEMPLATE_.clear_shot_qc() OWNER TO postgres;
--
-- Name: events_seq_labels_single(); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
CREATE FUNCTION _SURVEY__TEMPLATE_.events_seq_labels_single() RETURNS trigger
LANGUAGE plpgsql
AS $$
DECLARE _sequence integer;
BEGIN
IF EXISTS(SELECT 1 FROM labels WHERE name = NEW.label AND (data->'model'->'multiple')::boolean IS FALSE) THEN
SELECT sequence INTO _sequence FROM events WHERE id = NEW.id;
DELETE
FROM events_seq_labels
WHERE
id <> NEW.id
AND label = NEW.label
AND id IN (SELECT id FROM events_seq WHERE sequence = _sequence);
DELETE
FROM events_timed_labels
WHERE
id <> NEW.id
AND label = NEW.label
AND id IN (SELECT id FROM events_timed_seq WHERE sequence = _sequence);
END IF;
RETURN NULL;
END;
$$;
ALTER FUNCTION _SURVEY__TEMPLATE_.events_seq_labels_single() OWNER TO postgres;
--
-- Name: events_timed_seq_match(); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
@@ -213,82 +245,102 @@ $$;
ALTER PROCEDURE _SURVEY__TEMPLATE_.events_timed_seq_update_all() OWNER TO postgres;
--
-- Name: reset_events_serials(); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
-- Name: final_line_post_import(integer); Type: PROCEDURE; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
CREATE FUNCTION _SURVEY__TEMPLATE_.reset_events_serials() RETURNS void
CREATE PROCEDURE _SURVEY__TEMPLATE_.final_line_post_import(_seq integer)
LANGUAGE plpgsql
AS $$
BEGIN
PERFORM setval('events_timed_id_seq', (SELECT max(id)+1 FROM events_timed));
PERFORM setval('events_seq_id_seq', (SELECT max(id)+1 FROM events_seq));
CALL handle_final_line_events(_seq, 'FSP', 'fsp');
CALL handle_final_line_events(_seq, 'FGSP', 'fsp');
CALL handle_final_line_events(_seq, 'LGSP', 'lsp');
CALL handle_final_line_events(_seq, 'LSP', 'lsp');
END;
$$;
ALTER FUNCTION _SURVEY__TEMPLATE_.reset_events_serials() OWNER TO postgres;
ALTER PROCEDURE _SURVEY__TEMPLATE_.final_line_post_import(_seq integer) OWNER TO postgres;
--
-- Name: to_binning_grid(public.geometry); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
-- Name: handle_final_line_events(integer, text, text); Type: PROCEDURE; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
CREATE FUNCTION _SURVEY__TEMPLATE_.to_binning_grid(geom public.geometry) RETURNS public.geometry
LANGUAGE plpgsql STABLE LEAKPROOF
AS $$DECLARE
bp jsonb := binning_parameters();
theta numeric := (bp->>'theta')::numeric * pi() / 180;
I_inc numeric DEFAULT 1;
J_inc numeric DEFAULT 1;
I_width numeric := (bp->>'I_width')::numeric;
J_width numeric := (bp->>'J_width')::numeric;
CREATE PROCEDURE _SURVEY__TEMPLATE_.handle_final_line_events(_seq integer, _label text, _column text)
LANGUAGE plpgsql
AS $$
a numeric := (I_inc/I_width) * cos(theta);
b numeric := (I_inc/I_width) * -sin(theta);
c numeric := (J_inc/J_width) * sin(theta);
d numeric := (J_inc/J_width) * cos(theta);
xoff numeric := (bp->'origin'->>'I')::numeric;
yoff numeric := (bp->'origin'->>'J')::numeric;
E0 numeric := (bp->'origin'->>'easting')::numeric;
N0 numeric := (bp->'origin'->>'northing')::numeric;
DECLARE
_line final_lines_summary%ROWTYPE;
_column_value integer;
_tg_name text := 'final_line';
_event events%ROWTYPE;
event_id integer;
BEGIN
-- RAISE NOTICE 'Matrix: a: %, b: %, c: %, d: %, xoff: %, yoff: %', a, b, c, d, xoff, yoff;
RETURN ST_SetSRID(ST_Affine(ST_Translate(geom, -E0, -N0), a, b, c, d, xoff, yoff), 0);
END
SELECT * INTO _line FROM final_lines_summary WHERE sequence = _seq;
_event := label_in_sequence(_seq, _label);
_column_value := row_to_json(_line)->>_column;
--RAISE NOTICE '% is %', _label, _event;
--RAISE NOTICE 'Line is %', _line;
--RAISE NOTICE '% is % (%)', _column, _column_value, _label;
IF _event IS NULL THEN
--RAISE NOTICE 'We will populate the event log from the sequence data';
SELECT id INTO event_id FROM events_seq WHERE sequence = _seq AND point = _column_value ORDER BY id LIMIT 1;
IF event_id IS NULL THEN
--RAISE NOTICE ' but there is no existing event so we create a new one for sequence % and point %', _line.sequence, _column_value;
INSERT INTO events_seq (sequence, point, remarks)
VALUES (_line.sequence, _column_value, format('%s %s', _label, (SELECT meta->>'lineName' FROM final_lines WHERE sequence = _seq)))
RETURNING id INTO event_id;
--RAISE NOTICE 'Created event_id %', event_id;
END IF;
--RAISE NOTICE 'Remove any other auto-inserted % labels in sequence %', _label, _seq;
DELETE FROM events_seq_labels
WHERE label = _label AND id = (SELECT id FROM events_seq WHERE sequence = _seq AND meta->'auto' ? _label);
--RAISE NOTICE 'We now add a label to the event (id, label) = (%, %)', event_id, _label;
INSERT INTO events_seq_labels (id, label) VALUES (event_id, _label) ON CONFLICT ON CONSTRAINT events_seq_labels_pkey DO NOTHING;
--RAISE NOTICE 'And also clear the %: % flag from meta.auto for any existing events for sequence %', _label, _tg_name, _seq;
UPDATE events_seq
SET meta = meta #- ARRAY['auto', _label]
WHERE meta->'auto' ? _label AND sequence = _seq AND id <> event_id;
--RAISE NOTICE 'Finally, flag the event as having had label % auto-created by %', _label, _tg_name;
UPDATE events_seq
SET meta = jsonb_set(jsonb_set(meta, '{auto}', COALESCE(meta->'auto', '{}')), ARRAY['auto', _label], to_jsonb(_tg_name))
WHERE id = event_id;
ELSE
--RAISE NOTICE 'We may populate the sequence meta from the event log';
--RAISE NOTICE 'Unless the event log was populated by us previously';
--RAISE NOTICE 'Populated by us previously? %', _event.meta->'auto'->>_label = _tg_name;
IF _event.meta->'auto'->>_label IS DISTINCT FROM _tg_name THEN
--RAISE NOTICE 'Adding % found in events log to final_line meta', _label;
UPDATE final_lines
SET meta = jsonb_set(meta, ARRAY[_label], to_jsonb(_event.point))
WHERE sequence = _seq;
--RAISE NOTICE 'Clearing the %: % flag from meta.auto for any existing events in sequence %', _label, _tg_name, _seq;
UPDATE events_seq
SET meta = meta #- ARRAY['auto', _label]
WHERE sequence = _seq AND meta->'auto'->>_label = _tg_name;
END IF;
END IF;
END;
$$;
ALTER FUNCTION _SURVEY__TEMPLATE_.to_binning_grid(geom public.geometry) OWNER TO postgres;
--
-- Name: to_binning_grid(public.geometry, jsonb); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
CREATE FUNCTION _SURVEY__TEMPLATE_.to_binning_grid(geom public.geometry, bp jsonb) RETURNS public.geometry
LANGUAGE plpgsql IMMUTABLE PARALLEL SAFE
AS $$DECLARE
-- bp jsonb := binning_parameters();
theta numeric := (bp->>'theta')::numeric * pi() / 180;
I_inc numeric DEFAULT 1;
J_inc numeric DEFAULT 1;
I_width numeric := (bp->>'I_width')::numeric;
J_width numeric := (bp->>'J_width')::numeric;
a numeric := (I_inc/I_width) * cos(theta);
b numeric := (I_inc/I_width) * -sin(theta);
c numeric := (J_inc/J_width) * sin(theta);
d numeric := (J_inc/J_width) * cos(theta);
xoff numeric := (bp->'origin'->>'I')::numeric;
yoff numeric := (bp->'origin'->>'J')::numeric;
E0 numeric := (bp->'origin'->>'easting')::numeric;
N0 numeric := (bp->'origin'->>'northing')::numeric;
BEGIN
-- RAISE NOTICE 'Matrix: a: %, b: %, c: %, d: %, xoff: %, yoff: %', a, b, c, d, xoff, yoff;
RETURN ST_SetSRID(ST_Affine(ST_Translate(geom, -E0, -N0), a, b, c, d, xoff, yoff), 0);
END
$$;
ALTER FUNCTION _SURVEY__TEMPLATE_.to_binning_grid(geom public.geometry, bp jsonb) OWNER TO postgres;
ALTER PROCEDURE _SURVEY__TEMPLATE_.handle_final_line_events(_seq integer, _label text, _column text) OWNER TO postgres;
SET default_tablespace = '';
@@ -430,6 +482,7 @@ CREATE VIEW _SURVEY__TEMPLATE_.events_seq_timed AS
rs.objref,
rs.tstamp,
rs.hash,
s.meta,
rs.geometry
FROM (_SURVEY__TEMPLATE_.events_seq s
LEFT JOIN _SURVEY__TEMPLATE_.raw_shots rs USING (sequence, point));
@@ -524,6 +577,7 @@ CREATE VIEW _SURVEY__TEMPLATE_.events AS
s.objref,
s.tstamp,
s.hash,
s.meta,
(public.st_asgeojson(public.st_transform(s.geometry, 4326)))::jsonb AS geometry,
ARRAY( SELECT esl.label
FROM _SURVEY__TEMPLATE_.events_seq_labels esl
@@ -540,6 +594,7 @@ UNION
rs.objref,
t.tstamp,
rs.hash,
t.meta,
(t.meta -> 'geometry'::text) AS geometry,
ARRAY( SELECT etl.label
FROM _SURVEY__TEMPLATE_.events_timed_labels etl
@@ -558,6 +613,7 @@ UNION
v1.objref,
v1.tstamp,
v1.hash,
'{}'::jsonb AS meta,
(public.st_asgeojson(public.st_transform(v1.geometry, 4326)))::jsonb AS geometry,
ARRAY[v1.label] AS labels
FROM _SURVEY__TEMPLATE_.events_midnight_shot v1
@@ -572,6 +628,7 @@ UNION
rs.objref,
rs.tstamp,
rs.hash,
'{}'::jsonb AS meta,
(public.st_asgeojson(public.st_transform(rs.geometry, 4326)))::jsonb AS geometry,
('{QC}'::text[] || qc.labels) AS labels
FROM (_SURVEY__TEMPLATE_.raw_shots rs
@@ -582,6 +639,97 @@ UNION
ALTER TABLE _SURVEY__TEMPLATE_.events OWNER TO postgres;
--
-- Name: label_in_sequence(integer, text); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
CREATE FUNCTION _SURVEY__TEMPLATE_.label_in_sequence(_sequence integer, _label text) RETURNS _SURVEY__TEMPLATE_.events
LANGUAGE sql
AS $$
SELECT * FROM events WHERE sequence = _sequence AND _label = ANY(labels);
$$;
ALTER FUNCTION _SURVEY__TEMPLATE_.label_in_sequence(_sequence integer, _label text) OWNER TO postgres;
--
-- Name: reset_events_serials(); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
CREATE FUNCTION _SURVEY__TEMPLATE_.reset_events_serials() RETURNS void
LANGUAGE plpgsql
AS $$
BEGIN
PERFORM setval('events_timed_id_seq', (SELECT max(id)+1 FROM events_timed));
PERFORM setval('events_seq_id_seq', (SELECT max(id)+1 FROM events_seq));
END;
$$;
ALTER FUNCTION _SURVEY__TEMPLATE_.reset_events_serials() OWNER TO postgres;
--
-- Name: to_binning_grid(public.geometry); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
CREATE FUNCTION _SURVEY__TEMPLATE_.to_binning_grid(geom public.geometry) RETURNS public.geometry
LANGUAGE plpgsql STABLE LEAKPROOF
AS $$DECLARE
bp jsonb := binning_parameters();
theta numeric := (bp->>'theta')::numeric * pi() / 180;
I_inc numeric DEFAULT 1;
J_inc numeric DEFAULT 1;
I_width numeric := (bp->>'I_width')::numeric;
J_width numeric := (bp->>'J_width')::numeric;
a numeric := (I_inc/I_width) * cos(theta);
b numeric := (I_inc/I_width) * -sin(theta);
c numeric := (J_inc/J_width) * sin(theta);
d numeric := (J_inc/J_width) * cos(theta);
xoff numeric := (bp->'origin'->>'I')::numeric;
yoff numeric := (bp->'origin'->>'J')::numeric;
E0 numeric := (bp->'origin'->>'easting')::numeric;
N0 numeric := (bp->'origin'->>'northing')::numeric;
BEGIN
-- RAISE NOTICE 'Matrix: a: %, b: %, c: %, d: %, xoff: %, yoff: %', a, b, c, d, xoff, yoff;
RETURN ST_SetSRID(ST_Affine(ST_Translate(geom, -E0, -N0), a, b, c, d, xoff, yoff), 0);
END
$$;
ALTER FUNCTION _SURVEY__TEMPLATE_.to_binning_grid(geom public.geometry) OWNER TO postgres;
--
-- Name: to_binning_grid(public.geometry, jsonb); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
CREATE FUNCTION _SURVEY__TEMPLATE_.to_binning_grid(geom public.geometry, bp jsonb) RETURNS public.geometry
LANGUAGE plpgsql IMMUTABLE PARALLEL SAFE
AS $$DECLARE
-- bp jsonb := binning_parameters();
theta numeric := (bp->>'theta')::numeric * pi() / 180;
I_inc numeric DEFAULT 1;
J_inc numeric DEFAULT 1;
I_width numeric := (bp->>'I_width')::numeric;
J_width numeric := (bp->>'J_width')::numeric;
a numeric := (I_inc/I_width) * cos(theta);
b numeric := (I_inc/I_width) * -sin(theta);
c numeric := (J_inc/J_width) * sin(theta);
d numeric := (J_inc/J_width) * cos(theta);
xoff numeric := (bp->'origin'->>'I')::numeric;
yoff numeric := (bp->'origin'->>'J')::numeric;
E0 numeric := (bp->'origin'->>'easting')::numeric;
N0 numeric := (bp->'origin'->>'northing')::numeric;
BEGIN
-- RAISE NOTICE 'Matrix: a: %, b: %, c: %, d: %, xoff: %, yoff: %', a, b, c, d, xoff, yoff;
RETURN ST_SetSRID(ST_Affine(ST_Translate(geom, -E0, -N0), a, b, c, d, xoff, yoff), 0);
END
$$;
ALTER FUNCTION _SURVEY__TEMPLATE_.to_binning_grid(geom public.geometry, bp jsonb) OWNER TO postgres;
--
-- Name: events_labels; Type: VIEW; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
@@ -824,7 +972,8 @@ CREATE VIEW _SURVEY__TEMPLATE_.final_lines_summary AS
WHERE ((preplot_points.line = fl.line) AND (((preplot_points.point >= s.fsp) AND (preplot_points.point <= s.lsp)) OR ((preplot_points.point >= s.lsp) AND (preplot_points.point <= s.fsp))))) - s.num_points) AS missing_shots,
s.length,
s.azimuth,
fl.remarks
fl.remarks,
fl.meta
FROM (summary s
JOIN _SURVEY__TEMPLATE_.final_lines fl USING (sequence));
@@ -1555,6 +1704,14 @@ ALTER TABLE ONLY _SURVEY__TEMPLATE_.events_seq ALTER COLUMN id SET DEFAULT nextv
ALTER TABLE ONLY _SURVEY__TEMPLATE_.events_timed ALTER COLUMN id SET DEFAULT nextval('_SURVEY__TEMPLATE_.events_timed_id_seq'::regclass);
--
-- Name: events_seq_labels events_seq_labels_pkey; Type: CONSTRAINT; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
ALTER TABLE ONLY _SURVEY__TEMPLATE_.events_seq_labels
ADD CONSTRAINT events_seq_labels_pkey PRIMARY KEY (id, label);
--
-- Name: events_seq events_seq_pkey; Type: CONSTRAINT; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
@@ -1713,6 +1870,20 @@ CREATE INDEX events_seq_sequence_idx ON _SURVEY__TEMPLATE_.events_seq USING btre
CREATE INDEX events_timed_ts0_idx ON _SURVEY__TEMPLATE_.events_timed USING btree (tstamp);
--
-- Name: events_seq_labels events_seq_labels_single_tg; Type: TRIGGER; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
CREATE TRIGGER events_seq_labels_single_tg AFTER INSERT OR UPDATE ON _SURVEY__TEMPLATE_.events_seq_labels FOR EACH ROW EXECUTE FUNCTION _SURVEY__TEMPLATE_.events_seq_labels_single();
--
-- Name: events_timed_labels events_timed_labels_single_tg; Type: TRIGGER; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
CREATE TRIGGER events_timed_labels_single_tg AFTER INSERT OR UPDATE ON _SURVEY__TEMPLATE_.events_timed_labels FOR EACH ROW EXECUTE FUNCTION _SURVEY__TEMPLATE_.events_seq_labels_single();
--
-- Name: events_seq events_tg; Type: TRIGGER; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--

View File

@@ -0,0 +1,171 @@
-- Upgrade the database from commit 53f71f70 to 4d977848.
--
-- NOTE: This upgrade must be applied to every schema in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This adds:
--
-- * label_in_sequence (_sequence integer, _label text):
-- Returns events containing the specified label.
--
-- * handle_final_line_events (_seq integer, _label text, _column text):
-- - If _label does not exist in the events for sequence _seq:
-- it adds a new _label label at the shotpoint obtained from
-- final_lines_summary[_column].
-- - If _label does exist (and hasn't been auto-added by this function
-- in a previous run), it will add information about it to the final
-- line's metadata.
--
-- * final_line_post_import (_seq integer):
-- Calls handle_final_line_events() on the given sequence to check
-- for FSP, FGSP, LGSP and LSP labels.
--
-- * events_seq_labels_single ():
-- Trigger function to ensure that labels that have the attribute
-- `model.multiple` set to `false` occur at most once per
-- sequence. If a new instance is added to a sequence, the previous
-- instance is deleted.
--
-- * Trigger on events_seq_labels that calls events_seq_labels_single().
--
-- * Trigger on events_timed_labels that calls events_seq_labels_single().
--
-- To apply, run as the dougal user, for every schema in the database:
--
-- psql <<EOF
-- SET search_path TO survey_*,public;
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It will fail harmlessly if applied twice.
BEGIN;
CREATE OR REPLACE FUNCTION label_in_sequence (_sequence integer, _label text)
RETURNS events
LANGUAGE sql
AS $$
SELECT * FROM events WHERE sequence = _sequence AND _label = ANY(labels);
$$;
CREATE OR REPLACE PROCEDURE handle_final_line_events (_seq integer, _label text, _column text)
LANGUAGE plpgsql
AS $$
DECLARE
_line final_lines_summary%ROWTYPE;
_column_value integer;
_tg_name text := 'final_line';
_event events%ROWTYPE;
event_id integer;
BEGIN
SELECT * INTO _line FROM final_lines_summary WHERE sequence = _seq;
_event := label_in_sequence(_seq, _label);
_column_value := row_to_json(_line)->>_column;
--RAISE NOTICE '% is %', _label, _event;
--RAISE NOTICE 'Line is %', _line;
--RAISE NOTICE '% is % (%)', _column, _column_value, _label;
IF _event IS NULL THEN
--RAISE NOTICE 'We will populate the event log from the sequence data';
SELECT id INTO event_id FROM events_seq WHERE sequence = _seq AND point = _column_value ORDER BY id LIMIT 1;
IF event_id IS NULL THEN
--RAISE NOTICE '… but there is no existing event so we create a new one for sequence % and point %', _line.sequence, _column_value;
INSERT INTO events_seq (sequence, point, remarks)
VALUES (_line.sequence, _column_value, format('%s %s', _label, (SELECT meta->>'lineName' FROM final_lines WHERE sequence = _seq)))
RETURNING id INTO event_id;
--RAISE NOTICE 'Created event_id %', event_id;
END IF;
--RAISE NOTICE 'Remove any other auto-inserted % labels in sequence %', _label, _seq;
DELETE FROM events_seq_labels
WHERE label = _label AND id = (SELECT id FROM events_seq WHERE sequence = _seq AND meta->'auto' ? _label);
--RAISE NOTICE 'We now add a label to the event (id, label) = (%, %)', event_id, _label;
INSERT INTO events_seq_labels (id, label) VALUES (event_id, _label) ON CONFLICT ON CONSTRAINT events_seq_labels_pkey DO NOTHING;
--RAISE NOTICE 'And also clear the %: % flag from meta.auto for any existing events for sequence %', _label, _tg_name, _seq;
UPDATE events_seq
SET meta = meta #- ARRAY['auto', _label]
WHERE meta->'auto' ? _label AND sequence = _seq AND id <> event_id;
--RAISE NOTICE 'Finally, flag the event as having had label % auto-created by %', _label, _tg_name;
UPDATE events_seq
SET meta = jsonb_set(jsonb_set(meta, '{auto}', COALESCE(meta->'auto', '{}')), ARRAY['auto', _label], to_jsonb(_tg_name))
WHERE id = event_id;
ELSE
--RAISE NOTICE 'We may populate the sequence meta from the event log';
--RAISE NOTICE 'Unless the event log was populated by us previously';
--RAISE NOTICE 'Populated by us previously? %', _event.meta->'auto'->>_label = _tg_name;
IF _event.meta->'auto'->>_label IS DISTINCT FROM _tg_name THEN
--RAISE NOTICE 'Adding % found in events log to final_line meta', _label;
UPDATE final_lines
SET meta = jsonb_set(meta, ARRAY[_label], to_jsonb(_event.point))
WHERE sequence = _seq;
--RAISE NOTICE 'Clearing the %: % flag from meta.auto for any existing events in sequence %', _label, _tg_name, _seq;
UPDATE events_seq
SET meta = meta #- ARRAY['auto', _label]
WHERE sequence = _seq AND meta->'auto'->>_label = _tg_name;
END IF;
END IF;
END;
$$;
CREATE OR REPLACE PROCEDURE final_line_post_import (_seq integer)
LANGUAGE plpgsql
AS $$
BEGIN
CALL handle_final_line_events(_seq, 'FSP', 'fsp');
CALL handle_final_line_events(_seq, 'FGSP', 'fsp');
CALL handle_final_line_events(_seq, 'LGSP', 'lsp');
CALL handle_final_line_events(_seq, 'LSP', 'lsp');
END;
$$;
CREATE OR REPLACE FUNCTION events_seq_labels_single ()
RETURNS trigger
LANGUAGE plpgsql
AS $$
DECLARE _sequence integer;
BEGIN
IF EXISTS(SELECT 1 FROM labels WHERE name = NEW.label AND (data->'model'->'multiple')::boolean IS FALSE) THEN
SELECT sequence INTO _sequence FROM events WHERE id = NEW.id;
DELETE
FROM events_seq_labels
WHERE
id <> NEW.id
AND label = NEW.label
AND id IN (SELECT id FROM events_seq WHERE sequence = _sequence);
DELETE
FROM events_timed_labels
WHERE
id <> NEW.id
AND label = NEW.label
AND id IN (SELECT id FROM events_timed_seq WHERE sequence = _sequence);
END IF;
RETURN NULL;
END;
$$;
CREATE TRIGGER events_seq_labels_single_tg AFTER INSERT OR UPDATE ON events_seq_labels FOR EACH ROW EXECUTE FUNCTION events_seq_labels_single();
CREATE TRIGGER events_timed_labels_single_tg AFTER INSERT OR UPDATE ON events_timed_labels FOR EACH ROW EXECUTE FUNCTION events_seq_labels_single();
--
-- NOTE: Run `COMMIT;` now if all went well
--

File diff suppressed because one or more lines are too long

View File

@@ -12,6 +12,34 @@
<v-toolbar-title class="mx-2" @click="$router.push('/')" style="cursor: pointer;">Dougal</v-toolbar-title>
<v-spacer></v-spacer>
<v-menu bottom offset-y>
<template v-slot:activator="{on, attrs}">
<v-hover v-slot="{hover}">
<v-btn
class="align-self-center"
:xcolor="hover ? 'secondary' : 'secondary lighten-3'"
small
text
v-bind="attrs"
v-on="on"
title="Settings"
>
<v-icon small>mdi-cog-outline</v-icon>
</v-btn>
</v-hover>
</template>
<v-list dense>
<v-list-item :href="`/settings/equipment`">
<v-list-item-title>Equipment list</v-list-item-title>
<v-list-item-action><v-icon small>mdi-view-list</v-icon></v-list-item-action>
</v-list-item>
</v-list>
</v-menu>
<v-breadcrumbs :items="path"></v-breadcrumbs>
<template v-if="$route.name != 'Login'">

View File

@@ -41,6 +41,11 @@ Vue.use(VueRouter)
// which is lazy-loaded when the route is visited.
component: () => import(/* webpackChunkName: "about" */ '../views/Feed.vue')
},
{
path: "/settings/equipment",
name: "equipment",
component: () => import(/* webpackChunkName: "about" */ '../views/Equipment.vue')
},
{
pathToRegexpOptions: { strict: true },
path: "/login",

View File

@@ -13,7 +13,8 @@ async function api ({state, commit, dispatch}, [resource, init = {}, cb]) {
init.body = JSON.stringify(init.body);
}
}
const res = await fetch(`${state.apiUrl}${resource}`, init);
const url = /^https?:\/\//i.test(resource) ? resource : (state.apiUrl + resource);
const res = await fetch(url, init);
if (typeof cb === 'function') {
cb(null, res);
}

View File

@@ -0,0 +1,513 @@
<template>
<v-container fluid>
<v-row>
<v-col>
<v-dialog
max-width="600px"
:value="dialog"
@input="closeDialog"
>
<template v-slot:activator="{ on, attrs }">
<v-btn v-if="writeaccess"
small
color="primary"
v-bind="attrs"
v-on="on"
>Add</v-btn>
</template>
<v-card>
<v-card-title v-if="dialogMode=='new'">Add new item</v-card-title>
<v-card-title v-else>Edit item</v-card-title>
<v-card-text>
<v-container>
<v-row>
<v-col cols="12">
<v-text-field
label="Kind"
required
v-model="item.kind"
:disabled="dialogMode == 'edit'"
>
</v-text-field>
</v-col>
<v-col cols="12">
<v-textarea
class="markdown"
label="Description"
dense
auto-grow
rows="1"
v-model="item.description"
>
</v-textarea>
</v-col>
<v-col cols="6">
<v-text-field
label="Date"
type="date"
step="1"
v-model="item.date"
>
</v-text-field>
</v-col>
<v-col cols="6">
<v-text-field
label="Time"
type="time"
step="60"
v-model="item.time"
>
</v-text-field>
</v-col>
<template v-for="(attr, idx) in item.attributes">
<v-col cols="4">
<v-text-field
label="Attribute"
v-model="attr.key"
>
</v-text-field>
</v-col>
<v-col cols="8">
<v-textarea
label="Value"
class="markdown"
auto-grow
rows="1"
v-model="attr.value"
>
<template v-slot:append-outer>
<v-btn
fab
x-small
dark
color="red"
title="Remove this attribute / value pair"
@click="removeAttribute(idx)"
>
<v-icon>mdi-minus</v-icon>
</v-btn>
</template>
</v-textarea>
</v-col>
</template>
<v-col cols="12" class="text-right">
<v-btn
fab
x-small
color="primary"
title="Add a new attribute / value pair to further describe the equipment"
@click="addAttribute"
>
<v-icon>mdi-plus</v-icon>
</v-btn>
</v-col>
</v-row>
</v-container>
</v-card-text>
<v-card-actions>
<v-btn
color="warning"
@click="closeDialog"
>
Cancel
</v-btn>
<v-spacer></v-spacer>
<v-btn
color="success"
:loading="loading"
:disabled="!canSave || loading"
@click="saveItem"
>
Save
</v-btn>
</v-card-actions>
</v-card>
</v-dialog>
</v-col>
</v-row>
<v-row>
<v-col cols="4">
<v-toolbar
dense
flat
>
<v-toolbar-title>
Equipment
</v-toolbar-title>
</v-toolbar>
<v-list dense two-line>
<v-subheader v-if="!latest.length">
There are no items of equipment
</v-subheader>
<v-list-item-group
v-model="selectedIndex"
color="primary"
>
<v-list-item v-for="(item, idx) in latest" :key="idx">
<v-list-item-content>
<v-list-item-title>
{{item.kind}}
</v-list-item-title>
<v-list-item-subtitle>
Last updated: {{item.tstamp.substring(0,16)}}Z
</v-list-item-subtitle>
</v-list-item-content>
</v-list-item>
</v-list-item-group>
</v-list>
</v-col>
<v-col cols="8">
<v-card v-if="selectedItem">
<v-card-title>{{selectedItem.kind}}</v-card-title>
<v-card-subtitle class="text-caption">{{selectedItem.tstamp}}</v-card-subtitle>
<v-card-text>
<v-container>
<v-row>
<div v-html="$options.filters.markdown(selectedItem.description||'')"></div>
</v-row>
<v-row>
<v-simple-table>
<template v-slot:default>
<tbody>
<tr v-for="(attr, idx) in selectedItem.attributes" :key="idx">
<td>{{attr.key}}</td>
<td v-html="$options.filters.markdown(attr.value||'')"></td>
</tr>
</tbody>
</template>
</v-simple-table>
</v-row>
</v-container>
</v-card-text>
<v-card-actions>
<v-btn v-if="writeaccess"
small
text
color="primary"
title="Make a change to this item"
@click="editItem(selectedItem)"
>
Update
</v-btn>
<v-btn-toggle
group
v-model="historyMode"
>
<v-btn
small
text
:disabled="false"
title="View item's full history of changes"
>
History
</v-btn>
</v-btn-toggle>
<v-spacer></v-spacer>
<v-btn v-if="writeaccess"
small
dark
color="red"
title="Remove this instance from the item's history"
@click="confirmDelete(selectedItem)"
>
Delete
</v-btn>
</v-card-actions>
</v-card>
<v-subheader v-else-if="latest.length" class="justify-center">Select an item from the list</v-subheader>
<v-expand-transition v-if="selectedItem">
<div v-if="historyMode===0">
<v-subheader v-if="!selectedItemHistory || !selectedItemHistory.length"
class="justify-center"
>No more history</v-subheader>
<v-card v-for="item in selectedItemHistory" class="mt-5">
<v-card-title>{{selectedItem.kind}}</v-card-title>
<v-card-subtitle class="text-caption">{{item.tstamp}}</v-card-subtitle>
<v-card-text>
<v-container>
<v-row>
<div v-html="$options.filters.markdown(item.description||'')"></div>
</v-row>
<v-row>
<v-simple-table>
<template v-slot:default>
<tbody>
<tr v-for="(attr, idx) in item.attributes" :key="idx">
<td>{{attr.key}}</td>
<td v-html="$options.filters.markdown(attr.value||'')"></td>
</tr>
</tbody>
</template>
</v-simple-table>
</v-row>
</v-container>
</v-card-text>
<v-card-actions>
<v-spacer></v-spacer>
<v-btn v-if="writeaccess"
small
dark
color="red"
title="Remove this instance from the item's history"
@click="confirmDelete(item)"
>
Delete
</v-btn>
</v-card-actions>
</v-card>
</div>
</v-expand-transition>
</v-col>
</v-row>
<v-dialog
:value="confirm.message"
max-width="500px"
persistent
>
<v-sheet
class="px-7 pt-7 pb-4 mx-auto text-center d-inline-block"
color="blue-grey darken-3"
dark
>
<div class="grey--text text--lighten-1 text-body-2 mb-4" v-html="confirm.message"></div>
<v-btn
:disabled="loading"
class="ma-1"
color="grey"
plain
@click="cancelConfirmAction"
>
{{ confirm.no || "Cancel" }}
</v-btn>
<v-btn
:loading="loading"
class="ma-1"
color="error"
plain
@click="doConfirmAction"
>
{{ confirm.yes || "Delete" }}
</v-btn>
</v-sheet>
</v-dialog>
</v-container>
</template>
<script>
import { mapActions, mapGetters } from 'vuex';
export default {
name: "Equipment",
data () {
return {
latest: [],
all: [],
item: {
kind: null,
description: null,
tstamp: null,
date: null,
time: null,
attributes: []
},
dialogMode: null,
selectedIndex: null,
historyMode: false,
confirm: {
message: null,
action: null,
yes: null,
no: null
}
}
},
watch: {
dialog (newVal, oldVal) {
if (newVal) {
const tstamp = new Date();
this.item.date = tstamp.toISOString().substr(0, 10);
this.item.time = tstamp.toISOString().substr(11, 5);
}
},
"item.date": function (newVal) {
if (newVal) {
this.item.tstamp = new Date(this.item.date+"T"+this.item.time);
}
},
"item.time": function (newVal) {
if (newVal) {
this.item.tstamp = new Date(this.item.date+"T"+this.item.time);
}
},
async serverEvent (event) {
if (event.payload.schema == "public") {
if (event.channel == "info") {
if (!this.loading) {
this.getEquipment();
}
}
}
}
},
computed: {
dialog () {
return !!this.dialogMode;
},
canSave () {
return this.item.kind &&
this.item.date && this.item.time &&
(this.item.attributes.length
? this.item.attributes.every(i => i.key && i.value)
: (this.item.description ||"").trim());
},
selectedItem () {
return this.selectedIndex !== null
? this.latest[this.selectedIndex]
: null;
},
selectedItemHistory () {
if (this.selectedItem && this.historyMode === 0) {
const items = this.all
.filter(i => i.kind == this.selectedItem.kind && i.tstamp != this.selectedItem.tstamp)
.sort( (a, b) => new Date(b.tstamp) - new Date(a.tstamp) );
return items;
}
return null;
},
...mapGetters(['user', 'writeaccess', 'loading', 'serverEvent'])
},
methods: {
async cancelConfirmAction () {
this.confirm.action = null;
this.confirm.message = null;
this.confirm.yes = null;
this.confirm.no = null;
},
async doConfirmAction () {
await this.confirm.action();
this.cancelConfirmAction();
},
async getEquipment () {
const url = `/info/equipment`;
const items = await this.api([url]) || [];
this.all = [...items];
this.latest = this.all.filter(i =>
!this.all.find(j => i.kind == j.kind && i.tstamp < j.tstamp)
)
.sort( (a, b) => a.kind < b.kind ? -1 : a.kind > b.kind ? 1 : 0 );
},
addAttribute () {
this.item.attributes.push({key: undefined, value: undefined});
},
removeAttribute (idx) {
this.item.attributes.splice(idx, 1);
},
async deleteItem (item) {
const idx = this.all.findIndex(i => i.kind == item.kind && i.tstamp == item.tstamp);
if (idx == -1) {
return;
}
const url = `/info/equipment/${idx}`;
const init = {
method: "DELETE"
};
await this.api([url, init]);
await this.getEquipment();
},
confirmDelete (item) {
this.confirm.action = () => this.deleteItem(item);
this.confirm.message = "Are you sure? <b>This action is irreversible.</b>";
},
clearItem () {
this.item.kind = null;
this.item.description = null;
this.item.date = null;
this.item.time = null;
this.item.tstamp = null;
this.item.attributes = [];
},
editItem (item) {
this.item.kind = item.kind;
this.item.description = item.description;
this.item.tstamp = new Date();
this.item.attributes = [...item.attributes];
this.dialogMode = "edit";
this.dialog = true;
},
async saveItem () {
const item = {};
item.kind = this.item.kind;
item.description = this.item.description;
item.tstamp = this.item.tstamp.toISOString();
item.attributes = [...this.item.attributes.filter(i => i.key && i.value)];
if (this.dialogMode == "edit") {
this.latest.splice(this.selectedIndex, 1, item);
} else {
this.latest.push(item);
}
const url = `/info/equipment`;
const init = {
method: "POST",
body: item
};
await this.api([url, init]);
this.closeDialog();
await this.getEquipment();
},
closeDialog (state = false) {
this.clearItem();
this.dialogMode = state===true ? "new" : null;
},
...mapActions(["api"])
},
async mounted () {
await this.getEquipment();
}
}
</script>

View File

@@ -186,6 +186,14 @@ app.map({
get: [ mw.gis.navdata.get ]
}
},
'/info/': {
':path(*)': {
get: [ mw.info.get ],
put: [ mw.auth.access.write, mw.info.put ],
post: [ mw.auth.access.write, mw.info.post ],
delete: [ mw.auth.access.write, mw.info.delete ]
}
},
'/rss/': {
get: [ mw.rss.get ]
}

View File

@@ -1,16 +1,18 @@
const { event, sequence, configuration } = require('../../../../lib/db');
const { transform } = require('../../../../lib/sse');
const { configuration } = require('../../../../lib/db');
const { transform, prepare } = require('../../../../lib/sse');
const render = require('../../../../lib/render');
// FIXME Refactor when able
const defaultTemplatePath = require('path').resolve(__dirname, "../../../../../../../etc/default/templates/sequence.html.njk");
const html = async function (req, res, next) {
try {
const query = req.query;
query.sequence = req.params.sequence;
const events = await event.list(req.params.project, query);
const sequences = await sequence.list(req.params.project, query);
const {events, sequences} = await prepare(req.params.project, query);
const seis = transform(events, sequences, {projectId: req.params.project});
const templates = await configuration.get(req.params.project, "sse/templates");
const template = templates[0].template;
const template = (await configuration.get(req.params.project, "sse/templates/0/template")) || defaultTemplatePath;
console.log("TEMPLATE", template);
const response = await render(seis, template);

View File

@@ -1,11 +1,14 @@
const fs = require('fs/promises');
const Path = require('path');
const crypto = require('crypto');
const { event, sequence, configuration } = require('../../../../lib/db');
const { transform } = require('../../../../lib/sse');
const { configuration } = require('../../../../lib/db');
const { transform, prepare } = require('../../../../lib/sse');
const render = require('../../../../lib/render');
const { url2pdf } = require('../../../../lib/selenium');
// FIXME Refactor when able
const defaultTemplatePath = require('path').resolve(__dirname, "../../../../../../../etc/default/templates/sequence.html.njk");
function tmpname (tmpdir="/dev/shm") {
return Path.join(tmpdir, crypto.randomBytes(16).toString('hex')+".tmp");
}
@@ -15,11 +18,9 @@ const pdf = async function (req, res, next) {
try {
const query = req.query;
query.sequence = req.params.sequence;
const events = await event.list(req.params.project, query);
const sequences = await sequence.list(req.params.project, query);
const {events, sequences} = await prepare(req.params.project, query);
const seis = transform(events, sequences, {projectId: req.params.project});
const templates = await configuration.get(req.params.project, "sse/templates");
const template = templates[0].template;
const template = (await configuration.get(req.params.project, "sse/templates/0/template")) || defaultTemplatePath;
const html = await render(seis, template);

View File

@@ -1,12 +1,10 @@
const { event, sequence } = require('../../../../lib/db');
const { transform } = require('../../../../lib/sse');
const { transform, prepare } = require('../../../../lib/sse');
const seis = async function (req, res, next) {
try {
const query = req.query;
query.sequence = req.params.sequence;
const events = await event.list(req.params.project, query);
const sequences = await sequence.list(req.params.project, query);
const {events, sequences} = await prepare(req.params.project, query);
const response = transform(events, sequences, {projectId: req.params.project});
if ("download" in query || "d" in query) {
const filename = `${req.params.project}-seq${query.sequence.padStart(3, "0")}.json`;

View File

@@ -0,0 +1,14 @@
const { info } = require('../../../lib/db');
module.exports = async function (req, res, next) {
try {
await info.delete(req.params.project, req.params.path);
res.status(204).send();
next();
} catch (err) {
next(err);
}
};

View File

@@ -0,0 +1,16 @@
const { info } = require('../../../lib/db');
module.exports = async function (req, res, next) {
try {
const payload = req.body;
await info.post(req.params.project, req.params.path, payload);
res.status(201).send();
next();
} catch (err) {
next(err);
}
};

View File

@@ -0,0 +1,16 @@
const { info } = require('../../../lib/db');
module.exports = async function (req, res, next) {
try {
const payload = req.body;
await info.put(req.params.project, req.params.path, payload);
res.status(201).send();
next();
} catch (err) {
next(err);
}
};

View File

@@ -0,0 +1,146 @@
const { schema2pid } = require('../../lib/db/connection');
const { event } = require('../../lib/db');
class DetectSOLEOL {
/* Data may come much faster than we can process it, so we put it
* in a queue and process it at our own pace.
*
* The run() method fills the queue with the necessary data and then
* calls processQueue().
*
* The processQueue() method takes the first two elements in
* the queue and processes them if they are not already being taken
* care of by a previous processQueue() call; this will happen when
* data is coming in faster than it can be processed.
*
* If the processQueue() call is the first to see the two bottommost
* elements, it will process them and, when finished, it will set
* the `isPending` flag of the bottommost element to `false`, thus
* letting the next call know that it has work to do.
*
* If the queue was empty, run() will set the `isPending` flag of its
* first element to a falsy value, thus bootstrapping the process.
*/
static MAX_QUEUE_SIZE = 125000;
queue = [];
async processQueue () {
while (this.queue.length > 1) {
if (this.queue[0].isPending) {
setImmediate(() => this.processQueue());
return;
}
const prev = this.queue.shift();
const cur = this.queue[0];
const sequence = Number(cur._sequence);
try {
if (prev.lineName == cur.lineName && prev._sequence == cur._sequence &&
prev.lineStatus != "online" && cur.lineStatus == "online" && sequence) {
// console.log("TRANSITION TO ONLINE", prev, cur);
// Check if there are already FSP, FGSP events for this sequence
const projectId = await schema2pid(cur._schema);
const sequenceEvents = await event.list(projectId, {sequence});
const labels = ["FSP", "FGSP"].filter(l => !sequenceEvents.find(i => i.labels.includes(l)));
if (labels.includes("FSP")) {
// At this point labels contains either FSP only or FSP + FGSP,
// depending on whether a FGSP event has already been entered.
const remarks = `SEQ ${cur._sequence}, SOL ${cur.lineName}, BSP: ${(cur.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(cur.waterDepth).toFixed(0)} m.`;
const payload = {
type: "sequence",
sequence,
point: cur._point,
remarks,
labels
}
// console.log(projectId, payload);
await event.post(projectId, payload);
} else {
// A first shot point has been already entered in the log,
// so we have nothing to do here.
}
} else if (prev.lineStatus == "online" && cur.lineStatus != "online") {
// console.log("TRANSITION TO OFFLINE", prev, cur);
// Check if there are already LSP, LGSP events for this sequence
const projectId = await schema2pid(prev._schema);
const sequenceEvents = await event.list(projectId, {sequence});
const labels = ["LSP", "LGSP"].filter(l => !sequenceEvents.find(i => i.labels.includes(l)));
if (labels.includes("LSP")) {
// At this point labels contains either LSP only or LSP + LGSP,
// depending on whether a LGSP event has already been entered.
const remarks = `SEQ ${prev._sequence}, EOL ${prev.lineName}, BSP: ${(prev.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(prev.waterDepth).toFixed(0)} m.`;
const payload = {
type: "sequence",
sequence,
point: prev._point,
remarks,
labels
}
// console.log(projectId, payload);
await event.post(projectId, payload);
} else {
// A first shot point has been already entered in the log,
// so we have nothing to do here.
}
}
// Processing of this shot has already been completed.
// The queue can now move forward.
} catch (err) {
console.error("DetectSOLEOL Error")
console.log(err);
} finally {
cur.isPending = false;
}
}
}
async run (data) {
if (!data || data.channel !== "realtime") {
return;
}
if (!(data.payload && data.payload.new && data.payload.new.meta)) {
return;
}
const meta = data.payload.new.meta;
if (this.queue.length < DetectSOLEOL.MAX_QUEUE_SIZE) {
this.queue.push({
isPending: this.queue.length,
_schema: meta._schema,
time: meta.time,
shot: meta.shot,
lineStatus: meta.lineStatus,
_sequence: meta._sequence,
_point: meta._point,
lineName: meta.lineName,
speed: meta.speed,
waterDepth: meta.waterDepth
});
} else {
// FIXME Change to alert
console.error("DetectSOLEOL queue full at", this.queue.length);
}
this.processQueue();
}
}
module.exports = DetectSOLEOL;

View File

@@ -0,0 +1,12 @@
const Handlers = [
require('./detect-soleol')
];
function init () {
return Handlers.map(Handler => new Handler());
}
module.exports = {
Handlers,
init
}

View File

@@ -1,56 +1,21 @@
const { schema2pid } = require('../lib/db/connection');
const { listen } = require('../ws/db');
const { event } = require('../lib/db');
const channels = require('../lib/db/channels');
const handlers = require('./handlers').init();
function start () {
let prevPos = null;
listen(["realtime"], function (data) {
if (!(data.payload && data.payload.new && data.payload.new.meta)) {
console.log("Wrong event", data);
return;
listen(channels, async function (data) {
for (const handler of handlers) {
// NOTE: We are intentionally passing the same instance
// of the data to every handler. This means that earlier
// handlers could, in principle, modify the data to be
// consumed by latter ones, provided that they are
// synchronous (as otherwise, the completion order is
// undefined).
await handler.run(data);
}
const pos = data.payload.new.meta;
if (prevPos) {
if (pos.lineStatus == "online") {
if (prevPos.lineStatus != "online") {
// FIXME TODO Check if there are already FSP, FGSP events for this sequence
// Tag this as FSP/FGSP
const remarks = `SEQ ${pos._sequence}, SOL ${pos.lineName}, BSP: ${(pos.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(pos.waterDepth).toFixed(0)} m.`;
const payload = {
type: "sequence",
sequence: pos._sequence,
point: pos._point,
remarks,
labels: [ "FSP", "FGSP" ]
}
schema2pid(pos._schema).then(projectId => event.post(projectId, payload));
// console.log("post fsp", pos._schema);
}
} else {
if (prevPos.lineStatus == "online") {
// FIXME TODO Check if there are already LSP, LGSP events for this sequence
// Tag this as LSP/LGSP
const remarks = `SEQ ${prevPos._sequence}, EOL ${prevPos.lineName}, BSP: ${(prevPos.speed*3.6/1.852).toFixed(1)} kt, Water depth: ${Number(prevPos.waterDepth).toFixed(0)} m.`;
const payload = {
type: "sequence",
sequence: prevPos._sequence,
point: prevPos._point,
remarks,
labels: [ "LSP", "LGSP" ]
}
schema2pid(prevPos._schema).then(projectId => event.post(projectId, payload));
// console.log("post lsp", prevPos._schema);
}
}
}
prevPos = JSON.parse(JSON.stringify(pos));
});
console.log("Events manager started");
console.log("Events manager started.", handlers.length, "active handlers");
}
module.exports = { start }

View File

@@ -0,0 +1,13 @@
// This is the list of all channels for which the
// database issues notifications.
// NOTE: This needs to be kept up to date with
// database schema changes.
module.exports = [
"realtime", "event", "project",
"preplot_lines", "preplot_points",
"planned_lines",
"raw_lines", "raw_shots",
"final_lines", "final_shots", "info"
];

View File

@@ -18,7 +18,8 @@ async function get (projectId, path, opts = {}) {
: res.rows.map(r => r.data);
if (path) {
return path.split('/').reduce( (obj, idx) => obj[idx], config);
return path.split('/').filter(i => i !== "").reduce( (obj, idx) =>
typeof obj !== 'undefined' ? obj[idx] : undefined, config);
} else {
return config;
}

View File

@@ -32,7 +32,11 @@ async function setSurvey (projectId, client) {
if (!client) {
client = await pool.connect();
}
await client.query("CALL set_survey($1);", [projectId]);
if (projectId) {
await client.query("CALL set_survey($1);", [projectId]);
} else {
await client.query("SET search_path TO public;");
}
return client;
}

View File

@@ -27,7 +27,7 @@ async function list (projectId, opts = {}) {
const limit = Math.abs(Number(opts.itemsPerPage)) || null;
const filter = opts.sequence
? opts.sequence.includes(";")
? String(opts.sequence).includes(";")
? [ "sequence = ANY ( $1 )", [ opts.sequence.split(";") ] ]
: [ "sequence = $1", [ opts.sequence ] ]
: opts.date0

View File

@@ -31,6 +31,7 @@ async function insertSequenceEventLabels(event, client) {
FROM unnest($2::text[]) l (name)
INNER JOIN labels USING (name)
WHERE (data->'model'->'user')::boolean IS true
ON CONFLICT ON CONSTRAINT events_seq_labels_pkey DO NOTHING;
`;
// console.log("insertSequenceEventLabels", text, event);

View File

@@ -51,7 +51,8 @@ async function updateSeqEventLabels (event, client) {
const text = `
INSERT INTO events_seq_labels (id, label)
SELECT $1, label FROM unnest($2::text[]) t (label);
SELECT $1, label FROM unnest($2::text[]) t (label)
ON CONFLICT ON CONSTRAINT events_seq_labels_pkey DO NOTHING;
`;
return client.query(text, [event.id, event.labels]);

View File

@@ -0,0 +1,28 @@
const { setSurvey, transaction } = require('../connection');
async function del (projectId, path, opts = {}) {
const client = await setSurvey(projectId);
const [key, ...jsonpath] = (path||"").split("/").filter(i => i.length);
try {
const text = jsonpath.length
? `
UPDATE info
SET value = value #- $2
WHERE key = $1;
`
: `
DELETE FROM info
WHERE key = $1;
`;
const values = jsonpath.length ? [key, jsonpath] : [key];
await client.query(text, values);
} catch (err) {
console.error("ERROR", err);
throw err;
} finally {
client.release();
}
}
module.exports = del;

View File

@@ -2,7 +2,7 @@ const { setSurvey } = require('../connection');
async function get (projectId, path, opts = {}) {
const client = await setSurvey(projectId);
const [key, ...subkey] = path.split("/");
const [key, ...subkey] = path.split("/").filter(i => i.trim().length);
const text = `
SELECT value
@@ -17,7 +17,7 @@ async function get (projectId, path, opts = {}) {
if (subkey.length) {
const res = subkey.reduce( (obj, idx) => typeof obj != "undefined" ? obj[idx] : obj, value);
console.log(res);
//console.log(res);
return res;
} else {
return value;

View File

@@ -0,0 +1,41 @@
const { setSurvey, transaction } = require('../connection');
async function post (projectId, path, payload, opts = {}) {
const client = await setSurvey(projectId);
const [key, ...jsonpath] = (path||"").split("/").filter(i => i.length);
try {
const text = jsonpath.length
? `
INSERT INTO info (key, value)
VALUES ($2, jsonb_insert('${isNaN(Number(jsonpath[0])) ? "{}" : "[]"}'::jsonb, $3, $1))
ON CONFLICT (key) DO UPDATE
SET
key = $2,
value = jsonb_insert((SELECT value FROM info WHERE key = $2), $3, $1, true)
RETURNING *;
`
: `
INSERT INTO info (key, value)
VALUES ($2, jsonb_insert('[]'::jsonb, '{0}', $1))
ON CONFLICT (key) DO UPDATE
SET
key = $2,
value = jsonb_insert((SELECT value FROM info WHERE key = $2), '{-1}'::text[], $1, true)
RETURNING *;
`;
const values = jsonpath.length ? [JSON.stringify(payload), key, jsonpath] : [JSON.stringify(payload), key];
await client.query(text, values);
} catch (err) {
console.error("ERROR", err);
if (err.code == 22023) {
throw {status: 400, message: "Cannot post to non-array"};
} else {
throw err;
}
} finally {
client.release();
}
}
module.exports = post;

View File

@@ -0,0 +1,37 @@
const { setSurvey, transaction } = require('../connection');
async function put (projectId, path, payload, opts = {}) {
const client = await setSurvey(projectId);
const [key, ...jsonpath] = (path||"").split("/").filter(i => i.length);
try {
const text = jsonpath.length
? `
INSERT INTO info (key, value)
VALUES ($2, jsonb_set('${isNaN(Number(jsonpath[0])) ? "{}" : "[]"}'::jsonb, $3, $1))
ON CONFLICT (key) DO UPDATE
SET
key = $2,
value = jsonb_set((SELECT value FROM info WHERE key = $2), $3, $1)
RETURNING *;
`
: `
INSERT INTO info (key, value)
VALUES ($2, $1)
ON CONFLICT (key) DO UPDATE
SET
key = $2,
value = $1
RETURNING *;
`;
const values = jsonpath.length ? [JSON.stringify(payload), key, jsonpath] : [JSON.stringify(payload), key];
await client.query(text, values);
} catch (err) {
console.error("ERROR", err);
throw err;
} finally {
client.release();
}
}
module.exports = put;

View File

@@ -29,6 +29,10 @@ function njkCollect (entries, key, collectables) {
return out;
}
function njkUnique (entries) {
return entries.filter((element, index, array) => array.indexOf(element) === index);
}
function njkPadStart (str, len, chr) {
return String(str).padStart(len, chr);
}
@@ -58,6 +62,7 @@ async function render (data, template) {
const nenv = nunjucks.configure(Path.dirname(template), {autoescape: false, lstripBlocks: false, trimBlocks: false});
nenv.addFilter('find', njkFind);
nenv.addFilter('unique', njkUnique);
nenv.addFilter('collect', njkCollect);
nenv.addFilter('padStart', njkPadStart);
nenv.addFilter('timestamp', njkTimestamp);

View File

@@ -1,3 +1,4 @@
module.exports = {
transform: require('./transform')
transform: require('./transform'),
prepare: require('./prepare')
}

View File

@@ -0,0 +1,16 @@
const { event, sequence, info } = require('../db');
async function prepare (project, query) {
const events = await event.list(project, query);
const sequences = await sequence.list(project, query);
const equipment = await info.get(null, "equipment");
for (const sequence of sequences) {
const maxTstamp = sequence.ts1_final || sequence.ts1 || +Infinity;
if (equipment) {
sequence.equipment = equipment.filter(i => new Date(i.tstamp) <= maxTstamp);
}
}
return {events, sequences};
}
module.exports = prepare;

View File

@@ -37,7 +37,8 @@ function transform (events, sequences, opts = {}) {
// exists or not.
DglLength: sequence.length,
DglAzimuth: sequence.azimuth,
DglDuration: sequence.duration_final || sequence.duration
DglDuration: sequence.duration_final || sequence.duration,
DglEquipmentInfo: sequence.equipment
};
[sequence.remarks, sequence.remarks_final].filter(i => !!i).forEach(i => {
if (!SequenceObject.DglSequenceComments) {

View File

@@ -1,17 +1,10 @@
const ws = require('ws');
const URL = require('url');
const db = require('./db');
const channels = require('../lib/db/channels');
function start (server, pingInterval=30000) {
const channels = [
"realtime", "event", "project",
"preplot_lines", "preplot_points",
"planned_lines",
"raw_lines", "raw_shots",
"final_lines", "final_shots"
];
const wsServer = new ws.Server({ noServer: true });
wsServer.on('connection', socket => {
socket.alive = true;