Compare commits

..

7 Commits

Author SHA1 Message Date
D. Berge
b9e776ae74 Use ETag middleware 2023-09-01 12:28:55 +02:00
D. Berge
c1ece08f38 Add ETag middleware 2023-09-01 12:27:44 +02:00
D. Berge
7d5fb4bceb Add notifier to DB library 2023-09-01 12:22:54 +02:00
D. Berge
fa9f7ad600 Add pg-listen dependency 2023-09-01 12:19:57 +02:00
D. Berge
2eeaddb159 Update package-lock.json to stop gyp from failing 2023-09-01 12:18:01 +02:00
D. Berge
1130ca7ec3 Request ancillary library via HTTPS rather than SSH.
Otherwise newer versions of npm will choke during `npm install` due
to this npm bug: https://github.com/npm/cli/issues/2610
2023-09-01 12:14:16 +02:00
D. Berge
67f3f83c61 Add wx forecast info to plan (experiment).
Use https://open-meteo.com/ as a weather forecast provider.

This code is intended for demonstration only, not for
production purposes.

(issue #157)
2023-09-01 00:37:15 +02:00
83 changed files with 659 additions and 2329 deletions

View File

@@ -1,27 +0,0 @@
#!/usr/bin/python3
"""
Check if any of the directories provided in the imports.mounts configuration
section are empty.
Exits with 0 if all directories are non-empty, 1 otherwise. It stops at the
first empty directory.
"""
import os
import configuration

cfg = configuration.read()

if cfg and "imports" in cfg and "mounts" in cfg["imports"]:
    mounts = cfg["imports"]["mounts"]
    for item in mounts:
        with os.scandir(item) as contents:
            if not any(contents):
                exit(1)
else:
    print("No mounts in configuration")

exit(0)
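
# Typical usage is from a shell wrapper, gating import tasks on the exit
# status; the run script later in this comparison does exactly that:
#
#   $BINDIR/check_mounts_present.py || {
#       print_warning "Import mounts not accessible. Inhibiting all tasks!"
#       exit 253
#   }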

View File

@@ -1,5 +1,4 @@
import os
import pathlib
from glob import glob
from yaml import full_load as _load
@@ -12,18 +11,6 @@ surveys should be under $HOME/etc/surveys/*.yaml. In both cases,
$HOME is the home directory of the user running this script.
"""

def is_relative_to(it, other):
    """
    Path.is_relative_to() is only available from Python 3.9 onwards, so we
    need this kludge to get Dougal to run on openSUSE 15.4.
    """
    if "is_relative_to" in dir(it):
        return it.is_relative_to(other)
    return str(it.absolute()).startswith(str(other.absolute()))
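
# A quick illustration of the fallback semantics (hypothetical paths):
#
#   >>> is_relative_to(pathlib.Path("/srv/mnt/Data/obs.yaml"), pathlib.Path("/srv/mnt/Data"))
#   True
#
# Note that the startswith() fallback is slightly laxer than the real
# Path.is_relative_to(): comparing strings rather than path components,
# it would also accept "/srv/mnt/Database/x" as relative to "/srv/mnt/Data".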
prefix = os.environ.get("DOUGAL_ROOT", os.environ.get("HOME", ".")+"/software")
DOUGAL_ROOT = os.environ.get("DOUGAL_ROOT", os.environ.get("HOME", ".")+"/software")
@@ -67,10 +54,6 @@ def files (globspec = None, include_archived = False):
    quickly and temporarily “disabling” a survey configuration by renaming
    the relevant file.
    """
    print("This method is obsolete")
    return

    tuples = []
    if globspec is None:
@@ -104,73 +87,3 @@ def rxflags (flagstr):
    for flag in flagstr:
        flags |= cases.get(flag, 0)
    return flags

def translate_path (file):
    """
    Translate a path from a Dougal import directory to an actual
    physical path on disk.

    Any user files accessible by Dougal must be under a path prefixed
    by `(config.yaml).imports.paths`. The value of `imports.paths` may
    be either a string, in which case this represents the prefix under
    which all Dougal data resides, or a dictionary where the keys are
    logical paths and their values the corresponding physical path.
    """
    cfg = read()
    root = pathlib.Path(DOUGAL_ROOT)
    filepath = pathlib.Path(file).resolve()
    import_paths = cfg["imports"]["paths"]
    if filepath.is_absolute():
        if type(import_paths) == str:
            # Substitute the root for the real physical path
            # NOTE: `root` deals with import_paths not being absolute
            prefix = root.joinpath(pathlib.Path(import_paths)).resolve()
            return str(pathlib.Path(prefix).joinpath(*filepath.parts[2:]))
        else:
            # Look for a match on the second path element
            if filepath.parts[1] in import_paths:
                # NOTE: `root` deals with import_paths[…] not being absolute
                prefix = root.joinpath(import_paths[filepath.parts[1]])
                return str(pathlib.Path(prefix).joinpath(*filepath.parts[2:]))
            else:
                # This path is invalid
                raise TypeError("invalid path or file: {0!r}".format(filepath))
    else:
        # A relative filepath is always resolved relative to the logical root
        root = pathlib.Path("/")
        return translate_path(root.joinpath(filepath))

def untranslate_path (file):
    """
    Attempt to convert a physical path into a logical one.
    See `translate_path()` above for details.
    """
    cfg = read()
    dougal_root = pathlib.Path(DOUGAL_ROOT)
    filepath = pathlib.Path(file).resolve()
    import_paths = cfg["imports"]["paths"]
    physical_root = pathlib.Path("/")
    if filepath.is_absolute():
        if type(import_paths) == str:
            if is_relative_to(filepath, import_paths):
                physical_prefix = pathlib.Path(import_paths)
                # Strip the physical prefix to recover the logical path
                return str(physical_root.joinpath(filepath.relative_to(physical_prefix)))
            else:
                raise TypeError("invalid path or file: {0!r}".format(filepath))
        else:
            for key, value in import_paths.items():
                value = dougal_root.joinpath(value)
                physical_prefix = pathlib.Path(value)
                if is_relative_to(filepath, physical_prefix):
                    logical_prefix = physical_root.joinpath(pathlib.Path(key)).resolve()
                    return str(logical_prefix.joinpath(filepath.relative_to(physical_prefix)))
            # If we got here with no matches, this is not a valid
            # Dougal data path
            raise TypeError("invalid path or file: {0!r}".format(filepath))
    else:
        # A relative filepath is always resolved relative to DOUGAL_ROOT
        return untranslate_path(dougal_root.joinpath(filepath))
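
# Round-trip illustration under a hypothetical configuration where
# (config.yaml).imports.paths is the dictionary {"projects": "data/projects"}
# and DOUGAL_ROOT is "/opt/dougal":
#
#   translate_path("/projects/2023/lines.sps")
#     -> "/opt/dougal/data/projects/2023/lines.sps"
#   untranslate_path("/opt/dougal/data/projects/2023/lines.sps")
#     -> "/projects/2023/lines.sps"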

View File

@@ -52,7 +52,7 @@ class Datastore:
        self.conn = psycopg2.connect(configuration.read()["db"]["connection_string"], **opts)

    def set_autocommit(self, value = True):
    def set_autocommit(value = True):
        """
        Enable or disable autocommit.
@@ -95,7 +95,7 @@ class Datastore:
            cursor.execute(qry, (filepath,))
            results = cursor.fetchall()
        if len(results):
            return (filepath, file_hash(configuration.translate_path(filepath))) in results
            return (filepath, file_hash(filepath)) in results

    def add_file(self, path, cursor = None):
@@ -107,8 +107,7 @@ class Datastore:
        else:
            cur = cursor
        realpath = configuration.translate_path(path)
        hash = file_hash(realpath)
        hash = file_hash(path)
        qry = "CALL add_file(%s, %s);"
        cur.execute(qry, (path, hash))
        if cursor is None:
@@ -177,7 +176,7 @@ class Datastore:
        else:
            cur = cursor
        hash = file_hash(configuration.translate_path(path))
        hash = file_hash(path)
        qry = """
            UPDATE raw_lines rl
            SET ntbp = %s
@@ -589,63 +588,7 @@ class Datastore:
        # We do not commit if we've been passed a cursor, instead
        # we assume that we are in the middle of a transaction

    def get_file_data(self, path, cursor = None):
        """
        Retrieve arbitrary data associated with a file.
        """
        if cursor is None:
            cur = self.conn.cursor()
        else:
            cur = cursor
        realpath = configuration.translate_path(path)
        hash = file_hash(realpath)
        qry = """
            SELECT data
            FROM file_data
            WHERE hash = %s;
        """
        cur.execute(qry, (hash,))
        res = cur.fetchone()
        if cursor is None:
            self.maybe_commit()
        # We do not commit if we've been passed a cursor, instead
        # we assume that we are in the middle of a transaction
        return res[0]

    def surveys (self, include_archived = False):
        """
        Return list of survey definitions.
        """
        if self.conn is None:
            self.connect()
        if include_archived:
            qry = """
                SELECT meta
                FROM public.projects;
            """
        else:
            qry = """
                SELECT meta
                FROM public.projects
                WHERE NOT (meta->'archived')::boolean IS true
            """
        with self.conn:
            with self.conn.cursor() as cursor:
                cursor.execute(qry)
                results = cursor.fetchall()
        return [r[0] for r in results if r[0]]

    # TODO Does this need tweaking on account of #246?
    def apply_survey_configuration(self, cursor = None):
        if cursor is None:
            cur = self.conn.cursor()

View File

@@ -9,9 +9,11 @@ from datastore import Datastore
if __name__ == '__main__':
    print("Reading configuration")
    surveys = configuration.surveys()
    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()
    print("Reading surveys")
    for survey in surveys:

View File

@@ -51,11 +51,12 @@ def del_pending_remark(db, sequence):
if __name__ == '__main__':
    print("Reading configuration")
    surveys = configuration.surveys()
    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()
    db.connect()
    print("Reading surveys")
    for survey in surveys:
@@ -76,31 +77,29 @@ if __name__ == '__main__':
        pendingRx = re.compile(survey["final"]["pending"]["pattern"]["regex"])
        for fileprefix in final_p111["paths"]:
            realprefix = configuration.translate_path(fileprefix)
            print(f"Path prefix: {fileprefix}{realprefix}")
            print(f"Path prefix: {fileprefix}")
            for globspec in final_p111["globs"]:
                for physical_filepath in pathlib.Path(realprefix).glob(globspec):
                    physical_filepath = str(physical_filepath)
                    logical_filepath = configuration.untranslate_path(physical_filepath)
                    print(f"Found {logical_filepath}")
                for filepath in pathlib.Path(fileprefix).glob(globspec):
                    filepath = str(filepath)
                    print(f"Found {filepath}")
                    pending = False
                    if pendingRx:
                        pending = pendingRx.search(physical_filepath) is not None
                        pending = pendingRx.search(filepath) is not None
                    if not db.file_in_db(logical_filepath):
                    if not db.file_in_db(filepath):
                        age = time.time() - os.path.getmtime(physical_filepath)
                        age = time.time() - os.path.getmtime(filepath)
                        if age < file_min_age:
                            print("Skipping file because too new", logical_filepath)
                            print("Skipping file because too new", filepath)
                            continue
                        print("Importing")
                        match = rx.match(os.path.basename(logical_filepath))
                        match = rx.match(os.path.basename(filepath))
                        if not match:
                            error_message = f"File path does not match the expected format! ({logical_filepath} ~ {pattern['regex']})"
                            error_message = f"File path does not match the expected format! ({filepath} ~ {pattern['regex']})"
                            print(error_message, file=sys.stderr)
                            print("This file will be ignored!")
                            continue
@@ -109,21 +108,21 @@ if __name__ == '__main__':
                        file_info["meta"] = {}
                        if pending:
                            print("Skipping / removing final file because marked as PENDING", logical_filepath)
                            print("Skipping / removing final file because marked as PENDING", filepath)
                            db.del_sequence_final(file_info["sequence"])
                            add_pending_remark(db, file_info["sequence"])
                            continue
                        else:
                            del_pending_remark(db, file_info["sequence"])
                        p111_data = p111.from_file(physical_filepath)
                        p111_data = p111.from_file(filepath)
                        print("Saving")
                        p111_records = p111.p111_type("S", p111_data)
                        file_info["meta"]["lineName"] = p111.line_name(p111_data)
                        db.save_final_p111(p111_records, file_info, logical_filepath, survey["epsg"])
                        db.save_final_p111(p111_records, file_info, filepath, survey["epsg"])
                    else:
                        print("Already in DB")
                        if pending:

View File

@@ -1,109 +0,0 @@
#!/usr/bin/python3
"""
Import map layer data.
For each survey in configuration.surveys(), check for new
or modified map layer files and (re-)import them into the
database.
"""
import os
import sys
import pathlib
import re
import time
import json
import configuration
from datastore import Datastore

if __name__ == '__main__':
    """
    Imports map layers from the directories defined in the configuration object
    `import.map.layers`. The content of that key is an object mapping each
    layer name to a list of entries with the following structure:

        {
          layer1Name: [
            {
              format: "geojson",
              path: "",            // Logical path to a directory
              globs: [
                "**/*.geojson"     // List of globs matching map data files
              ]
            }
          ],
          layer2Name: …
        }
    """
    def process (layer_name, layer, physical_filepath):
        physical_filepath = str(physical_filepath)
        logical_filepath = configuration.untranslate_path(physical_filepath)
        print(f"Found {logical_filepath}")
        if not db.file_in_db(logical_filepath):
            age = time.time() - os.path.getmtime(physical_filepath)
            if age < file_min_age:
                print("Skipping file because too new", logical_filepath)
                return
            print("Importing")
            file_info = {
                "type": "map_layer",
                "format": layer["format"],
                "name": layer_name
            }
            db.save_file_data(logical_filepath, json.dumps(file_info))
        else:
            file_info = db.get_file_data(logical_filepath)
            if file_info and file_info["name"] != layer_name:
                print("Renaming to", layer_name)
                file_info["name"] = layer_name
                db.save_file_data(logical_filepath, json.dumps(file_info))
            else:
                print("Already in DB")

    print("Reading configuration")
    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()
    print("Reading surveys")
    for survey in surveys:
        print(f'Survey: {survey["id"]} ({survey["schema"]})')
        db.set_survey(survey["schema"])
        try:
            map_layers = survey["imports"]["map"]["layers"]
        except KeyError:
            print("No map layers defined")
            continue
        for layer_name, layer_items in map_layers.items():
            for layer in layer_items:
                fileprefix = layer["path"]
                realprefix = configuration.translate_path(fileprefix)
                if os.path.isfile(realprefix):
                    process(layer_name, layer, realprefix)
                else:
                    for globspec in layer["globs"]:
                        for physical_filepath in pathlib.Path(realprefix).glob(globspec):
                            process(layer_name, layer, physical_filepath)
    print("Done")

View File

@@ -17,31 +17,29 @@ from datastore import Datastore
if __name__ == '__main__':
    print("Reading configuration")
    surveys = configuration.surveys()
    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()
    print("Reading configuration")
    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
    print("Reading surveys")
    for survey in surveys:
        print(f'Survey: {survey["id"]} ({survey["schema"]})')
        db.set_survey(survey["schema"])
        for file in survey["preplots"]:
            realpath = configuration.translate_path(file["path"])
            print(f"Preplot: {file['path']}")
            if not db.file_in_db(file["path"]):
                age = time.time() - os.path.getmtime(realpath)
                age = time.time() - os.path.getmtime(file["path"])
                if age < file_min_age:
                    print("Skipping file because too new", file["path"])
                    continue
                print("Importing")
                try:
                    preplot = preplots.from_file(file, realpath)
                    preplot = preplots.from_file(file)
                except FileNotFoundError:
                    print(f"File does not exist: {file['path']}", file=sys.stderr)
                    continue

View File

@@ -20,11 +20,12 @@ from datastore import Datastore
if __name__ == '__main__':
    print("Reading configuration")
    surveys = configuration.surveys()
    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()
    db.connect()
    print("Reading surveys")
    for survey in surveys:
@@ -45,32 +46,30 @@ if __name__ == '__main__':
        ntbpRx = re.compile(survey["raw"]["ntbp"]["pattern"]["regex"])
        for fileprefix in raw_p111["paths"]:
            realprefix = configuration.translate_path(fileprefix)
            print(f"Path prefix: {fileprefix}{realprefix}")
            print(f"Path prefix: {fileprefix}")
            for globspec in raw_p111["globs"]:
                for physical_filepath in pathlib.Path(realprefix).glob(globspec):
                    physical_filepath = str(physical_filepath)
                    logical_filepath = configuration.untranslate_path(physical_filepath)
                    print(f"Found {logical_filepath}")
                for filepath in pathlib.Path(fileprefix).glob(globspec):
                    filepath = str(filepath)
                    print(f"Found {filepath}")
                    if ntbpRx:
                        ntbp = ntbpRx.search(physical_filepath) is not None
                        ntbp = ntbpRx.search(filepath) is not None
                    else:
                        ntbp = False
                    if not db.file_in_db(logical_filepath):
                    if not db.file_in_db(filepath):
                        age = time.time() - os.path.getmtime(physical_filepath)
                        age = time.time() - os.path.getmtime(filepath)
                        if age < file_min_age:
                            print("Skipping file because too new", logical_filepath)
                            print("Skipping file because too new", filepath)
                            continue
                        print("Importing")
                        match = rx.match(os.path.basename(logical_filepath))
                        match = rx.match(os.path.basename(filepath))
                        if not match:
                            error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})"
                            error_message = f"File path not match the expected format! ({filepath} ~ {pattern['regex']})"
                            print(error_message, file=sys.stderr)
                            print("This file will be ignored!")
                            continue
@@ -78,7 +77,7 @@ if __name__ == '__main__':
                        file_info = dict(zip(pattern["captures"], match.groups()))
                        file_info["meta"] = {}
                        p111_data = p111.from_file(physical_filepath)
                        p111_data = p111.from_file(filepath)
                        print("Saving")
@@ -86,7 +85,7 @@ if __name__ == '__main__':
                        if len(p111_records):
                            file_info["meta"]["lineName"] = p111.line_name(p111_data)
                            db.save_raw_p111(p111_records, file_info, logical_filepath, survey["epsg"], ntbp=ntbp)
                            db.save_raw_p111(p111_records, file_info, filepath, survey["epsg"], ntbp=ntbp)
                        else:
                            print("No source records found in file")
                    else:
@@ -94,7 +93,7 @@ if __name__ == '__main__':
                        # Update the NTBP status to whatever the latest is,
                        # as it might have changed.
                        db.set_ntbp(logical_filepath, ntbp)
                        db.set_ntbp(filepath, ntbp)
                        if ntbp:
                            print("Sequence is NTBP")

View File

@@ -20,11 +20,12 @@ from datastore import Datastore
if __name__ == '__main__':
    print("Reading configuration")
    surveys = configuration.surveys()
    file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()
    db.connect()
    print("Reading surveys")
    for survey in surveys:
@@ -46,38 +47,36 @@ if __name__ == '__main__':
        rx = re.compile(pattern["regex"], flags)
        for fileprefix in raw_smsrc["paths"]:
            realprefix = configuration.translate_path(fileprefix)
            print(f"Path prefix: {fileprefix}{realprefix}")
            print(f"Path prefix: {fileprefix}")
            for globspec in raw_smsrc["globs"]:
                for physical_filepath in pathlib.Path(realprefix).glob(globspec):
                    physical_filepath = str(physical_filepath)
                    logical_filepath = configuration.untranslate_path(physical_filepath)
                    print(f"Found {logical_filepath}")
                for filepath in pathlib.Path(fileprefix).glob(globspec):
                    filepath = str(filepath)
                    print(f"Found {filepath}")
                    if not db.file_in_db(logical_filepath):
                    if not db.file_in_db(filepath):
                        age = time.time() - os.path.getmtime(physical_filepath)
                        age = time.time() - os.path.getmtime(filepath)
                        if age < file_min_age:
                            print("Skipping file because too new", logical_filepath)
                            print("Skipping file because too new", filepath)
                            continue
                        print("Importing")
                        match = rx.match(os.path.basename(logical_filepath))
                        match = rx.match(os.path.basename(filepath))
                        if not match:
                            error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})"
                            error_message = f"File path not matching the expected format! ({filepath} ~ {pattern['regex']})"
                            print(error_message, file=sys.stderr)
                            print("This file will be ignored!")
                            continue
                        file_info = dict(zip(pattern["captures"], match.groups()))
                        smsrc_records = smsrc.from_file(physical_filepath)
                        smsrc_records = smsrc.from_file(filepath)
                        print("Saving")
                        db.save_raw_smsrc(smsrc_records, file_info, logical_filepath)
                        db.save_raw_smsrc(smsrc_records, file_info, filepath)
                    else:
                        print("Already in DB")

View File

@@ -15,4 +15,25 @@ from datastore import Datastore
if __name__ == '__main__':
    print("This function is obsolete. Returning with no action")

    print("Reading configuration")
    configs = configuration.files(include_archived = True)
    print("Connecting to database")
    db = Datastore()
    #db.connect()
    print("Reading surveys")
    for config in configs:
        filepath = config[0]
        survey = config[1]
        print(f'Survey: {survey["id"]} ({filepath})')
        db.set_survey(survey["schema"])
        if not db.file_in_db(filepath):
            print("Saving to DB")
            db.save_file_data(filepath, json.dumps(survey))
            print("Applying survey configuration")
            db.apply_survey_configuration()
        else:
            print("Already in DB")
    print("Done")

View File

@@ -4,10 +4,9 @@ import sps
Preplot importing functions.
"""

def from_file (file, realpath = None):
    filepath = realpath or file["path"]
def from_file (file):
    if not "type" in file or file["type"] == "sps":
        records = sps.from_file(filepath, file["format"] if "format" in file else None )
        records = sps.from_file(file["path"], file["format"] if "format" in file else None )
    else:
        return "Not an SPS file"

View File

@@ -13,27 +13,21 @@ from datastore import Datastore
if __name__ == '__main__':
    print("Reading configuration")
    surveys = configuration.surveys()
    print("Connecting to database")
    db = Datastore()
    print("Reading configuration")
    surveys = db.surveys()
    print("Reading surveys")
    for survey in surveys:
        print(f'Survey: {survey["id"]} ({survey["schema"]})')
        db.set_survey(survey["schema"])
        for file in db.list_files():
            try:
                path = configuration.translate_path(file[0])
                if not os.path.exists(path):
                    print(path, "NOT FOUND")
                    db.del_file(file[0])
            except TypeError:
                # In case the logical path no longer matches
                # the Dougal configuration.
                print(file[0], "COULD NOT BE TRANSLATED TO A PHYSICAL PATH. DELETING")
                db.del_file(file[0])
            path = file[0]
            if not os.path.exists(path):
                print(path, "NOT FOUND")
                db.del_file(path)
    print("Done")

View File

@@ -90,12 +90,6 @@ function run () {
    rm $STDOUTLOG $STDERRLOG
}

function cleanup () {
    if [[ -f $LOCKFILE ]]; then
        rm "$LOCKFILE"
    fi
}

if [[ -f $LOCKFILE ]]; then
    PID=$(cat "$LOCKFILE")
    if pgrep -F "$LOCKFILE"; then
@@ -113,13 +107,6 @@ echo "$$" > "$LOCKFILE" || {
}

print_info "Start run"

print_log "Check if data is accessible"
$BINDIR/check_mounts_present.py || {
    print_warning "Import mounts not accessible. Inhibiting all tasks!"
    cleanup
    exit 253
}

print_log "Purge deleted files"
run $BINDIR/purge_deleted_files.py

View File

@@ -32,25 +32,6 @@ imports:
  # least this many seconds ago.
  file_min_age: 60

  # These paths refer to remote mounts which must be present in order
  # for imports to work. If any of these paths are empty, import actions
  # (including data deletion) will be inhibited. This is to cope with
  # things like transient network failures.
  mounts:
    - /srv/mnt/Data

  # These paths can be exposed to end users via the API. They should
  # contain the locations where project data, or any other user data
  # that needs to be accessible by Dougal, is located.
  #
  # This key can be either a string or an object:
  # - If a string, it points to the root path for Dougal-accessible data.
  # - If an object, there is an implicit root and the first-level
  #   paths are denoted by the keys, with the values being their
  #   respective physical paths.
  # Non-absolute paths are relative to $DOUGAL_ROOT.
  paths: /srv/mnt/Data
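
  # For example, a hypothetical object form mapping two logical roots to
  # their physical locations (values invented for illustration):
  #
  # paths:
  #   projects: /srv/mnt/Data
  #   archive: /srv/mnt/Archive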
queues:
  asaqc:
    request:

View File

@@ -1,5 +1,3 @@
\connect dougal
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.13"}')
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.12"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.13"}' WHERE public.info.key = 'version';
SET value = public.info.value || '{"db_schema": "0.3.12"}' WHERE public.info.key = 'version';

View File

@@ -671,7 +671,7 @@ BEGIN
id <> NEW.id
AND label = NEW.label
AND id IN (SELECT id FROM events_seq WHERE sequence = _sequence);
DELETE
FROM events_timed_labels
WHERE
@@ -854,7 +854,7 @@ CREATE FUNCTION _SURVEY__TEMPLATE_.ij_error(line double precision, point double
DECLARE
bp jsonb := binning_parameters();
ij public.geometry := to_binning_grid(geom, bp);
theta numeric := (bp->>'theta')::numeric * pi() / 180;
I_inc numeric DEFAULT 1;
J_inc numeric DEFAULT 1;
@@ -869,13 +869,13 @@ DECLARE
yoff numeric := (bp->'origin'->>'J')::numeric;
E0 numeric := (bp->'origin'->>'easting')::numeric;
N0 numeric := (bp->'origin'->>'northing')::numeric;
error_i double precision;
error_j double precision;
BEGIN
error_i := (public.st_x(ij) - line) * I_width;
error_j := (public.st_y(ij) - point) * J_width;
RETURN public.ST_MakePoint(error_i, error_j);
END
$$;
@@ -1488,9 +1488,9 @@ CREATE VIEW _SURVEY__TEMPLATE_.final_lines_summary AS
s.ts1,
(s.ts1 - s.ts0) AS duration,
s.num_points,
( SELECT count(*) AS count
FROM _SURVEY__TEMPLATE_.missing_sequence_final_points
WHERE missing_sequence_final_points.sequence = s.sequence) AS missing_shots,
(( SELECT count(*) AS count
FROM _SURVEY__TEMPLATE_.preplot_points
WHERE ((preplot_points.line = fl.line) AND (((preplot_points.point >= s.fsp) AND (preplot_points.point <= s.lsp)) OR ((preplot_points.point >= s.lsp) AND (preplot_points.point <= s.fsp))))) - s.num_points) AS missing_shots,
s.length,
s.azimuth,
fl.remarks,
@@ -2137,9 +2137,9 @@ CREATE VIEW _SURVEY__TEMPLATE_.raw_lines_summary AS
(s.ts1 - s.ts0) AS duration,
s.num_points,
s.num_preplots,
(SELECT count(*) AS count
FROM _SURVEY__TEMPLATE_.missing_sequence_raw_points
WHERE missing_sequence_raw_points.sequence = s.sequence) AS missing_shots,
(( SELECT count(*) AS count
FROM _SURVEY__TEMPLATE_.preplot_points
WHERE ((preplot_points.line = rl.line) AND (((preplot_points.point >= s.fsp) AND (preplot_points.point <= s.lsp)) OR ((preplot_points.point >= s.lsp) AND (preplot_points.point <= s.fsp))))) - s.num_preplots) AS missing_shots,
s.length,
s.azimuth,
rl.remarks,

View File

@@ -1,162 +0,0 @@
-- Fix wrong number of missing shots in summary views
--
-- New schema version: 0.3.13
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- Fixes a bug in the `final_lines_summary` and `raw_lines_summary` views
-- which results in the number of missing shots being miscounted on jobs
-- using three sources.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--
BEGIN;
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN
RAISE NOTICE 'Updating schema %', schema_name;
-- We need to set the search path because some of the trigger
-- functions reference other tables in survey schemas assuming
-- they are in the search path.
EXECUTE format('SET search_path TO %I,public', schema_name);
CREATE OR REPLACE VIEW raw_lines_summary AS
WITH summary AS (
SELECT DISTINCT rs.sequence,
first_value(rs.point) OVER w AS fsp,
last_value(rs.point) OVER w AS lsp,
first_value(rs.tstamp) OVER w AS ts0,
last_value(rs.tstamp) OVER w AS ts1,
count(rs.point) OVER w AS num_points,
count(pp.point) OVER w AS num_preplots,
public.st_distance(first_value(rs.geometry) OVER w, last_value(rs.geometry) OVER w) AS length,
((public.st_azimuth(first_value(rs.geometry) OVER w, last_value(rs.geometry) OVER w) * (180)::double precision) / pi()) AS azimuth
FROM (raw_shots rs
LEFT JOIN preplot_points pp USING (line, point))
WINDOW w AS (PARTITION BY rs.sequence ORDER BY rs.tstamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
)
SELECT rl.sequence,
rl.line,
s.fsp,
s.lsp,
s.ts0,
s.ts1,
(s.ts1 - s.ts0) AS duration,
s.num_points,
s.num_preplots,
(SELECT count(*) AS count
FROM missing_sequence_raw_points
WHERE missing_sequence_raw_points.sequence = s.sequence) AS missing_shots,
s.length,
s.azimuth,
rl.remarks,
rl.ntbp,
rl.meta
FROM (summary s
JOIN raw_lines rl USING (sequence));
CREATE OR REPLACE VIEW final_lines_summary AS
WITH summary AS (
SELECT DISTINCT fs.sequence,
first_value(fs.point) OVER w AS fsp,
last_value(fs.point) OVER w AS lsp,
first_value(fs.tstamp) OVER w AS ts0,
last_value(fs.tstamp) OVER w AS ts1,
count(fs.point) OVER w AS num_points,
public.st_distance(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) AS length,
((public.st_azimuth(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) * (180)::double precision) / pi()) AS azimuth
FROM final_shots fs
WINDOW w AS (PARTITION BY fs.sequence ORDER BY fs.tstamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
)
SELECT fl.sequence,
fl.line,
s.fsp,
s.lsp,
s.ts0,
s.ts1,
(s.ts1 - s.ts0) AS duration,
s.num_points,
( SELECT count(*) AS count
FROM missing_sequence_final_points
WHERE missing_sequence_final_points.sequence = s.sequence) AS missing_shots,
s.length,
s.azimuth,
fl.remarks,
fl.meta
FROM (summary s
JOIN final_lines fl USING (sequence));
END;
$outer$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
current_db_version TEXT;
BEGIN
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
IF current_db_version >= '0.3.13' THEN
RAISE EXCEPTION
USING MESSAGE='Patch already applied';
END IF;
IF current_db_version != '0.3.12' THEN
RAISE EXCEPTION
USING MESSAGE='Invalid database version: ' || current_db_version,
HINT='Ensure all previous patches have been applied.';
END IF;
FOR row IN
SELECT schema_name FROM information_schema.schemata
WHERE schema_name LIKE 'survey_%'
ORDER BY schema_name
LOOP
CALL pg_temp.upgrade_survey_schema(row.schema_name);
END LOOP;
END;
$outer$ LANGUAGE plpgsql;
CALL pg_temp.upgrade();
CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();
CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.13"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.13"}' WHERE public.info.key = 'version';
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);
--
--NOTE Run `COMMIT;` now if all went well
--

View File

@@ -9,7 +9,7 @@
"version": "0.0.0",
"license": "UNLICENSED",
"dependencies": {
"@mdi/font": "^7.2.96",
"@mdi/font": "^5.6.55",
"core-js": "^3.6.5",
"d3": "^7.0.1",
"jwt-decode": "^3.0.0",
@@ -1763,9 +1763,9 @@
}
},
"node_modules/@mdi/font": {
"version": "7.2.96",
"resolved": "https://registry.npmjs.org/@mdi/font/-/font-7.2.96.tgz",
"integrity": "sha512-e//lmkmpFUMZKhmCY9zdjRe4zNXfbOIJnn6xveHbaV2kSw5aJ5dLXUxcRt1Gxfi7ZYpFLUWlkG2MGSFAiqAu7w=="
"version": "5.9.55",
"resolved": "https://registry.npmjs.org/@mdi/font/-/font-5.9.55.tgz",
"integrity": "sha512-jswRF6q3eq8NWpWiqct6q+6Fg/I7nUhrxYJfiEM8JJpap0wVJLQdbKtyS65GdlK7S7Ytnx3TTi/bmw+tBhkGmg=="
},
"node_modules/@mrmlnc/readdir-enhanced": {
"version": "2.2.1",
@@ -3844,24 +3844,14 @@
}
},
"node_modules/caniuse-lite": {
"version": "1.0.30001476",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001476.tgz",
"integrity": "sha512-JmpktFppVSvyUN4gsLS0bShY2L9ZUslHLE72vgemBkS43JD2fOvKTKs+GtRwuxrtRGnwJFW0ye7kWRRlLJS9vQ==",
"version": "1.0.30001317",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001317.tgz",
"integrity": "sha512-xIZLh8gBm4dqNX0gkzrBeyI86J2eCjWzYAs40q88smG844YIrN4tVQl/RhquHvKEKImWWFIVh1Lxe5n1G/N+GQ==",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/browserslist"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/caniuse-lite"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
]
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/browserslist"
}
},
"node_modules/case-sensitive-paths-webpack-plugin": {
"version": "2.4.0",
@@ -16442,9 +16432,9 @@
}
},
"@mdi/font": {
"version": "7.2.96",
"resolved": "https://registry.npmjs.org/@mdi/font/-/font-7.2.96.tgz",
"integrity": "sha512-e//lmkmpFUMZKhmCY9zdjRe4zNXfbOIJnn6xveHbaV2kSw5aJ5dLXUxcRt1Gxfi7ZYpFLUWlkG2MGSFAiqAu7w=="
"version": "5.9.55",
"resolved": "https://registry.npmjs.org/@mdi/font/-/font-5.9.55.tgz",
"integrity": "sha512-jswRF6q3eq8NWpWiqct6q+6Fg/I7nUhrxYJfiEM8JJpap0wVJLQdbKtyS65GdlK7S7Ytnx3TTi/bmw+tBhkGmg=="
},
"@mrmlnc/readdir-enhanced": {
"version": "2.2.1",
@@ -18185,9 +18175,9 @@
}
},
"caniuse-lite": {
"version": "1.0.30001476",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001476.tgz",
"integrity": "sha512-JmpktFppVSvyUN4gsLS0bShY2L9ZUslHLE72vgemBkS43JD2fOvKTKs+GtRwuxrtRGnwJFW0ye7kWRRlLJS9vQ==",
"version": "1.0.30001317",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001317.tgz",
"integrity": "sha512-xIZLh8gBm4dqNX0gkzrBeyI86J2eCjWzYAs40q88smG844YIrN4tVQl/RhquHvKEKImWWFIVh1Lxe5n1G/N+GQ==",
"dev": true
},
"case-sensitive-paths-webpack-plugin": {

View File

@@ -3,11 +3,11 @@
"version": "0.0.0",
"private": true,
"scripts": {
"serve": "vue-cli-service serve --host=0.0.0.0",
"serve": "vue-cli-service serve",
"build": "vue-cli-service build"
},
"dependencies": {
"@mdi/font": "^7.2.96",
"@mdi/font": "^5.6.55",
"core-js": "^3.6.5",
"d3": "^7.0.1",
"jwt-decode": "^3.0.0",

View File

@@ -16,7 +16,7 @@ async function api ({state, commit, dispatch}, [resource, init = {}, cb]) {
    const url = /^https?:\/\//i.test(resource) ? resource : (state.apiUrl + resource);
    const res = await fetch(url, init);

    if (typeof cb === 'function') {
      await cb(null, res);
      cb(null, res);
    }

    if (res.ok) {
@@ -35,14 +35,7 @@ async function api ({state, commit, dispatch}, [resource, init = {}, cb]) {
        throw err;
      }
    } else {
      let message = res.statusText;
      if (res.headers.get("Content-Type").match(/^application\/json/i)) {
        const body = await res.json();
        if (body.message) {
          message = body.message;
        }
      }
      await dispatch('showSnack', [message, "warning"]);
      await dispatch('showSnack', [res.statusText, "warning"]);
    }
  } catch (err) {
    if (err && err.name == "AbortError") return;

View File

@@ -8,10 +8,6 @@ async function getProject ({commit, dispatch}, projectId) {
    const recentProjects = JSON.parse(localStorage.getItem("recentProjects") || "[]");
    recentProjects.unshift(res);
    localStorage.setItem("recentProjects", JSON.stringify(recentProjects.slice(0, 3)));
  } else {
    commit('setProjectName', null);
    commit('setProjectId', null);
    commit('setProjectSchema', null);
  }
}

View File

@@ -1,14 +0,0 @@
function projectId (state) {
  return state.projectId;
}

function projectName (state) {
  return state.projectName;
}

function projectSchema (state) {
  return state.projectSchema;
}

export default { projectId, projectName, projectSchema };

View File

@@ -604,16 +604,16 @@ export default {
    async getLabelDefinitions () {
      const url = `/project/${this.$route.params.project}/label`;
      //const labelSet = {};
      this.labels = await this.api([url]) ?? {};
      //labels.forEach( l => labelSet[l.name] = l.data );
      //this.labels = labelSet;
      const labelSet = {};
      const labels = await this.api([url]) || [];
      labels.forEach( l => labelSet[l.name] = l.data );
      this.labels = labelSet;
    },

    async getPresetRemarks () {
      const url = `/project/${this.$route.params.project}/configuration`;
      const url = `/project/${this.$route.params.project}/configuration/events/presetRemarks`;
      this.presetRemarks = (await this.api([url]))?.events?.presetRemarks ?? {};
      this.presetRemarks = await this.api([url]);
    },

    newItem (from = {}) {
View File

@@ -375,8 +375,7 @@ export default {
        }
      ],
      labels: {},
      hashMarker: null,
      references: {}
      hashMarker: null
    };
  },
@@ -475,7 +474,7 @@ export default {
        bounds._northEast.lng,
        bounds._northEast.lat
      ].map(i => i.toFixed(bboxScale)).join(",");
      const limit = 10000; // Empirical value
      const limit = 10000;

      const query = new URLSearchParams({bbox, limit});
@@ -521,9 +520,7 @@ export default {
          l.layer.lastRequestURL = url;
        } else {
          console.warn(`Too much data from ${url} (${layer.features.length ?? layer.length}/${limit} features)`);
          this.showSnack([`Layer ${l.layer.options.userLayerName} is too large: ${layer.features.length ?? layer.length} features; maximum is ${limit}`, "error"]);
          console.warn("Too much data from", url);
        }
      })
      .finally( () => {
@@ -683,109 +680,7 @@ export default {
      this.labels = labelSet;
    },

    removeUserLayers () {
      map.eachLayer( layer => {
        if (layer.options.userLayer === true) {
          console.log("Removing", layer);
          layer.eachLayer( sublayer => {
            const idx = this.layerRefreshConfig.findIndex(i => i.layer == layer);
            if (idx != -1) {
              this.layerRefreshConfig.splice(idx, 1);
            }
          });
          map.removeLayer(layer);
          this.references.layerControl.removeLayer(layer);
        }
      });
    },

    async addUserLayers (userLayers) {
      const options = {
        userLayer: true,
        style (feature) {
          const style = {
            stroke: undefined,
            color: "grey",
            weight: 2,
            opacity: 0.5,
            lineCap: undefined,
            lineJoin: undefined,
            dashArray: undefined,
            dashOffset: undefined,
            fill: undefined,
            fillColor: "lightgrey",
            fillOpacity: 0.5,
            fillRule: undefined
          };
          for (let key in style) {
            switch (key) {
              case "color":
                style[key] = feature.properties?.colour ?? feature.properties?.color ?? style[key];
                break;
              case "fillColor":
                style[key] = feature.properties?.fillColour ?? feature.properties?.fillColor ?? style[key];
                break;
              default:
                style[key] = feature.properties?.[key] ?? style[key];
            }
            if (typeof style[key] === "undefined") {
              delete style[key];
            }
          }
          return style;
        }
      };

      const userLayerGroups = {};
      userLayers.forEach(layer => {
        if (!(layer.name in userLayerGroups)) {
          userLayerGroups[layer.name] = [];
        }
        userLayerGroups[layer.name].push(layer);
      });

      for (let userLayerName in userLayerGroups) {
        const userLayerGroup = userLayerGroups[userLayerName];
        const layer = L.featureGroup(null, {userLayer: true, userLayerGroup: true, userLayerName});
        userLayerGroup.forEach(l => {
          const sublayer = L.geoJSON(null, {...options, userLayerName});
          layer.addLayer(sublayer);
          sublayer.on('add', ({target}) => {
            this.refreshLayers([target])
          });
          const refreshConfig = {
            layer: sublayer,
            url: (query = "") => {
              return `/files/${l.path}`;
            }
          };
          this.layerRefreshConfig.push(refreshConfig);
        });
        layer.on('add', ({target}) => {
          this.refreshLayers(target.getLayers())
        });
        this.references.layerControl.addOverlay(layer, `<span title="User layer" style="text-decoration: dotted underline;">${userLayerName}</span>`);
      }
    },

    async fetchUserLayers () {
      const url = `/project/${this.$route.params.project}/gis/layer`;
      const userLayers = await this.api([url]) || [];
      this.removeUserLayers();
      this.addUserLayers(userLayers);
    },

    ...mapActions(["api", "showSnack"])
    ...mapActions(["api"])
  },
@@ -864,9 +759,6 @@ export default {
    const layerControl = L.control.layers(tileMaps, layers).addTo(map);
    const scaleControl = L.control.scale().addTo(map);
    this.references.layerControl = layerControl;
    this.references.scaleControl = scaleControl;

    if (init.position) {
      map.setView(init.position.slice(1), init.position[0]);
    } else {
@@ -894,13 +786,10 @@ export default {
    map.on('layeradd', this.updateURL);
    map.on('layerremove', this.updateURL);

    this.layerRefreshConfig.forEach( l => {
      l.layer.on('add', ({target}) => this.refreshLayers([target]));
    });

    this.fetchUserLayers();

    if (init.position) {
      this.refreshLayers();
    } else {

View File

@@ -119,7 +119,11 @@
        >
          <template v-slot:item.srss="{item}">
            <v-icon small :title="srssInfo(item)">{{srssIcon(item)}}</v-icon>
            <span style="white-space: nowrap;">
              <v-icon small :title="srssInfo(item)">{{srssIcon(item)}}</v-icon>
              /
              <v-icon small :title="wxInfo(item)" v-if="item.meta.wx">{{wxIcon(item)}}</v-icon>
            </span>
          </template>
          <template v-slot:item.sequence="{item, value}">
@@ -422,6 +426,123 @@ export default {
      plannerConfig: null,
      shiftAll: false, // Shift all sequences checkbox

      // Weather API
      wxData: null,
      // WMO weather interpretation codes -> description and icon
      weathercode: {
        0:  { description: "Clear sky", icon: "mdi-weather-sunny" },
        1:  { description: "Mainly clear", icon: "mdi-weather-sunny" },
        2:  { description: "Partly cloudy", icon: "mdi-weather-partly-cloudy" },
        3:  { description: "Overcast", icon: "mdi-weather-cloudy" },
        45: { description: "Fog", icon: "mdi-weather-fog" },
        48: { description: "Depositing rime fog", icon: "mdi-weather-fog" },
        51: { description: "Light drizzle", icon: "mdi-weather-partly-rainy" },
        53: { description: "Moderate drizzle", icon: "mdi-weather-partly-rainy" },
        55: { description: "Dense drizzle", icon: "mdi-weather-rainy" },
        56: { description: "Light freezing drizzle", icon: "mdi-weather-partly-snowy-rainy" },
        57: { description: "Freezing drizzle", icon: "mdi-weather-partly-snowy-rainy" },
        61: { description: "Light rain", icon: "mdi-weather-rainy" },
        63: { description: "Moderate rain", icon: "mdi-weather-rainy" },
        65: { description: "Heavy rain", icon: "mdi-weather-pouring" },
        66: { description: "Light freezing rain", icon: "mdi-loading" },
        67: { description: "Freezing rain", icon: "mdi-loading" },
        71: { description: "Light snow", icon: "mdi-loading" },
        73: { description: "Moderate snow", icon: "mdi-loading" },
        75: { description: "Heavy snow", icon: "mdi-loading" },
        77: { description: "Snow grains", icon: "mdi-loading" },
        80: { description: "Light rain showers", icon: "mdi-loading" },
        81: { description: "Moderate rain showers", icon: "mdi-loading" },
        82: { description: "Violent rain showers", icon: "mdi-loading" },
        85: { description: "Light snow showers", icon: "mdi-loading" },
        86: { description: "Snow showers", icon: "mdi-loading" },
        95: { description: "Thunderstorm", icon: "mdi-loading" },
        96: { description: "Hailstorm", icon: "mdi-loading" },
        99: { description: "Heavy hailstorm", icon: "mdi-loading" },
      },

      // Context menu stuff
      contextMenuShow: false,
      contextMenuX: 0,
@@ -630,6 +751,113 @@ export default {
      return text.join("\n");
    },

    wxInfo (line) {
      function atm(key) {
        return line.meta?.wx?.atmospheric?.hourly[key];
      }
      function mar(key) {
        return line.meta?.wx?.marine?.hourly[key];
      }
      const code = atm("weathercode");
      const description = this.weathercode[code]?.description ?? `WMO code ${code}`;
      const wind_speed = Math.round(atm("windspeed_10m"));
      const wind_direction = String(Math.round(atm("winddirection_10m"))).padStart(3, "0");
      const pressure = Math.round(atm("surface_pressure"));
      const temperature = Math.round(atm("temperature_2m"));
      const humidity = atm("relativehumidity_2m");
      const precipitation = atm("precipitation");
      const precipitation_probability = atm("precipitation_probability");
      const precipitation_str = precipitation_probability
        ? `\nPrecipitation ${precipitation} mm (prob. ${precipitation_probability}%)`
        : "";
      const wave_height = mar("wave_height").toFixed(1);
      const wave_direction = mar("wave_direction");
      const wave_period = mar("wave_period");
      return `${description}\n${temperature}° C\n${pressure} hPa\nWind ${wind_speed} kt ${wind_direction}°\nRelative humidity ${humidity}%${precipitation_str}\nWaves ${wave_height} m ${wave_direction}° @ ${wave_period} s`;
    },

    wxIcon (line) {
      const code = line.meta?.wx?.atmospheric?.hourly?.weathercode;
      return this.weathercode[code]?.icon ?? "mdi-help";
    },

    async wxQuery (line) {
      function midpoint(line) {
        // WARNING Fails if across the antimeridian
        const longitude = (line.geometry.coordinates[0][0] + line.geometry.coordinates[1][0]) / 2;
        const latitude = (line.geometry.coordinates[0][1] + line.geometry.coordinates[1][1]) / 2;
        return [ longitude, latitude ];
      }

      function extract (fcst) {
        // Midpoint of the sequence, as a unix timestamp in seconds
        const τ = (line.ts0.valueOf() + line.ts1.valueOf()) / 2000;
        // Find the index of the hourly sample closest to τ; the reduction
        // is seeded with null so that index 0 is also eligible.
        const [idx, ε] = fcst?.hourly?.time?.reduce( (acc, cur, idx) => {
          const δ = Math.abs(cur - τ);
          const retval = acc
            ? acc[1] < δ
              ? acc
              : [ idx, δ ]
            : [ idx, δ ];
          return retval;
        }, null);
        if (idx != null) {
          // Collapse the parallel hourly arrays down to the chosen sample
          for (let key in fcst?.hourly) {
            fcst.hourly[key] = fcst.hourly[key][idx];
          }
        }
        return fcst;
      }
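
      // For reference, the assumed shape of an open-meteo hourly response
      // with timeformat=unixtime (parallel arrays, one entry per hour),
      // which extract() collapses down to the single closest sample:
      //
      //   { hourly: { time: [1693526400, ...], temperature_2m: [17.3, ...], ... } }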
      async function fetch_atmospheric (opts) {
        const { longitude, latitude, dt0, dt1 } = opts;
        const url = `https://api.open-meteo.com/v1/forecast?latitude=${latitude}&longitude=${longitude}&hourly=temperature_2m,relativehumidity_2m,precipitation_probability,precipitation,weathercode,pressure_msl,surface_pressure,windspeed_10m,winddirection_10m&daily=uv_index_max&windspeed_unit=kn&timeformat=unixtime&timezone=GMT&start_date=${dt0}&end_date=${dt1}&format=json`;
        const init = {};
        const res = await fetch (url, init);
        if (res?.ok) {
          const data = await res.json();
          return extract(data);
        }
      }

      async function fetch_marine (opts) {
        const { longitude, latitude, dt0, dt1 } = opts;
        const url = `https://marine-api.open-meteo.com/v1/marine?latitude=${latitude}&longitude=${longitude}&hourly=wave_height,wave_direction,wave_period&timeformat=unixtime&timezone=GMT&start_date=${dt0}&end_date=${dt1}&format=json`;
        const init = {};
        const res = await fetch (url, init);
        if (res?.ok) {
          const data = await res.json();
          return extract(data);
        }
      }

      if (line) {
        const [ longitude, latitude ] = midpoint(line);
        const dt0 = line.ts0.toISOString().substr(0, 10);
        const dt1 = line.ts1.toISOString().substr(0, 10);
        return {
          atmospheric: await fetch_atmospheric({longitude, latitude, dt0, dt1}),
          marine: await fetch_marine({longitude, latitude, dt0, dt1})
        };
      }
    },
    lagAfter (item) {
      const pos = this.items.indexOf(item)+1;
      if (pos != 0) {
@@ -723,6 +951,9 @@ export default {
      for (const item of this.items) {
        item.ts0 = new Date(item.ts0);
        item.ts1 = new Date(item.ts1);
        this.wxQuery(item).then( (wx) => {
          item.meta = {...item.meta, wx};
        });
      }
    },

View File

@@ -1,25 +1,18 @@
<template>
  <v-container fluid fill-height class="ma-0 pa-0">
    <v-row no-gutters align="stretch" class="fill-height">
      <v-col cols="12" v-if="projectFound">
      <v-col cols="12">
        <!-- Show component here according to selected route -->
        <keep-alive>
          <router-view :key="$route.path"></router-view>
        </keep-alive>
      </v-col>
      <v-col cols="12" v-else>
        <v-card>
          <v-card-text>
            Project does not exist.
          </v-card-text>
        </v-card>
      </v-col>
    </v-row>
  </v-container>
</template>

<script>
import { mapActions, mapGetters } from 'vuex'
import { mapActions } from 'vuex'

export default {
  name: 'Project',
@@ -31,24 +24,6 @@ export default {
    }
  },
  computed: {
    projectFound () {
      return this.loading || this.projectId;
    },
    ...mapGetters(["loading", "projectId", "serverEvent"])
  },
  watch: {
    async serverEvent (event) {
      if (event.channel == "project" && event.payload?.operation == "DELETE" && event.payload?.schema == "public") {
        // Project potentially deleted
        await this.getProject(this.$route.params.project);
      }
    }
  },
  methods: {
    ...mapActions(["getProject"])
  },

View File

@@ -83,22 +83,12 @@ export default {
  },
  computed: {
    ...mapGetters(['loading', 'serverEvent'])
  },
  watch: {
    async serverEvent (event) {
      if (event.channel == "project" && event.payload?.schema == "public") {
        if (event.payload?.operation == "DELETE" || event.payload?.operation == "INSERT") {
          await this.load();
        }
      }
    }
    ...mapGetters(['loading'])
  },
  methods: {
    async list () {
      this.items = await this.api(["/project"]) || [];
      this.items = await this.api(["/project/"]) || [];
    },
    async summary (item) {

View File

@@ -292,13 +292,9 @@
<v-list-item v-for="(path, index) in item.raw_files"
key="index"
link
title="Download file"
:href="`/api/files${path}`"
title="View the shot log"
>
{{ basename(path) }}
<v-list-item-action>
<v-icon right small>mdi-cloud-download</v-icon>
</v-list-item-action>
</v-list-item>
</v-list-group>
<v-list-group value="true" v-if="item.final_files">
@@ -312,13 +308,10 @@
</template>
<v-list-item v-for="(path, index) in item.final_files"
key="index"
title="Download file"
:href="`/api/files${path}`"
link
title="View the shot log"
>
{{ basename(path) }}
<v-list-item-action>
<v-icon right small>mdi-cloud-download</v-icon>
</v-list-item-action>
</v-list-item>
</v-list-group>
</v-list>

View File

@@ -4,7 +4,6 @@ module.exports = {
"leaflet-arrowheads"
],
devServer: {
host: "0.0.0.0",
proxy: {
"^/api(/|$)": {
target: "http://localhost:3000",

View File

@@ -1,7 +1,6 @@
const http = require('http');
const express = require('express');
express.yaml ??= require('body-parser').yaml; // NOTE: Use own customised body-parser
const cookieParser = require('cookie-parser');
const maybeSendAlert = require("../lib/alerts");
@@ -10,7 +9,7 @@ const mw = require('./middleware');
const { ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
const verbose = process.env.NODE_ENV != 'test';

const app = express();
app.locals.version = "0.4.0"; // API version
app.locals.version = "0.3.1"; // API version

app.map = function(a, route){
  route = route || '';
@@ -32,7 +31,6 @@ app.map = function(a, route){
};

app.use(express.json({type: "application/json", strict: false, limit: '10mb'}));
app.use(express.yaml({type: "application/yaml", limit: '10mb'}));
app.use(express.urlencoded({ type: "application/x-www-form-urlencoded", extended: true }));
app.use(express.text({type: "text/*", limit: '10mb'}));
app.use((req, res, next) => {
@@ -89,19 +87,13 @@ app.use(mw.etag.ifNoneMatch);
// We must be authenticated before we can access these
app.map({
  '/project': {
    get: [ mw.project.get ], // Get list of projects
    post: [ mw.auth.access.admin, mw.project.post ], // Create a new project
    get: [ mw.project.list ], // Get list of projects
  },
  '/project/:project': {
    get: [ mw.project.summary.get ], // Get project data
    delete: [ mw.auth.access.admin, mw.project.delete ], // Delete a project (only if empty)
    get: [ mw.project.get ], // Get project data
  },
  '/project/:project/summary': {
    get: [ mw.project.summary.get ],
  },
  '/project/:project/configuration': {
    get: [ mw.auth.access.write, mw.project.configuration.get ], // Get project configuration
    patch: [ mw.auth.access.write, mw.project.configuration.patch ], // Modify project configuration
    get: [ mw.project.get ],
  },
/*
@@ -123,12 +115,6 @@ app.map({
  '/project/:project/gis/final/:featuretype(line|point)': {
    get: [ mw.gis.project.final ]
  },
  '/project/:project/gis/layer': {
    get: [ mw.etag.noSave, mw.gis.project.layer.get ]
  },
  '/project/:project/gis/layer/:name': {
    get: [ mw.etag.noSave, mw.gis.project.layer.get ]
  },
/*
 * Line endpoints
@@ -261,12 +247,6 @@ app.map({
  // // post: [ mw.permissions.post ],
  // // delete: [ mw.permissions.delete ]
  // },
  '/project/:project/files/:path(*)': {
    get: [ mw.auth.access.write, mw.files.get ]
  },
  '/files/?:path(*)': {
    get: [ mw.auth.access.write, mw.etag.noSave, mw.files.get ]
  },
  '/navdata/': {
    get: [ mw.navdata.get ],
    'gis/:featuretype(line|point)': {
@@ -349,14 +329,15 @@ app.disable('x-powered-by');
app.enable('trust proxy');
INFO('trust proxy is ' + (app.get('trust proxy')? 'on' : 'off'));

const addr = "127.0.0.1";

if (!module.parent) {
  const port = process.env.HTTP_PORT || 3000;
  const host = process.env.HTTP_HOST || "127.0.0.1";
  var server = http.createServer(app).listen(port, host);
  var port = process.env.HTTP_PORT || 3000;
  var server = http.createServer(app).listen(port, addr);
  INFO('API started on port ' + port);
} else {
  app.start = function (port = 3000, host = "127.0.0.1", path) {
  app.start = function (port = 3000, path) {
    var root = app;
    if (path) {
@@ -365,9 +346,9 @@ if (!module.parent) {
      root.use(path, app);
    }
    const server = http.createServer(root).listen(port, host);
    const server = http.createServer(root).listen(port, addr);
    if (server) {
      console.log(`API started on port ${port}, prefix: ${path || "/"}`);
      // console.log(`API started on port ${port}, prefix: ${path || "/"}`);
      INFO(`API started on port ${port}, prefix: ${path || "/"}`);
    }
    return server;
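
// The ETag middleware wired in above (mw.etag.ifNoneMatch, mw.etag.noSave)
// is not shown in this comparison. As a rough sketch only -- the actual
// implementation may differ -- a conditional GET handler along these lines
// would account for the behaviour the routes rely on:
//
//   function ifNoneMatch (req, res, next) {
//     const tag = etagCache.get(req.originalUrl);   // hypothetical tag store
//     if (tag && req.headers['if-none-match'] === tag) {
//       return res.status(304).end();               // client copy still fresh
//     }
//     next();
//   }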

View File

@@ -1,4 +1,4 @@
const {expressjwt: expressJWT} = require('express-jwt');
const expressJWT = require('express-jwt');
const cfg = require("../../../lib/config").jwt;
@@ -15,12 +15,11 @@ const options = {
  secret: cfg.secret,
  credentialsRequired: false,
  algorithms: ['HS256'],
  requestProperty: "user",
  getToken
};

const allow = {
  path: [/\/login$/, /\/logout$/, /\/$/, /\/version$/],
  path: [/\/login$/, /\/logout$/],
  useOriginalUrl: false
};

View File

@@ -1,29 +0,0 @@
const files = require('../../../lib/files');

module.exports = async function (req, res, next) {
  try {
    const entity = await files.get(req.params.path, req.params.project, req.query);
    if (entity) {
      if (entity.download) {
        res.download(...entity.download, (err) => next(err));
      } else {
        // Directory listing
        res.status(203).json(entity);
        next();
      }
    } else {
      throw {
        status: 404,
        code: "ENOENT"
      };
    }
  } catch (err) {
    if (err.code == 'ENOENT') {
      res.status(404).json({message: err.code});
    } else {
      next(err);
    }
  }
};

View File

@@ -1,7 +0,0 @@
module.exports = {
  get: require('./get'),
  post: require('./post'),
  put: require('./put'),
  delete: require('./delete')
}

View File

@@ -2,6 +2,5 @@ module.exports = {
  bbox: require('./bbox'),
  preplot: require('./preplot'),
  raw: require('./raw'),
  final: require('./final'),
  layer: require('./layer')
  final: require('./final')
};

View File

@@ -1,18 +0,0 @@
const { gis } = require('../../../../../lib/db');

module.exports = async function (req, res, next) {
  try {
    const layers = await gis.project.layer.get(req.params.project, req.params.name);
    if (req.params.name && (!layers || !layers.length)) {
      res.status(404).json({message: "Not found"});
    } else {
      res.status(200).send(layers ?? []);
    }
    next();
  } catch (err) {
    next(err);
  }
};

View File

@@ -1,3 +0,0 @@
module.exports = {
  get: require('./get')
};

View File

@@ -1,6 +1,5 @@
module.exports = {
  event: require('./event'),
  files: require('./files'),
  plan: require('./plan'),
  line: require('./line'),
  project: require('./project'),

View File

@@ -1,11 +1,10 @@
const { project } = require('../../../lib/db');
const { label } = require('../../../lib/db');

module.exports = async function (req, res, next) {
  try {
    const labels = (await project.configuration.get(req.params.project))?.labels ?? {};
    res.status(200).send(labels);
    res.status(200).send(await label.list(req.params.project, req.query));
    next();
  } catch (err) {
    next(err);
View File

@@ -1,13 +0,0 @@
const { project } = require('../../../../lib/db');

module.exports = async function (req, res, next) {
  try {
    res.status(200).send(await project.configuration.get(req.params.project));
    next();
  } catch (err) {
    next(err);
  }
};

View File

@@ -1,8 +0,0 @@
module.exports = {
  get: require('./get'),
  // post: require('./post'),
  // put: require('./put'),
  patch: require('./patch'),
  // delete: require('./delete'),
};

View File

@@ -1,16 +0,0 @@
const { project } = require('../../../../lib/db');

module.exports = async function (req, res, next) {
  try {
    // TODO
    // Implement If-Match header requirements
    res.send(await project.configuration.patch(req.params.project, req.body));
    next();
  } catch (err) {
    next(err);
  }
};

View File

@@ -1,15 +0,0 @@
const { project } = require('../../../lib/db');

module.exports = async function (req, res, next) {
  try {
    await project.delete(req.params.project);
    res.status(204).send();
    next();
  } catch (err) {
    next(err);
  }
};

View File

@@ -4,11 +4,10 @@ const { project} = require('../../../lib/db');
module.exports = async function (req, res, next) {
  try {
    res.status(200).send(await project.get());
    res.status(200).send(await project.get(req.params.project));
    next();
  } catch (err) {
    next(err);
  }
};

View File

@@ -1,7 +1,4 @@
module.exports = {
  get: require('./get'),
  post: require('./post'),
  delete: require('./delete'),
  summary: require('./summary'),
  configuration: require('./configuration'),
  list: require('./list'),
  get: require('./get')
};

View File

@@ -0,0 +1,14 @@
const { project } = require('../../../lib/db');

module.exports = async function (req, res, next) {
  try {
    res.status(200).send(await project.list());
    next();
  } catch (err) {
    next(err);
  }
};

View File

@@ -1,16 +0,0 @@
const { project } = require('../../../lib/db');

module.exports = async function (req, res, next) {
  try {
    const payload = req.body;
    const projectDefinition = await project.post(payload);
    res.status(201).send(projectDefinition);
    next();
  } catch (err) {
    next(err);
  }
};

View File

@@ -1,13 +0,0 @@
const { project } = require('../../../../lib/db');

module.exports = async function (req, res, next) {
  try {
    res.status(200).send(await project.summary.get(req.params.project));
    next();
  } catch (err) {
    next(err);
  }
};

View File

@@ -1,3 +0,0 @@
module.exports = {
  get: require('./get'),
};

View File

@@ -13,10 +13,7 @@ async function main () {
const { fork } = require('child_process');
const port = process.env.HTTP_PORT || 3000;
const host = process.env.HTTP_HOST || "127.0.0.1";
const path = process.env.HTTP_PATH ?? "/api";
const server = api.start(port, host, path);
const server = api.start(process.env.HTTP_PORT || 3000, process.env.HTTP_PATH);
ws.start(server);
const eventManagerPath = [__dirname, "events"].join("/");

View File

@@ -1,8 +1,3 @@
/**
 * This refers to the Dougal instance configuration, taken
 * from $DOUGAL_ROOT/etc/config.yaml
 */
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');

View File

@@ -1,9 +1,17 @@
const { setSurvey, pool } = require('../connection');
const { setSurvey } = require('../connection');

async function get (projectId, path, opts = {}) {
  const client = await setSurvey(projectId);
  const text = `SELECT meta FROM public.projects WHERE pid = $1;`;
  const res = await pool.query(text, [projectId]);
  const text = `
    SELECT data
    FROM file_data fd
    INNER JOIN files f USING (hash)
    WHERE f.path LIKE '%.yaml';
  `;
  const res = await client.query(text);
  client.release();
  const config = res.rows.length == 1
    ? res.rows[0].data

View File

@@ -55,15 +55,6 @@ async function schema2pid (schema, client) {
  return res.rows[0] && res.rows[0].pid;
}

async function pid2schema (pid, client) {
  if (!client) {
    client = await pool.connect();
  }
  const res = await client.query("SELECT schema FROM projects WHERE pid = $1", [pid]);
  client.release();
  return res.rows[0] && res.rows[0].schema;
}

/** Fetch one row from a database cursor.
 *
 * @a cursor A query cursor
@@ -91,6 +82,5 @@ module.exports = {
  transaction,
  setSurvey,
  schema2pid,
  pid2schema,
  fetchRow
};

View File

@@ -3,6 +3,5 @@ module.exports = {
  bbox: require('./bbox'),
  preplot: require('./preplot'),
  raw: require('./raw'),
  final: require('./final'),
  layer: require('./layer')
  final: require('./final')
};

View File

@@ -1,31 +0,0 @@
const { setSurvey } = require('../../../connection');
async function get (projectId, layerName = null, options = {}) {
const client = await setSurvey(projectId);
const text = `
SELECT path, (data - 'type') data
FROM files f
INNER JOIN file_data
USING (hash)
WHERE data->>'type' = 'map_layer'
AND data->>'format' = 'geojson'
AND (data->>'name' = $1
OR $1 IS NULL);
`;
const values = [ layerName ];
const res = await client.query(text, values);
client.release();
if (res.rows && res.rows.length) {
return res.rows.map(row => ({...row.data, path: row.path}));
} else {
throw {status: 404, message: "Not found"};
}
}
module.exports = get;
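For reference, each returned element is the layer's metadata with the internal type marker dropped (the jsonb `data - 'type'` expression) and the file path merged in. A hedged sketch of a call against the module shown above; the project ID, layer name and path are invented:

// Illustrative only; IDs and paths are invented.
const getLayer = require('./get');

getLayer('np23', 'obstructions')
.then(layers => {
// e.g. [ { name: 'obstructions', format: 'geojson',
//          path: '/imports/maps/obstructions.geojson' } ]
})
.catch(err => {
// No matching layer rejects with { status: 404, message: "Not found" }
});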

View File

@@ -1,4 +0,0 @@
module.exports = {
get: require('./get')
};

View File

@@ -1,25 +0,0 @@
const { setSurvey } = require('../../connection');
async function get (projectId, opts = {}) {
try {
const client = await setSurvey(); // Use public schema
const text = `
SELECT meta
FROM projects
WHERE pid = $1;
`;
const res = await client.query(text, [projectId]);
client.release();
return res.rows[0].meta;
} catch (err) {
if (err.code == "42P01") {
throw { status: 404, message: "Not found" };
} else {
throw err;
}
}
}
module.exports = get;

View File

@@ -1,8 +0,0 @@
module.exports = {
get: require('./get'),
// post: require('./post'),
// put: require('./put'),
patch: require('./patch'),
// delete: require('./delete'),
};

View File

@@ -1,54 +0,0 @@
const { setSurvey } = require('../../connection');
const { deepMerge, removeNulls } = require('../../../utils');
const { modify } = require('../create');
async function patch (projectId, payload, opts = {}) {
let client;
try {
client = await setSurvey(); // Use public schema
const text = `
SELECT meta
FROM projects
WHERE pid = $1;
`;
const res = await client.query(text, [projectId]);
const source = res.rows[0].meta;
if (!source) {
throw { status: 404, message: "Not found" };
}
if (("id" in payload) && (projectId != payload.id)) {
throw {
status: 422,
message: "Project ID cannot be changed in this Dougal version"
}
}
if (("name" in payload) && (source.name != payload.name)) {
throw {
status: 422,
message: "Project name cannot be changed in this Dougal version"
}
}
const dest = removeNulls(deepMerge(source, payload));
await modify(projectId, dest);
return dest;
} catch (err) {
if (err.code == "42P01") {
throw { status: 404, message: "Not found" };
} else {
throw err;
}
} finally {
client?.release(); // client is undefined if setSurvey() itself threw
}
}
module.exports = patch;
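Because the payload is passed through deepMerge() and then removeNulls() (both from ../../../utils, shown later in this diff), a client can delete a metadata key by PATCHing it as null. A minimal sketch with invented field names:

// Invented example: 'comment' is assumed to exist in the stored meta.
const patch = require('./patch');

// stored meta:  { id: 'np23', name: 'North Plain', comment: 'draft', ... }
// deepMerge  →  { ..., comment: null };  removeNulls then deletes the key
patch('np23', { comment: null })
.then(updated => console.log(updated.comment)); // → undefined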

View File

@@ -1,193 +0,0 @@
const path = require('path');
const fs = require('fs').promises;
const cfg = require('DOUGAL_ROOT/lib/config');
const { setSurvey, pool } = require('../connection');
const get = require('./get');
const { INFO, DEBUG, WARNING, ERROR } = require('DOUGAL_ROOT/debug')(__filename);
function checkSyntax (value, type = "project") {
switch (type) {
case "project":
var requiredFields = {
id: "string",
name: "string",
epsg: "number",
binning: function (value) { return checkSyntax (value, "binning"); }
};
break;
case "binning":
var requiredFields = {
theta: "number",
I_inc: "number",
J_inc: "number",
I_width: "number",
J_width: "number",
origin: function (value) { return checkSyntax (value, "origin"); }
}
break;
case "origin":
var requiredFields = {
easting: "number",
northing: "number",
I: "number",
J: "number"
}
break;
default:
return typeof type == "function"
? type(value)
: typeof value == type;
}
// return Object.entries(requiredFields).every( ([field, test]) => {
// return value.hasOwnProperty(field) && checkSyntax(value[field], test);
// });
for (const [field, test] of Object.entries(requiredFields)) {
if (!value.hasOwnProperty(field)) {
return `Missing required property: ${field}`;
}
const res = checkSyntax(value[field], test);
if (res !== true) {
return res === false ? `Syntax error on "${field}"` : res;
}
}
return true;
}
async function applySchemaTemplate (projectDefinition) {
const templatePath = path.resolve(cfg.DOUGAL_ROOT, "etc/db/schema-template.sql");
const text = await fs.readFile(templatePath, "utf-8");
return text.replace(/_SURVEY__TEMPLATE_/g, projectDefinition.schema).replace(/_EPSG__CODE_/g, projectDefinition.epsg);
}
async function surveyIds () {
const res = await pool.query("SELECT schema FROM public.projects;");
if (res.rows?.length) {
return res.rows.map( s => Number(s.schema.replace(/^survey_/, "")) ).sort((a, b) => a-b);
} else {
return []
}
}
async function nextSurveyId () {
const ids = await surveyIds();
if (ids.length) {
return ids.pop() + 1;
} else {
return 1;
}
}
async function idExists (id) {
const surveys = await get();
// Use .some(), not .includes(): .includes() would check whether the
// callback function itself is an element of the array and always be false.
return surveys.some(s => s.pid.toLowerCase() == id.toLowerCase());
}
async function createProjectSchema (projectDefinition) {
const sql = await applySchemaTemplate(projectDefinition);
const client = await pool.connect();
try {
await client.query(sql);
} catch (err) {
console.error(err);
} finally {
client.release(true);
}
}
async function dropProjectSchema (projectDefinition) {
const sql = `
DROP SCHEMA ${projectDefinition.schema} CASCADE;
`;
try {
return await pool.query(sql);
} catch (err) {
console.error("dropProjectSchema", err);
}
}
async function addProjectToList (projectDefinition) {
const sql = `
INSERT INTO public.projects (pid, name, schema, meta) VALUES (LOWER($1), $2, $3, $4);
`;
const values = [ projectDefinition.id, projectDefinition.name, projectDefinition.schema, projectDefinition ];
try {
return await pool.query(sql, values);
} catch (err) {
if (err.code == "23505") {
if (err.constraint == "projects_name_key") {
throw { message: "A project with this name already exists" }
}
} else {
throw err;
}
}
}
async function updateProject (projectId, projectDefinition) {
const sql = `
UPDATE public.projects
SET
name = $2,
meta = $3
WHERE pid = $1;
`;
const values = [ projectId, projectDefinition.name, projectDefinition ];
try {
return await pool.query(sql, values);
} catch (err) {
throw err;
}
}
async function create (projectDefinition) {
const syntaxOk = checkSyntax(projectDefinition);
if (syntaxOk !== true) {
throw { status: 400, message: syntaxOk };
} else if (await idExists(projectDefinition.id)) {
throw { status: 409 }
} else {
try {
const survey_id = await nextSurveyId();
projectDefinition.schema = `survey_${survey_id}`;
projectDefinition.archived = projectDefinition.archived ?? false;
await createProjectSchema(projectDefinition)
await addProjectToList(projectDefinition);
} catch (err) {
DEBUG(err);
await dropProjectSchema(projectDefinition);
throw { status: 500, message: err.message ?? "Failed to create database for new project", detail: err.detail }
}
}
}
async function modify (projectId, projectDefinition) {
const syntaxOk = checkSyntax(projectDefinition);
if (syntaxOk !== true) {
throw { status: 400, message: syntaxOk };
} else {
try {
const res = await updateProject(projectId, projectDefinition);
} catch (err) {
DEBUG(err);
throw { status: 500, message: err.message ?? "Failed to update project definition", detail: err.detail }
}
}
}
module.exports = {
checkSyntax,
create,
modify
};
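checkSyntax() above returns true when the definition is well formed and a human-readable string otherwise, recursing through the binning and origin sub-objects. A hedged example of that contract (all values invented):

const { checkSyntax } = require('./create');

checkSyntax({ id: 'np23', name: 'North Plain 2023' });
// → "Missing required property: epsg"

checkSyntax({ id: 'np23', name: 7, epsg: 23031, binning: {} });
// → 'Syntax error on "name"'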

View File

@@ -1,56 +0,0 @@
const { setSurvey, pid2schema, pool } = require('../connection');
const event = require('../event');
const getSummary = require('./summary').get;
// Returns true if the project has no
// preplots, sequences or log entries,
// or if the project ID is not found
// in the database.
async function isDeletable (projectId) {
let summary;
try {
summary = await getSummary(projectId);
} catch (err) {
if (err.code == "42P01") {
// Project does not exist
return true;
} else {
throw err;
}
}
if (!summary) {
// projectId does not exist in the database
return true;
}
if (summary.total == 0 && summary.seq_raw == 0 && summary.seq_final == 0 && !summary.prod_duration) {
// Check for existing events (excluding deleted)
const events = await event.list(projectId, {limit: 1});
return events.length == 0;
}
return false;
};
async function del (projectId, opts = {}) {
if (await isDeletable(projectId)) {
const schema = await pid2schema(projectId);
if (schema) {
// NOTE: Should be reasonably safe as `schema` is not
// under user control.
const sql = `
DROP SCHEMA ${schema} CASCADE;
DELETE FROM public.projects WHERE schema = '${schema}';
`;
console.log(sql);
await pool.query(sql);
}
// We don't care if schema does not exist
} else {
throw { status: 405, message: "Project is not empty" }
}
}
module.exports = del;
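The calling contract, for reference; deletion is all-or-nothing and guarded by isDeletable():

const del = require('./delete');

del('np23')
.then(() => { /* schema dropped and public.projects row removed */ })
.catch(err => {
// A project with preplots, sequences or events rejects with
// { status: 405, message: "Project is not empty" }
});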

View File

@@ -1,8 +1,16 @@
-const { setSurvey, pool } = require('../connection');
+const { setSurvey } = require('../connection');
-async function get () {
-const res = await pool.query("SELECT pid, name, schema FROM public.projects;");
-return res.rows;
+async function get (projectId, opts = {}) {
+const client = await setSurvey(projectId);
+const text = `
+SELECT *
+FROM project_summary;
+`;
+const res = await client.query(text);
+client.release();
+return res.rows[0];
}
module.exports = get;

View File

@@ -1,9 +1,8 @@
module.exports = {
+list: require('./list'),
get: require('./get'),
post: require('./post'),
put: require('./put'),
-delete: require('./delete'),
-summary: require('./summary'),
-configuration: require('./configuration'),
+delete: require('./delete')
}

View File

@@ -0,0 +1,8 @@
const { setSurvey, pool } = require('../connection');
async function list () {
const res = await pool.query("SELECT * FROM public.projects;");
return res.rows;
}
module.exports = list;

View File

@@ -1,29 +0,0 @@
const { setSurvey, pool } = require('../connection');
const { create } = require('./create');
/*
* Creating a new project consists of these steps:
*
* - Check that the payload is well formed and includes all required items.
* If not, return 400.
* - Check if the id already exists. If it does, return 409.
* - Figure out what the next schema name is going to be (survey_XXX).
* - Read the SQL template from $DOUGAL_ROOT/etc/db/schema-template.sql and
* replace the `_SURVEY__TEMPLATE_` and `_EPSG__CODE_` placeholders with
* appropriate values.
* - Apply the resulting SQL.
* - Add the appropriate entry into public.projects
* - Add the survey definition details (the request's payload) into the
* database (or create a YAML file under $DOUGAL_ROOT/etc/surveys?)
* - Return a 201 with the survey definition as the payload.
*/
async function post (payload) {
try {
return await create(payload);
} catch (err) {
throw err;
}
}
module.exports = post;
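A payload satisfying the steps above needs the fields required by checkSyntax() in create.js; this example is invented for illustration:

// Invented payload; the field set follows checkSyntax() in create.js.
const post = require('./post');

post({
id: 'np23',
name: 'North Plain 2023',
epsg: 23031,
binning: {
theta: 90, I_inc: 1, J_inc: 1, I_width: 25, J_width: 12.5,
origin: { easting: 500000, northing: 6200000, I: 1, J: 1 }
}
}); // resolves once the survey_N schema and public.projects row exist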

View File

@@ -1,24 +0,0 @@
const { setSurvey } = require('../../connection');
async function get (projectId, opts = {}) {
try {
const client = await setSurvey(projectId);
const text = `
SELECT *
FROM project_summary;
`;
const res = await client.query(text);
client.release();
return res.rows[0];
} catch (err) {
if (err.code == "42P01") {
throw { status: 404, message: "Not found" };
} else {
throw err;
}
}
}
module.exports = get;

View File

@@ -1,3 +0,0 @@
module.exports = {
get: require('./get'),
};

View File

@@ -1,125 +0,0 @@
const fs = require('fs/promises');
const Path = require('path');
const mime = require('./mime-types');
const { translatePath, logicalRoot } = require('./logical');
const systemCfg = require('../config');
const projectCfg = require('../db/configuration');
async function directoryListing (fullPath, root) {
const contents = await fs.readdir(fullPath, {withFileTypes: true});
const listing = [];
for (const entry of contents) {
const resolved = Path.resolve(fullPath, entry.name);
const relative = resolved.substring(fullPath.length).replace(/^\/+/, "");
const logical = Path.join(root, relative);
const stat = await fs.stat(resolved);
const mimetype = entry.isDirectory()
? "inode/directory"
: (mime.contentType(entry.name) || "application/octet-stream");
listing.push({
path: logical,
basename: Path.basename(relative),
"Content-Type": mimetype,
size: stat.size,
atime: stat.atime,
mtime: stat.mtime,
ctime: stat.ctime,
birthtime: stat.birthtime
});
}
return listing;
}
async function virtualDirectoryListing (logicalPaths) {
const listing = [];
for (const logical of logicalPaths) {
const fullPath = translatePath(logical);
const resolved = Path.resolve("/", logical);
const stat = await fs.stat(fullPath);
const mimetype = stat.isDirectory()
? "inode/directory"
: (mime.contentType(fullPath) || "application/octet-stream");
listing.push({
path: resolved,
basename: Path.basename(logical),
"Content-Type": mimetype,
size: stat.size,
atime: stat.atime,
mtime: stat.mtime,
ctime: stat.ctime,
birthtime: stat.birthtime
});
}
return listing;
}
async function projectRelativeGet (path, query) {
console.log("Not implemented yet");
throw {status: 404, message: "ENOENT"};
}
async function systemRelativeGet (path, query) {
try {
if (!path) {
return await systemRelativeGet("/", query);
} else if (Path.resolve(path) == "/") {
return await virtualDirectoryListing(logicalRoot())
} else {
const physicalPath = translatePath(path);
const stats = await fs.stat(physicalPath);
if (stats.isDirectory()) {
// Return directory listing, with types.
return await directoryListing(physicalPath, "/"+path.replace(/^\/+/, ""));
} else if (stats.isFile()) {
// Returns a list of arguments suitable for ExpressJS res.download
const headers = {
"Content-Type": mime.lookup(physicalPath) || "application/octet-stream"
};
return {
download: [ physicalPath, Path.basename(path), { headers } ]
};
} else {
throw {status: 403, message: "ENOACCESS"};
}
}
} catch (err) {
console.error(err);
throw err;
}
}
module.exports = async function get (path, projectId, query) {
if (projectId) {
return await projectRelativeGet(path, query);
} else {
return await systemRelativeGet(path, query);
}
};
/*
module.exports = async function get (path, projectId, query) {
const root = projectId
? Path.resolve(systemCfg.global.files.root, await projectCfg.get(projectId, "rootPath"))
: systemCfg.global.files.root;
const fullPath = Path.resolve(root, path);
// Check if there is an attempt to break out of root path
if (Path.relative(root, fullPath).includes("..")) {
// Throw something resolving to a 404
throw {status: 404, message: "ENOENT"};
} else {
const stats = await fs.stat(fullPath);
if (stats.isDirectory()) {
// Return directory listing, with types.
return await directoryListing(fullPath, root);
} else if (stats.isFile()) {
// Returns a list of arguments suitable for ExpressJS res.download
return {
download: [ fullPath, Path.basename(path) ]
};
} else {
throw {status: 403, message: "ENOACCESS"};
}
}
}*/
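On the HTTP side, the { download: [...] } result returned for regular files is shaped to be spread directly into Express's res.download(). A hypothetical handler, with the route wiring and parameter names assumed:

// Hypothetical Express handler; parameter names are assumptions.
const get = require('./get');

module.exports = async function (req, res, next) {
try {
const result = await get(req.params[0], null, req.query);
if (result.download) {
res.download(...result.download); // physicalPath, basename, { headers }
} else {
res.status(200).send(result); // directory listing array
}
} catch (err) {
next(err);
}
};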

View File

@@ -1,7 +0,0 @@
module.exports = {
get: require('./get'),
post: require('./post'),
put: require('./put'),
delete: require('./delete')
}

View File

@@ -1,71 +0,0 @@
const Path = require('path');
const cfg = require('../config');
function translatePath (file) {
const root = Path.resolve(cfg.DOUGAL_ROOT);
const importPaths = cfg.global?.imports?.paths;
function validate (physicalPath, prefix) {
if (physicalPath.startsWith(prefix)) {
return physicalPath;
} else {
// An attempt to break out of the logical path?
throw {
status: 404,
message: "Not found"
};
}
}
if (Path.isAbsolute(file)) {
if (typeof importPaths === "string") {
// Substitute the root for the real physical path
// NOTE: `root` deals with importPaths not being absolute
const prefix = Path.resolve(Path.join(root, importPaths));
const suffix = Path.resolve(file).replace(/^\/+/, "");
const physicalPath = Path.resolve(Path.join(prefix, suffix));
return validate(physicalPath, prefix);
} else if (typeof importPaths === "object") {
const parts = Path.resolve(file).split("/").slice(1);
if (parts[0] in importPaths) {
const prefix = Path.join("/", importPaths[parts[0]])
const suffix = parts.slice(1).join("/");
const physicalPath = Path.resolve(Path.join(prefix, suffix));
return validate(physicalPath, prefix);
} else {
return validate(file, null); // Throws 404
}
} else {
// Most likely importPaths is undefined
return validate(file, null); // Throws 404
}
} else {
// A relative filepath is always resolved relative to the logical root
return translatePath(Path.resolve(Path.join("/", file)));
}
}
function untranslatePath (file) {
}
function logicalRoot () {
const root = Path.resolve(cfg.DOUGAL_ROOT);
const importPaths = cfg.global?.imports?.paths;
if (typeof importPaths === "string") {
return [ "/" ];
} else if (typeof importPaths === "object") {
return Object.keys(importPaths);
} else {
// Most likely importPaths is undefined
return [];
}
}
module.exports = {
translatePath,
untranslatePath,
logicalRoot
};
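How the two imports.paths shapes behave, sketched with invented configuration values:

// Invented config: imports.paths as a map of logical → physical roots:
//   imports: { paths: { data: "/mnt/nas/projects", tapes: "/mnt/tapes" } }
//
// translatePath("/data/2023/line1.sps") → "/mnt/nas/projects/2023/line1.sps"
// translatePath("2023/line1.sps")       → resolved as "/2023/…", no "2023" root → 404
// translatePath("/elsewhere/x")         → throws { status: 404, message: "Not found" }
// logicalRoot()                         → [ "data", "tapes" ]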

View File

@@ -1,22 +0,0 @@
const mime = require('mime-types');
const extraTypes = {
"text/plain": [
"sps", "SPS",
"p1", "P1",
"p190", "P190",
"p111", "P111",
"p2", "P2",
"p294", "P294",
"p211", "P211",
"hdr", "HDR"
]
};
for (let [mimeType, extensions] of Object.entries(extraTypes)) {
for (let extension of extensions) {
mime.types[extension] = mimeType;
}
}
module.exports = mime;
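The effect, using the lookup helpers relied on by the directory-listing code earlier in this diff:

const mime = require('./mime-types');

mime.lookup('0001.p190');     // → 'text/plain' (registered above)
mime.contentType('line.sps'); // → 'text/plain; charset=utf-8'
mime.lookup('photo.jpg');     // → 'image/jpeg' (stock behaviour unchanged)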

View File

@@ -98,7 +98,7 @@ function parse (buffer) {
(buf, ctx) => {
ctx.speed = Number(buf.subarray(s+80, s+80+6).toString('ascii'));
if (isNaN(ctx.speed)) {
-throw new NavHeaderError("Unparseable speed value: "+buf.subarray(s+80, s+80+6).toString('ascii'), buf);
+throw NavHeaderError("Unparseable speed value: "+buf.subarray(s+80, s+80+6).toString('ascii'), buf);
}
const units = buf.subarray(s+86, s+86+2).toString('ascii');
switch (units.toUpperCase()) {

View File

@@ -56,14 +56,14 @@ async function getProjectQCConfig (projectId) {
async function main () {
// Fetch list of projects
console.log("GET PROJECTS");
-const projects = await project.get();
+const projects = await project.list();
console.log("PROJECTS", projects);
for (const proj of projects) {
const projectId = proj.pid;
console.log("PROJECT ID", projectId);
-if (!proj.archived) {
+if (!project.archived) {
const QCTstamp = new Date();
const currentQCHash = await projectHash(projectId);

View File

@@ -1,67 +0,0 @@
// Copied from:
// https://gomakethings.com/how-to-deep-merge-arrays-and-objects-with-javascript/
/*!
* Deep merge two or more objects or arrays.
* (c) 2023 Chris Ferdinandi, MIT License, https://gomakethings.com
* @param {*} ...objs The arrays or objects to merge
* @returns {*} The merged arrays or objects
*/
function deepMerge (...objs) {
/**
* Get the object type
* @param {*} obj The object
* @return {String} The object type
*/
function getType (obj) {
return Object.prototype.toString.call(obj).slice(8, -1).toLowerCase();
}
/**
* Deep merge two objects
* @return {Object}
*/
function mergeObj (clone, obj) {
for (let [key, value] of Object.entries(obj)) {
let type = getType(value);
if (clone[key] !== undefined && getType(clone[key]) === type && ['array', 'object'].includes(type)) {
clone[key] = deepMerge(clone[key], value);
} else {
clone[key] = structuredClone(value);
}
}
}
// Create a clone of the first item in the objs array
let clone = structuredClone(objs.shift());
// Loop through each item
for (let obj of objs) {
// Get the object type
let type = getType(obj);
// If the current item isn't the same type as the clone, replace it
if (getType(clone) !== type) {
clone = structuredClone(obj);
continue;
}
// Otherwise, merge
if (type === 'array') {
// Replace old array with new
clone = [...structuredClone(obj)];
} else if (type === 'object') {
mergeObj(clone, obj);
} else {
clone = obj;
}
}
return clone;
}
module.exports = deepMerge;
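Worth noting: nested objects are merged key by key, but an incoming array replaces the existing one wholesale rather than being concatenated. For example:

const deepMerge = require('./deepMerge');

deepMerge(
{ a: 1, nested: { x: 1, y: 2 }, list: [1, 2, 3] },
{ nested: { y: 9 }, list: [4] }
);
// → { a: 1, nested: { x: 1, y: 9 }, list: [4] }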

View File

@@ -3,7 +3,5 @@ module.exports = {
geometryAsString: require('./geometryAsString'),
dms: require('./dms'),
replaceMarkers: require('./replaceMarkers'),
-flattenQCDefinitions: require('./flattenQCDefinitions'),
-deepMerge: require('./deepMerge'),
-removeNulls: require('./removeNulls')
+flattenQCDefinitions: require('./flattenQCDefinitions')
};

View File

@@ -1,23 +0,0 @@
/**
* Delete keys whose value is null.
*
*/
function removeNulls (obj) {
function getType (obj) {
return Object.prototype.toString.call(obj).slice(8, -1).toLowerCase();
}
for (let [key, value] of Object.entries(obj)) {
if (value === null) {
delete obj[key];
} else if (getType(value) == "object") {
removeNulls(value);
}
}
return obj;
}
module.exports = removeNulls;
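The removal is recursive and happens in place; the same (mutated) object is returned for convenience:

const removeNulls = require('./removeNulls');

const meta = { keep: 1, drop: null, nested: { also: null, ok: true } };
removeNulls(meta);
// meta is now { keep: 1, nested: { ok: true } }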

File diff suppressed because it is too large

View File

@@ -5,29 +5,28 @@
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"postinstall": "test -e node_modules/DOUGAL_ROOT || ln -s .. node_modules/DOUGAL_ROOT; test -e spec && test -e spec/openapi.yaml && node_modules/.bin/redoc-cli bundle ./spec/openapi.yaml -o ./spec/openapi.html || echo API documentation not present or failed to compile >/dev/stderr"
"postinstall": "[[ -e ./node_modules/DOUGAL_ROOT ]] || ln -s .. ./node_modules/DOUGAL_ROOT; redoc-cli bundle ./spec/openapi.yaml -o ./spec/openapi.html"
},
"author": "Aaltronav s.r.o.",
"license": "UNLICENSED",
"private": true,
"config": {
"db_schema": "^0.3.11",
"api": "^0.4.0"
"api": "^0.3.0"
},
"engines": {
"node": ">=18.0.0"
"node": ">=14.0.0"
},
"os": [
"linux"
],
"dependencies": {
"body-parser": "gitlab:aaltronav/contrib/expressjs/body-parser",
"cookie-parser": "^1.4.5",
"debug": "^4.3.4",
"express": "^4.17.1",
"express-jwt": "^8.4.1",
"express-jwt": "^6.0.0",
"json2csv": "^5.0.6",
"jsonwebtoken": "^9.0.2",
"jsonwebtoken": "^8.5.1",
"leaflet-headless": "git+https://git@gitlab.com/aaltronav/contrib/leaflet-headless.git#devel",
"marked": "^4.0.12",
"netmask": "^2.0.2",