diff --git a/bin/configuration.py b/bin/configuration.py
index 17cf0f7..481f0b0 100644
--- a/bin/configuration.py
+++ b/bin/configuration.py
@@ -1,4 +1,5 @@
import os
+import pathlib
from glob import glob
from yaml import full_load as _load
@@ -54,6 +55,10 @@ def files (globspec = None, include_archived = False):
quickly and temporarily “disabling” a survey configuration by renaming
the relevant file.
"""
+
+    # Obsolete: survey definitions are now read from the database via
+    # Datastore.surveys(); return an empty list for any remaining callers.
+    print("configuration.files() is obsolete")
+    return []
+
tuples = []
if globspec is None:
@@ -87,3 +92,73 @@ def rxflags (flagstr):
for flag in flagstr:
flags |= cases.get(flag, 0)
return flags
+
+def translate_path (file):
+ """
+ Translate a path from a Dougal import directory to an actual
+ physical path on disk.
+
+ Any user files accessible by Dougal must be under a path prefixed
+ by `(config.yaml).imports.paths`. The value of `imports.paths` may
+ be either a string, in which case this represents the prefix under
+    which all Dougal data resides, or a dictionary whose keys are
+    logical path prefixes and whose values are the corresponding
+    physical paths.
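+
+    For example (illustrative values):
+
+        paths: /srv/mnt/Data
+            /line1/file.p111  →  /srv/mnt/Data/line1/file.p111
+        paths: {data: /srv/mnt/Data}
+            /data/line1/file.p111  →  /srv/mnt/Data/line1/file.p111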
+ """
+ cfg = read()
+ root = pathlib.Path(DOUGAL_ROOT)
+ filepath = pathlib.Path(file).resolve()
+ import_paths = cfg["imports"]["paths"]
+
+ if filepath.is_absolute():
+        if isinstance(import_paths, str):
+            # Substitute the logical root with the physical prefix
+            # NOTE: `root` deals with import_paths not being absolute
+            prefix = root.joinpath(import_paths).resolve()
+            return str(prefix.joinpath(*filepath.parts[1:]))
+        elif filepath.parts[1] in import_paths:
+            # The second path element names a logical prefix
+            # NOTE: `root` deals with import_paths[…] not being absolute
+            prefix = root.joinpath(import_paths[filepath.parts[1]])
+            return str(prefix.joinpath(*filepath.parts[2:]))
+        else:
+            # Neither form matched: this is not a valid Dougal data path
+            raise TypeError("invalid path or file: {0!r}".format(filepath))
+    else:
+        # A relative filepath is always resolved relative to the logical root
+        return translate_path(pathlib.Path("/").joinpath(filepath))
+
+def untranslate_path (file):
+ """
+ Attempt to convert a physical path into a logical one.
+ See `translate_path()` above for details.
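+
+    For example (illustrative), with `paths: {data: /srv/mnt/Data}` the
+    physical path `/srv/mnt/Data/line1/file.p111` maps back to the
+    logical path `/data/line1/file.p111`.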
+ """
+ cfg = read()
+ dougal_root = pathlib.Path(DOUGAL_ROOT)
+ filepath = pathlib.Path(file).resolve()
+ import_paths = cfg["imports"]["paths"]
+    logical_root = pathlib.Path("/")
+
+    if filepath.is_absolute():
+        if isinstance(import_paths, str):
+            # NOTE: joining with `dougal_root` deals with import_paths not being absolute
+            physical_prefix = dougal_root.joinpath(import_paths).resolve()
+            if filepath.is_relative_to(physical_prefix):
+                return str(logical_root.joinpath(filepath.relative_to(physical_prefix)))
+            else:
+                raise TypeError("invalid path or file: {0!r}".format(filepath))
+        else:
+            for key, value in import_paths.items():
+                # NOTE: joining with `dougal_root` deals with the value not being absolute
+                physical_prefix = dougal_root.joinpath(value)
+                if filepath.is_relative_to(physical_prefix):
+                    logical_prefix = logical_root.joinpath(key)
+                    return str(logical_prefix.joinpath(filepath.relative_to(physical_prefix)))
+
+            # If we got here with no matches, this is not a valid
+            # Dougal data path
+            raise TypeError("invalid path or file: {0!r}".format(filepath))
+    else:
+        # A relative filepath is always resolved relative to DOUGAL_ROOT
+        return untranslate_path(dougal_root.joinpath(filepath))
diff --git a/bin/datastore.py b/bin/datastore.py
index f5b1646..e6441d0 100644
--- a/bin/datastore.py
+++ b/bin/datastore.py
@@ -52,7 +52,7 @@ class Datastore:
self.conn = psycopg2.connect(configuration.read()["db"]["connection_string"], **opts)
- def set_autocommit(value = True):
+ def set_autocommit(self, value = True):
"""
Enable or disable autocommit.
@@ -95,7 +95,7 @@ class Datastore:
cursor.execute(qry, (filepath,))
results = cursor.fetchall()
if len(results):
- return (filepath, file_hash(filepath)) in results
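+                # Paths are stored in the DB in logical form; compute the
+                # hash from the translated (physical) file on disk.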
+ return (filepath, file_hash(configuration.translate_path(filepath))) in results
def add_file(self, path, cursor = None):
@@ -107,7 +107,8 @@ class Datastore:
else:
cur = cursor
- hash = file_hash(path)
+ realpath = configuration.translate_path(path)
+ hash = file_hash(realpath)
qry = "CALL add_file(%s, %s);"
cur.execute(qry, (path, hash))
if cursor is None:
@@ -176,7 +177,7 @@ class Datastore:
else:
cur = cursor
- hash = file_hash(path)
+ hash = file_hash(configuration.translate_path(path))
qry = """
UPDATE raw_lines rl
SET ntbp = %s
@@ -589,6 +590,36 @@ class Datastore:
# we assume that we are in the middle of a transaction
+
+ def surveys (self, include_archived = False):
+ """
+ Return list of survey definitions.
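+
+        Each element is the `meta` JSON object from `public.projects`,
+        i.e. the survey definition used by the import scripts (e.g. its
+        `id` and `schema` keys).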
+ """
+
+ if self.conn is None:
+ self.connect()
+
+ if include_archived:
+ qry = """
+ SELECT meta
+ FROM public.projects;
+ """
+ else:
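+            # Treat a missing or null `archived` flag as not archived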
+ qry = """
+ SELECT meta
+ FROM public.projects
+                WHERE (meta->'archived')::boolean IS NOT true;
+ """
+
+ with self.conn:
+ with self.conn.cursor() as cursor:
+
+ cursor.execute(qry)
+ results = cursor.fetchall()
+ return [r[0] for r in results if r[0]]
+
+
+ # TODO Does this need tweaking on account of #246?
def apply_survey_configuration(self, cursor = None):
if cursor is None:
cur = self.conn.cursor()
diff --git a/bin/import_final_p111.py b/bin/import_final_p111.py
index 4056924..bc0c146 100755
--- a/bin/import_final_p111.py
+++ b/bin/import_final_p111.py
@@ -51,12 +51,11 @@ def del_pending_remark(db, sequence):
if __name__ == '__main__':
print("Reading configuration")
- surveys = configuration.surveys()
file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
print("Connecting to database")
db = Datastore()
- db.connect()
+ surveys = db.surveys()
print("Reading surveys")
for survey in surveys:
@@ -77,29 +76,31 @@ if __name__ == '__main__':
pendingRx = re.compile(survey["final"]["pending"]["pattern"]["regex"])
for fileprefix in final_p111["paths"]:
- print(f"Path prefix: {fileprefix}")
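+            # `fileprefix` is a logical path from the survey configuration; glob
+            # its physical translation, but keep recording logical paths in the DB.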
+ realprefix = configuration.translate_path(fileprefix)
+ print(f"Path prefix: {fileprefix} → {realprefix}")
for globspec in final_p111["globs"]:
- for filepath in pathlib.Path(fileprefix).glob(globspec):
- filepath = str(filepath)
- print(f"Found {filepath}")
+ for physical_filepath in pathlib.Path(realprefix).glob(globspec):
+ physical_filepath = str(physical_filepath)
+ logical_filepath = configuration.untranslate_path(physical_filepath)
+ print(f"Found {logical_filepath}")
pending = False
if pendingRx:
- pending = pendingRx.search(filepath) is not None
+ pending = pendingRx.search(physical_filepath) is not None
- if not db.file_in_db(filepath):
+ if not db.file_in_db(logical_filepath):
- age = time.time() - os.path.getmtime(filepath)
+ age = time.time() - os.path.getmtime(physical_filepath)
if age < file_min_age:
- print("Skipping file because too new", filepath)
+ print("Skipping file because too new", logical_filepath)
continue
print("Importing")
- match = rx.match(os.path.basename(filepath))
+ match = rx.match(os.path.basename(logical_filepath))
if not match:
- error_message = f"File path not match the expected format! ({filepath} ~ {pattern['regex']})"
+                    error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})"
print(error_message, file=sys.stderr)
print("This file will be ignored!")
continue
@@ -108,21 +109,21 @@ if __name__ == '__main__':
file_info["meta"] = {}
if pending:
- print("Skipping / removing final file because marked as PENDING", filepath)
+ print("Skipping / removing final file because marked as PENDING", logical_filepath)
db.del_sequence_final(file_info["sequence"])
add_pending_remark(db, file_info["sequence"])
continue
else:
del_pending_remark(db, file_info["sequence"])
- p111_data = p111.from_file(filepath)
+ p111_data = p111.from_file(physical_filepath)
print("Saving")
p111_records = p111.p111_type("S", p111_data)
file_info["meta"]["lineName"] = p111.line_name(p111_data)
- db.save_final_p111(p111_records, file_info, filepath, survey["epsg"])
+ db.save_final_p111(p111_records, file_info, logical_filepath, survey["epsg"])
else:
print("Already in DB")
if pending:
diff --git a/bin/import_preplots.py b/bin/import_preplots.py
index 8c89cee..10c5c1b 100755
--- a/bin/import_preplots.py
+++ b/bin/import_preplots.py
@@ -17,29 +17,31 @@ from datastore import Datastore
if __name__ == '__main__':
- print("Reading configuration")
- surveys = configuration.surveys()
- file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
-
print("Connecting to database")
db = Datastore()
+ surveys = db.surveys()
+
+ print("Reading configuration")
+ file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
print("Reading surveys")
for survey in surveys:
print(f'Survey: {survey["id"]} ({survey["schema"]})')
db.set_survey(survey["schema"])
for file in survey["preplots"]:
+ realpath = configuration.translate_path(file["path"])
+
print(f"Preplot: {file['path']}")
if not db.file_in_db(file["path"]):
- age = time.time() - os.path.getmtime(file["path"])
+ age = time.time() - os.path.getmtime(realpath)
if age < file_min_age:
print("Skipping file because too new", file["path"])
continue
print("Importing")
try:
- preplot = preplots.from_file(file)
+ preplot = preplots.from_file(file, realpath)
except FileNotFoundError:
print(f"File does not exist: {file['path']}", file=sys.stderr)
continue
diff --git a/bin/import_raw_p111.py b/bin/import_raw_p111.py
index bd59696..c6a38ab 100755
--- a/bin/import_raw_p111.py
+++ b/bin/import_raw_p111.py
@@ -20,12 +20,11 @@ from datastore import Datastore
if __name__ == '__main__':
print("Reading configuration")
- surveys = configuration.surveys()
file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
print("Connecting to database")
db = Datastore()
- db.connect()
+ surveys = db.surveys()
print("Reading surveys")
for survey in surveys:
@@ -46,30 +45,32 @@ if __name__ == '__main__':
ntbpRx = re.compile(survey["raw"]["ntbp"]["pattern"]["regex"])
for fileprefix in raw_p111["paths"]:
- print(f"Path prefix: {fileprefix}")
+ realprefix = configuration.translate_path(fileprefix)
+ print(f"Path prefix: {fileprefix} → {realprefix}")
for globspec in raw_p111["globs"]:
- for filepath in pathlib.Path(fileprefix).glob(globspec):
- filepath = str(filepath)
- print(f"Found {filepath}")
+ for physical_filepath in pathlib.Path(realprefix).glob(globspec):
+ physical_filepath = str(physical_filepath)
+ logical_filepath = configuration.untranslate_path(physical_filepath)
+ print(f"Found {logical_filepath}")
if ntbpRx:
- ntbp = ntbpRx.search(filepath) is not None
+ ntbp = ntbpRx.search(physical_filepath) is not None
else:
ntbp = False
- if not db.file_in_db(filepath):
+ if not db.file_in_db(logical_filepath):
- age = time.time() - os.path.getmtime(filepath)
+ age = time.time() - os.path.getmtime(physical_filepath)
if age < file_min_age:
- print("Skipping file because too new", filepath)
+ print("Skipping file because too new", logical_filepath)
continue
print("Importing")
- match = rx.match(os.path.basename(filepath))
+ match = rx.match(os.path.basename(logical_filepath))
if not match:
- error_message = f"File path not match the expected format! ({filepath} ~ {pattern['regex']})"
+ error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})"
print(error_message, file=sys.stderr)
print("This file will be ignored!")
continue
@@ -77,7 +78,7 @@ if __name__ == '__main__':
file_info = dict(zip(pattern["captures"], match.groups()))
file_info["meta"] = {}
- p111_data = p111.from_file(filepath)
+ p111_data = p111.from_file(physical_filepath)
print("Saving")
@@ -85,7 +86,7 @@ if __name__ == '__main__':
if len(p111_records):
file_info["meta"]["lineName"] = p111.line_name(p111_data)
- db.save_raw_p111(p111_records, file_info, filepath, survey["epsg"], ntbp=ntbp)
+ db.save_raw_p111(p111_records, file_info, logical_filepath, survey["epsg"], ntbp=ntbp)
else:
print("No source records found in file")
else:
@@ -93,7 +94,7 @@ if __name__ == '__main__':
# Update the NTBP status to whatever the latest is,
# as it might have changed.
- db.set_ntbp(filepath, ntbp)
+ db.set_ntbp(logical_filepath, ntbp)
if ntbp:
print("Sequence is NTBP")
diff --git a/bin/import_smsrc.py b/bin/import_smsrc.py
index 8b5dc83..b3fd4e7 100755
--- a/bin/import_smsrc.py
+++ b/bin/import_smsrc.py
@@ -20,12 +20,11 @@ from datastore import Datastore
if __name__ == '__main__':
print("Reading configuration")
- surveys = configuration.surveys()
file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
print("Connecting to database")
db = Datastore()
- db.connect()
+ surveys = db.surveys()
print("Reading surveys")
for survey in surveys:
@@ -47,36 +46,38 @@ if __name__ == '__main__':
rx = re.compile(pattern["regex"], flags)
for fileprefix in raw_smsrc["paths"]:
- print(f"Path prefix: {fileprefix}")
+ realprefix = configuration.translate_path(fileprefix)
+ print(f"Path prefix: {fileprefix} → {realprefix}")
for globspec in raw_smsrc["globs"]:
- for filepath in pathlib.Path(fileprefix).glob(globspec):
- filepath = str(filepath)
- print(f"Found {filepath}")
+ for physical_filepath in pathlib.Path(realprefix).glob(globspec):
+ physical_filepath = str(physical_filepath)
+ logical_filepath = configuration.untranslate_path(physical_filepath)
+ print(f"Found {logical_filepath}")
- if not db.file_in_db(filepath):
+ if not db.file_in_db(logical_filepath):
- age = time.time() - os.path.getmtime(filepath)
+ age = time.time() - os.path.getmtime(physical_filepath)
if age < file_min_age:
- print("Skipping file because too new", filepath)
+ print("Skipping file because too new", logical_filepath)
continue
print("Importing")
- match = rx.match(os.path.basename(filepath))
+ match = rx.match(os.path.basename(logical_filepath))
if not match:
- error_message = f"File path not matching the expected format! ({filepath} ~ {pattern['regex']})"
+ error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})"
print(error_message, file=sys.stderr)
print("This file will be ignored!")
continue
file_info = dict(zip(pattern["captures"], match.groups()))
- smsrc_records = smsrc.from_file(filepath)
+ smsrc_records = smsrc.from_file(physical_filepath)
print("Saving")
- db.save_raw_smsrc(smsrc_records, file_info, filepath)
+ db.save_raw_smsrc(smsrc_records, file_info, logical_filepath)
else:
print("Already in DB")
diff --git a/bin/import_survey_config.py b/bin/import_survey_config.py
index b0452f7..75fd149 100755
--- a/bin/import_survey_config.py
+++ b/bin/import_survey_config.py
@@ -15,25 +15,4 @@ from datastore import Datastore
if __name__ == '__main__':
- print("Reading configuration")
- configs = configuration.files(include_archived = True)
-
- print("Connecting to database")
- db = Datastore()
- #db.connect()
-
- print("Reading surveys")
- for config in configs:
- filepath = config[0]
- survey = config[1]
- print(f'Survey: {survey["id"]} ({filepath})')
- db.set_survey(survey["schema"])
- if not db.file_in_db(filepath):
- print("Saving to DB")
- db.save_file_data(filepath, json.dumps(survey))
- print("Applying survey configuration")
- db.apply_survey_configuration()
- else:
- print("Already in DB")
-
- print("Done")
+    print("This script is obsolete. Exiting with no action")
diff --git a/bin/preplots.py b/bin/preplots.py
index 51f92de..58be431 100644
--- a/bin/preplots.py
+++ b/bin/preplots.py
@@ -4,9 +4,10 @@ import sps
Preplot importing functions.
"""
-def from_file (file):
+def from_file (file, realpath = None):
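+    # `realpath`, when given, is the physical on-disk location of the preplot
+    # file; `file["path"]` remains the logical path from the survey config.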
+ filepath = realpath or file["path"]
if not "type" in file or file["type"] == "sps":
- records = sps.from_file(file["path"], file["format"] if "format" in file else None )
+ records = sps.from_file(filepath, file["format"] if "format" in file else None )
else:
return "Not an SPS file"
diff --git a/bin/purge_deleted_files.py b/bin/purge_deleted_files.py
index 512bf2a..a8dd966 100755
--- a/bin/purge_deleted_files.py
+++ b/bin/purge_deleted_files.py
@@ -13,21 +13,27 @@ from datastore import Datastore
if __name__ == '__main__':
- print("Reading configuration")
- surveys = configuration.surveys()
-
print("Connecting to database")
db = Datastore()
+ print("Reading configuration")
+ surveys = db.surveys()
+
print("Reading surveys")
for survey in surveys:
print(f'Survey: {survey["id"]} ({survey["schema"]})')
db.set_survey(survey["schema"])
for file in db.list_files():
- path = file[0]
- if not os.path.exists(path):
- print(path, "NOT FOUND")
- db.del_file(path)
+ try:
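+                # Translate the logical path stored in the DB to its physical location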
+ path = configuration.translate_path(file[0])
+ if not os.path.exists(path):
+ print(path, "NOT FOUND")
+ db.del_file(file[0])
+ except TypeError:
+ # In case the logical path no longer matches
+ # the Dougal configuration.
+ print(file[0], "COULD NOT BE TRANSLATED TO A PHYSICAL PATH. DELETING")
+ db.del_file(file[0])
print("Done")
diff --git a/etc/config.yaml b/etc/config.yaml
index 75cec49..c4c6a16 100644
--- a/etc/config.yaml
+++ b/etc/config.yaml
@@ -39,6 +39,18 @@ imports:
mounts:
- /srv/mnt/Data
+ # These paths can be exposed to end users via the API. They should
+  # contain the locations where project data, or any other user data
+ # that needs to be accessible by Dougal, is located.
+ #
+ # This key can be either a string or an object:
+ # - If a string, it points to the root path for Dougal-accessible data.
+ # - If an object, there is an implicit root and the first-level
+ # paths are denoted by the keys, with the values being their
+ # respective physical paths.
+ # Non-absolute paths are relative to $DOUGAL_ROOT.
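+  #
+  # The object form might look like this (illustrative values only):
+  #
+  #   paths:
+  #     data: /srv/mnt/Data
+  #     archive: archive/data    # relative to $DOUGAL_ROOT
+  #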
+ paths: /srv/mnt/Data
+
queues:
asaqc:
request:
diff --git a/lib/www/client/source/package.json b/lib/www/client/source/package.json
index 3b033af..ccc443d 100644
--- a/lib/www/client/source/package.json
+++ b/lib/www/client/source/package.json
@@ -3,7 +3,7 @@
"version": "0.0.0",
"private": true,
"scripts": {
- "serve": "NODE_OPTIONS=--openssl-legacy-provider vue-cli-service serve",
+ "serve": "NODE_OPTIONS=--openssl-legacy-provider vue-cli-service serve --host=0.0.0.0",
"build": "NODE_OPTIONS=--openssl-legacy-provider vue-cli-service build"
},
"dependencies": {
diff --git a/lib/www/client/source/src/store/modules/project/actions.js b/lib/www/client/source/src/store/modules/project/actions.js
index 57d47ed..436e88a 100644
--- a/lib/www/client/source/src/store/modules/project/actions.js
+++ b/lib/www/client/source/src/store/modules/project/actions.js
@@ -8,6 +8,10 @@ async function getProject ({commit, dispatch}, projectId) {
const recentProjects = JSON.parse(localStorage.getItem("recentProjects") || "[]")
recentProjects.unshift(res);
localStorage.setItem("recentProjects", JSON.stringify(recentProjects.slice(0, 3)));
+ } else {
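+    // No project was loaded: clear any previously selected project state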
+ commit('setProjectName', null);
+ commit('setProjectId', null);
+ commit('setProjectSchema', null);
}
}
diff --git a/lib/www/client/source/src/store/modules/project/getters.js b/lib/www/client/source/src/store/modules/project/getters.js
index e69de29..4ed9621 100644
--- a/lib/www/client/source/src/store/modules/project/getters.js
+++ b/lib/www/client/source/src/store/modules/project/getters.js
@@ -0,0 +1,14 @@
+
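+// Pass-through getters for the project module state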
+function projectId (state) {
+ return state.projectId;
+}
+
+function projectName (state) {
+ return state.projectName;
+}
+
+function projectSchema (state) {
+ return state.projectSchema;
+}
+
+export default { projectId, projectName, projectSchema };
diff --git a/lib/www/client/source/src/views/Project.vue b/lib/www/client/source/src/views/Project.vue
index 5f4cc92..e7ba96c 100644
--- a/lib/www/client/source/src/views/Project.vue
+++ b/lib/www/client/source/src/views/Project.vue
@@ -1,18 +1,25 @@
 [Project.vue template hunk garbled in extraction: the updated template renders the project view only when a project is loaded and shows "Project does not exist." otherwise.]