mirror of https://gitlab.com/wgp/dougal/software.git
synced 2025-12-06 12:17:08 +00:00

Compare commits
13 Commits
258-shortc ... 261-wrong-
| Author | SHA1 | Date |
|---|---|---|
|  | c1b5196134 |  |
|  | 8a815ce3ef |  |
|  | 91076a50ad |  |
|  | e624dcdde0 |  |
|  | a25676122c |  |
|  | e4dfbe2c9a |  |
|  | 78fb34d049 |  |
|  | 38c4125f4f |  |
|  | 04d6cbafe3 |  |
|  | e6319172d8 |  |
|  | 5230ff63e3 |  |
|  | 2b364bbff7 |  |
|  | c4b330b2bb |  |
@@ -12,6 +12,18 @@ surveys should be under $HOME/etc/surveys/*.yaml. In both cases,
$HOME is the home directory of the user running this script.
"""

def is_relative_to(it, other):
    """
    is_relative_to() is not present before Python 3.9, so we
    need this kludge to get Dougal to run on OpenSUSE 15.4
    """

    if "is_relative_to" in dir(it):
        return it.is_relative_to(other)

    return str(it.absolute()).startswith(str(other.absolute()))


prefix = os.environ.get("DOUGAL_ROOT", os.environ.get("HOME", ".")+"/software")
DOUGAL_ROOT = os.environ.get("DOUGAL_ROOT", os.environ.get("HOME", ".")+"/software")
@@ -142,7 +154,7 @@ def untranslate_path (file):

    if filepath.is_absolute():
        if type(import_paths) == str:
            if filepath.is_relative_to(import_paths):
            if is_relative_to(filepath, import_paths):
                physical_root = pathlib.Path("/")
                physical_prefix = pathlib.Path(import_paths)
                return str(root.joinpath(filepath.relative_to(physical_prefix)))
@@ -152,7 +164,7 @@ def untranslate_path (file):
        for key, value in import_paths.items():
            value = dougal_root.joinpath(value)
            physical_prefix = pathlib.Path(value)
            if filepath.is_relative_to(physical_prefix):
            if is_relative_to(filepath, physical_prefix):
                logical_prefix = physical_root.joinpath(pathlib.Path(key)).resolve()
                return str(logical_prefix.joinpath(filepath.relative_to(physical_prefix)))
@@ -9,11 +9,9 @@ from datastore import Datastore

if __name__ == '__main__':

    print("Reading configuration")
    surveys = configuration.surveys()

    print("Connecting to database")
    db = Datastore()
    surveys = db.surveys()

    print("Reading surveys")
    for survey in surveys:
@@ -1,5 +1,5 @@
\connect dougal

INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.12"}')
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.13"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.12"}' WHERE public.info.key = 'version';
SET value = public.info.value || '{"db_schema": "0.3.13"}' WHERE public.info.key = 'version';
@@ -671,7 +671,7 @@ BEGIN
    id <> NEW.id
    AND label = NEW.label
    AND id IN (SELECT id FROM events_seq WHERE sequence = _sequence);

  DELETE
  FROM events_timed_labels
  WHERE
@@ -854,7 +854,7 @@ CREATE FUNCTION _SURVEY__TEMPLATE_.ij_error(line double precision, point double
DECLARE
  bp jsonb := binning_parameters();
  ij public.geometry := to_binning_grid(geom, bp);

  theta numeric := (bp->>'theta')::numeric * pi() / 180;
  I_inc numeric DEFAULT 1;
  J_inc numeric DEFAULT 1;
@@ -869,13 +869,13 @@ DECLARE
  yoff numeric := (bp->'origin'->>'J')::numeric;
  E0 numeric := (bp->'origin'->>'easting')::numeric;
  N0 numeric := (bp->'origin'->>'northing')::numeric;

  error_i double precision;
  error_j double precision;
BEGIN
  error_i := (public.st_x(ij) - line) * I_width;
  error_j := (public.st_y(ij) - point) * J_width;

  RETURN public.ST_MakePoint(error_i, error_j);
END
$$;
@@ -1488,9 +1488,9 @@ CREATE VIEW _SURVEY__TEMPLATE_.final_lines_summary AS
    s.ts1,
    (s.ts1 - s.ts0) AS duration,
    s.num_points,
    (( SELECT count(*) AS count
         FROM _SURVEY__TEMPLATE_.preplot_points
        WHERE ((preplot_points.line = fl.line) AND (((preplot_points.point >= s.fsp) AND (preplot_points.point <= s.lsp)) OR ((preplot_points.point >= s.lsp) AND (preplot_points.point <= s.fsp))))) - s.num_points) AS missing_shots,
    ( SELECT count(*) AS count
        FROM _SURVEY__TEMPLATE_.missing_sequence_final_points
       WHERE missing_sequence_final_points.sequence = s.sequence) AS missing_shots,
    s.length,
    s.azimuth,
    fl.remarks,
@@ -2137,9 +2137,9 @@ CREATE VIEW _SURVEY__TEMPLATE_.raw_lines_summary AS
    (s.ts1 - s.ts0) AS duration,
    s.num_points,
    s.num_preplots,
    (( SELECT count(*) AS count
         FROM _SURVEY__TEMPLATE_.preplot_points
        WHERE ((preplot_points.line = rl.line) AND (((preplot_points.point >= s.fsp) AND (preplot_points.point <= s.lsp)) OR ((preplot_points.point >= s.lsp) AND (preplot_points.point <= s.fsp))))) - s.num_preplots) AS missing_shots,
    (SELECT count(*) AS count
       FROM _SURVEY__TEMPLATE_.missing_sequence_raw_points
      WHERE missing_sequence_raw_points.sequence = s.sequence) AS missing_shots,
    s.length,
    s.azimuth,
    rl.remarks,
162  etc/db/upgrades/upgrade26-v0.3.13-fix-missing-shots-summary.sql  (Normal file)
@@ -0,0 +1,162 @@
-- Fix wrong number of missing shots in summary views
--
-- New schema version: 0.3.13
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- Fixes a bug in the `final_lines_summary` and `raw_lines_summary` views
-- which results in the number of missing shots being miscounted on jobs
-- using three sources.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', schema_name;
  -- We need to set the search path because some of the trigger
  -- functions reference other tables in survey schemas assuming
  -- they are in the search path.
  EXECUTE format('SET search_path TO %I,public', schema_name);

  CREATE OR REPLACE VIEW raw_lines_summary AS
    WITH summary AS (
      SELECT DISTINCT rs.sequence,
        first_value(rs.point) OVER w AS fsp,
        last_value(rs.point) OVER w AS lsp,
        first_value(rs.tstamp) OVER w AS ts0,
        last_value(rs.tstamp) OVER w AS ts1,
        count(rs.point) OVER w AS num_points,
        count(pp.point) OVER w AS num_preplots,
        public.st_distance(first_value(rs.geometry) OVER w, last_value(rs.geometry) OVER w) AS length,
        ((public.st_azimuth(first_value(rs.geometry) OVER w, last_value(rs.geometry) OVER w) * (180)::double precision) / pi()) AS azimuth
      FROM (raw_shots rs
        LEFT JOIN preplot_points pp USING (line, point))
      WINDOW w AS (PARTITION BY rs.sequence ORDER BY rs.tstamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
    )
    SELECT rl.sequence,
      rl.line,
      s.fsp,
      s.lsp,
      s.ts0,
      s.ts1,
      (s.ts1 - s.ts0) AS duration,
      s.num_points,
      s.num_preplots,
      (SELECT count(*) AS count
         FROM missing_sequence_raw_points
        WHERE missing_sequence_raw_points.sequence = s.sequence) AS missing_shots,
      s.length,
      s.azimuth,
      rl.remarks,
      rl.ntbp,
      rl.meta
    FROM (summary s
      JOIN raw_lines rl USING (sequence));

  CREATE OR REPLACE VIEW final_lines_summary AS
    WITH summary AS (
      SELECT DISTINCT fs.sequence,
        first_value(fs.point) OVER w AS fsp,
        last_value(fs.point) OVER w AS lsp,
        first_value(fs.tstamp) OVER w AS ts0,
        last_value(fs.tstamp) OVER w AS ts1,
        count(fs.point) OVER w AS num_points,
        public.st_distance(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) AS length,
        ((public.st_azimuth(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) * (180)::double precision) / pi()) AS azimuth
      FROM final_shots fs
      WINDOW w AS (PARTITION BY fs.sequence ORDER BY fs.tstamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
    )
    SELECT fl.sequence,
      fl.line,
      s.fsp,
      s.lsp,
      s.ts0,
      s.ts1,
      (s.ts1 - s.ts0) AS duration,
      s.num_points,
      ( SELECT count(*) AS count
          FROM missing_sequence_final_points
         WHERE missing_sequence_final_points.sequence = s.sequence) AS missing_shots,
      s.length,
      s.azimuth,
      fl.remarks,
      fl.meta
    FROM (summary s
      JOIN final_lines fl USING (sequence));

END;
$outer$ LANGUAGE plpgsql;

CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  IF current_db_version >= '0.3.13' THEN
    RAISE EXCEPTION
      USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.3.12' THEN
    RAISE EXCEPTION
      USING MESSAGE='Invalid database version: ' || current_db_version,
      HINT='Ensure all previous patches have been applied.';
  END IF;

  FOR row IN
    SELECT schema_name FROM information_schema.schemata
    WHERE schema_name LIKE 'survey_%'
    ORDER BY schema_name
  LOOP
    CALL pg_temp.upgrade_survey_schema(row.schema_name);
  END LOOP;
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.13"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.3.13"}' WHERE public.info.key = 'version';

CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
14  lib/www/client/source/package-lock.json  (generated)
@@ -9,7 +9,7 @@
      "version": "0.0.0",
      "license": "UNLICENSED",
      "dependencies": {
        "@mdi/font": "^5.6.55",
        "@mdi/font": "^7.2.96",
        "core-js": "^3.6.5",
        "d3": "^7.0.1",
        "jwt-decode": "^3.0.0",
@@ -1763,9 +1763,9 @@
      }
    },
    "node_modules/@mdi/font": {
      "version": "5.9.55",
      "resolved": "https://registry.npmjs.org/@mdi/font/-/font-5.9.55.tgz",
      "integrity": "sha512-jswRF6q3eq8NWpWiqct6q+6Fg/I7nUhrxYJfiEM8JJpap0wVJLQdbKtyS65GdlK7S7Ytnx3TTi/bmw+tBhkGmg=="
      "version": "7.2.96",
      "resolved": "https://registry.npmjs.org/@mdi/font/-/font-7.2.96.tgz",
      "integrity": "sha512-e//lmkmpFUMZKhmCY9zdjRe4zNXfbOIJnn6xveHbaV2kSw5aJ5dLXUxcRt1Gxfi7ZYpFLUWlkG2MGSFAiqAu7w=="
    },
    "node_modules/@mrmlnc/readdir-enhanced": {
      "version": "2.2.1",
@@ -16442,9 +16442,9 @@
      }
    },
    "@mdi/font": {
      "version": "5.9.55",
      "resolved": "https://registry.npmjs.org/@mdi/font/-/font-5.9.55.tgz",
      "integrity": "sha512-jswRF6q3eq8NWpWiqct6q+6Fg/I7nUhrxYJfiEM8JJpap0wVJLQdbKtyS65GdlK7S7Ytnx3TTi/bmw+tBhkGmg=="
      "version": "7.2.96",
      "resolved": "https://registry.npmjs.org/@mdi/font/-/font-7.2.96.tgz",
      "integrity": "sha512-e//lmkmpFUMZKhmCY9zdjRe4zNXfbOIJnn6xveHbaV2kSw5aJ5dLXUxcRt1Gxfi7ZYpFLUWlkG2MGSFAiqAu7w=="
    },
    "@mrmlnc/readdir-enhanced": {
      "version": "2.2.1",
@@ -7,7 +7,7 @@
    "build": "NODE_OPTIONS=--openssl-legacy-provider vue-cli-service build"
  },
  "dependencies": {
    "@mdi/font": "^5.6.55",
    "@mdi/font": "^7.2.96",
    "core-js": "^3.6.5",
    "d3": "^7.0.1",
    "jwt-decode": "^3.0.0",
@@ -16,7 +16,7 @@ async function api ({state, commit, dispatch}, [resource, init = {}, cb]) {
  const url = /^https?:\/\//i.test(resource) ? resource : (state.apiUrl + resource);
  const res = await fetch(url, init);
  if (typeof cb === 'function') {
    cb(null, res);
    await cb(null, res);
  }
  if (res.ok) {
@@ -35,7 +35,14 @@ async function api ({state, commit, dispatch}, [resource, init = {}, cb]) {
      throw err;
    }
  } else {
    await dispatch('showSnack', [res.statusText, "warning"]);
    let message = res.statusText;
    if (res.headers.get("Content-Type").match(/^application\/json/i)) {
      const body = await res.json();
      if (body.message) {
        message = body.message;
      }
    }
    await dispatch('showSnack', [message, "warning"]);
  }
} catch (err) {
  if (err && err.name == "AbortError") return;
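The new `else` branch prefers the `message` field of a JSON error body over `res.statusText` when building the snackbar text. A minimal standalone sketch of that logic follows; it is not the store code itself (it adds a guard for a missing Content-Type header, and the mocked `Response` and its message are invented for illustration). It runs on Node 18+, in line with the `engines` bump later in this changeset.

```js
// Sketch only: mirrors the new error-message handling in the api() store action.
async function errorMessage (res) {
  let message = res.statusText;
  const type = res.headers.get("Content-Type") || "";   // guard added for the sketch
  if (/^application\/json/i.test(type)) {
    const body = await res.json();
    if (body.message) {
      message = body.message;
    }
  }
  return message;
}

// Exercise it with a mocked fetch Response (hypothetical values):
const res = new Response(JSON.stringify({ message: "Survey not found" }), {
  status: 404,
  statusText: "Not Found",
  headers: { "Content-Type": "application/json" }
});
errorMessage(res).then(m => console.log(m));   // -> "Survey not found"
```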
@@ -10,7 +10,7 @@ const mw = require('./middleware');
const { ERROR, INFO, DEBUG } = require('DOUGAL_ROOT/debug')(__filename);
const verbose = process.env.NODE_ENV != 'test';
const app = express();
app.locals.version = "0.3.1"; // API version
app.locals.version = "0.4.0"; // API version

app.map = function(a, route){
  route = route || '';
@@ -259,7 +259,7 @@ app.map({
    get: [ mw.auth.access.write, mw.files.get ]
  },
  '/files/?:path(*)': {
    get: [ mw.auth.access.write, mw.files.get ]
    get: [ mw.auth.access.write, mw.etag.noSave, mw.files.get ]
  },
  '/navdata/': {
    get: [ mw.navdata.get ],
@@ -1,5 +1,5 @@
const { setSurvey } = require('../../connection');
const { deepMerge } = require('../../../utils');
const { deepMerge, removeNulls } = require('../../../utils');
const { modify } = require('../create');

@@ -36,7 +36,7 @@ async function patch (projectId, payload, opts = {}) {
    }
  }

  const dest = deepMerge(source, payload);
  const dest = removeNulls(deepMerge(source, payload));
  await modify(projectId, dest);
  return dest;
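The apparent intent of wrapping `deepMerge` in `removeNulls` is that a `null` in a PATCH payload now deletes the corresponding key from the stored configuration instead of persisting as a literal null (assuming `deepMerge` lets the payload's null overwrite the existing value). A rough sketch of the idea, with a simplified `deepMerge` stand-in and invented field names:

```js
// Sketch only: simplified deepMerge stand-in, not the project's implementation.
function deepMerge (a, b) {
  const out = { ...a };
  for (const [k, v] of Object.entries(b)) {
    if (v && typeof v === "object" && !Array.isArray(v)
        && out[k] && typeof out[k] === "object" && !Array.isArray(out[k])) {
      out[k] = deepMerge(out[k], v);
    } else {
      out[k] = v;   // a null here survives the merge...
    }
  }
  return out;
}

// ...and a removeNulls() equivalent to lib/www/server/lib/utils/removeNulls.js drops it.
function removeNulls (obj) {
  for (const [key, value] of Object.entries(obj)) {
    if (value === null) delete obj[key];
    else if (Object.prototype.toString.call(value) === "[object Object]") removeNulls(value);
  }
  return obj;
}

// Hypothetical stored config and PATCH payload:
const source  = { qc: { maxGap: 3, warnOnly: false }, remarks: "keep me" };
const payload = { qc: { warnOnly: null } };

console.log(removeNulls(deepMerge(source, payload)));
// -> { qc: { maxGap: 3 }, remarks: 'keep me' }
```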
@@ -56,14 +56,14 @@ async function getProjectQCConfig (projectId) {
async function main () {
  // Fetch list of projects
  console.log("GET PROJECTS");
  const projects = await project.list();
  const projects = await project.get();
  console.log("PROJECTS", projects);

  for (const proj of projects) {
    const projectId = proj.pid;
    console.log("PROJECT ID", projectId);

    if (!project.archived) {
    if (!proj.archived) {
      const QCTstamp = new Date();

      const currentQCHash = await projectHash(projectId);
@@ -4,5 +4,6 @@ module.exports = {
  dms: require('./dms'),
  replaceMarkers: require('./replaceMarkers'),
  flattenQCDefinitions: require('./flattenQCDefinitions'),
  deepMerge: require('./deepMerge')
  deepMerge: require('./deepMerge'),
  removeNulls: require('./removeNulls')
};
23  lib/www/server/lib/utils/removeNulls.js  (Normal file)
@@ -0,0 +1,23 @@

/**
 * Delete keys whose value is null.
 *
 */
function removeNulls (obj) {

  function getType (obj) {
    return Object.prototype.toString.call(obj).slice(8, -1).toLowerCase();
  }

  for (let [key, value] of Object.entries(obj)) {
    if (value === null) {
      delete obj[key];
    } else if (getType(value) == "object") {
      removeNulls(value);
    }
  }

  return obj;
}

module.exports = removeNulls;
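A short usage sketch of the new helper (object values invented for illustration; the require path is relative to the repository root). The object is mutated in place and also returned, and the recursion only descends into plain objects, so nulls inside arrays are left alone:

```js
const removeNulls = require('./lib/www/server/lib/utils/removeNulls');

const obj = { a: 1, b: null, c: { d: null, e: "keep" }, f: [null, 2] };
console.log(removeNulls(obj));
// -> { a: 1, c: { e: 'keep' }, f: [ null, 2 ] }
```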
2  lib/www/server/package-lock.json  (generated)
@@ -36,7 +36,7 @@
        "redoc-cli": "^0.13.9"
      },
      "engines": {
        "node": ">=14.0.0"
        "node": ">=18.0.0"
      }
    },
    "node_modules/@mapbox/node-pre-gyp": {
@@ -12,10 +12,10 @@
  "private": true,
  "config": {
    "db_schema": "^0.3.11",
    "api": "^0.3.0"
    "api": "^0.4.0"
  },
  "engines": {
    "node": ">=14.0.0"
    "node": ">=18.0.0"
  },
  "os": [
    "linux"