Merge branch '103-24-hour-lookahead-planning-report' into 'devel'

Resolve "24-hour lookahead planning report"

Closes #103

See merge request wgp/dougal/software!13
This commit is contained in:
D. Berge
2021-06-21 14:53:35 +00:00
13 changed files with 3523 additions and 36 deletions

View File

@@ -3,7 +3,7 @@
--
-- Dumped from database version 12.6
-- Dumped by pg_dump version 12.7
-- Dumped by pg_dump version 12.6
SET statement_timeout = 0;
SET lock_timeout = 0;
@@ -63,6 +63,185 @@ If the hash matches that of an existing entry, update the path of that entry to
If the path matches that of an existing entry, delete that entry (which cascades) and insert the new one.';
--
-- Name: adjust_planner(); Type: PROCEDURE; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
CREATE PROCEDURE _SURVEY__TEMPLATE_.adjust_planner()
    LANGUAGE plpgsql
    AS $$
-- Reconcile the line planner with reality: renumber, re-time or delete
-- planned lines based on the most recently shot sequence.  Meant to be
-- called at regular intervals by an external process (the runner) and to
-- be safe to re-run.
DECLARE
    _planner_config jsonb;                    -- "planner" object from file_data
    _planned_line planned_lines%ROWTYPE;      -- planned line matching the last shot sequence
    _lag interval;                            -- line change allowance (see NOTE below)
    _last_sequence sequences_summary%ROWTYPE; -- most recently shot sequence
    _deltatime interval;                      -- correction applied to planned times
    _shotinterval interval;                   -- observed time per shotpoint
    _tstamp timestamptz;                      -- actual end time of the last sequence
    _incr integer;                            -- shooting direction: sign(lsp - fsp)
BEGIN
    -- Renumbering below can transiently collide on the primary key.
    SET CONSTRAINTS planned_lines_pkey DEFERRED;
    SELECT data->'planner'
    INTO _planner_config
    FROM file_data
    WHERE data ? 'planner';
    -- Most recently shot sequence.
    SELECT *
    INTO _last_sequence
    FROM sequences_summary
    ORDER BY sequence DESC
    LIMIT 1;
    -- Planned line (if any) matching that sequence.
    SELECT *
    INTO _planned_line
    FROM planned_lines
    WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
    -- NOTE(review): the window below runs over a single-row selection, so
    -- lead(ts0) is always NULL and _lag always takes the configured default;
    -- _lag is not referenced after this point.
    SELECT
        COALESCE(
            ((lead(ts0) OVER (ORDER BY sequence)) - ts1),
            make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer)
        )
    INTO _lag
    FROM planned_lines
    WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
    _incr = sign(_last_sequence.lsp - _last_sequence.fsp);
    RAISE NOTICE '_planner_config: %', _planner_config;
    RAISE NOTICE '_last_sequence: %', _last_sequence;
    RAISE NOTICE '_planned_line: %', _planned_line;
    RAISE NOTICE '_incr: %', _incr;
    -- Does the latest sequence match a planned sequence?
    IF _planned_line IS NULL THEN -- No it doesn't
        RAISE NOTICE 'Latest sequence shot does not match a planned sequence';
        SELECT * INTO _planned_line FROM planned_lines ORDER BY sequence ASC LIMIT 1;
        RAISE NOTICE '_planned_line: %', _planned_line;
        IF _planned_line.sequence <= _last_sequence.sequence THEN
            RAISE NOTICE 'Renumbering the planned sequences starting from %', _planned_line.sequence + 1;
            -- Renumber the planned sequences starting from last shot sequence number + 1
            UPDATE planned_lines
            SET sequence = sequence + _last_sequence.sequence - _planned_line.sequence + 1;
        END IF;
        -- The correction to make to the first planned line's ts0 will be based on either the last
        -- sequence's EOL + default line change time or the current time, whichever is later.
        _deltatime := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1) + make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer), current_timestamp) - _planned_line.ts0;
        -- Is the first of the planned lines start time in the past? (±5 mins)
        IF _planned_line.ts0 < (current_timestamp - make_interval(mins => 5)) THEN
            RAISE NOTICE 'First planned line is in the past. Adjusting times by %', _deltatime;
            -- Adjust the start / end time of the planned lines by assuming that we are at
            -- `defaultLineChangeDuration` minutes away from SOL of the first planned line.
            -- Intentionally unfiltered: shifts the whole plan.
            UPDATE planned_lines
            SET
                ts0 = ts0 + _deltatime,
                ts1 = ts1 + _deltatime;
        END IF;
    ELSE -- Yes it does
        RAISE NOTICE 'Latest sequence does match a planned sequence: %, %', _planned_line.sequence, _planned_line.line;
        -- Is it online?
        IF EXISTS(SELECT 1 FROM raw_lines_files WHERE sequence = _last_sequence.sequence AND hash = '*online*') THEN
            -- Yes it is
            RAISE NOTICE 'Sequence % is online', _last_sequence.sequence;
            -- Let us get the SOL from the events log if we can
            RAISE NOTICE 'Trying to set fsp, ts0 from events log FSP, FGSP';
            WITH e AS (
                SELECT * FROM events
                WHERE
                    sequence = _last_sequence.sequence
                    AND ('FSP' = ANY(labels) OR 'FGSP' = ANY(labels))
                ORDER BY tstamp LIMIT 1
            )
            UPDATE planned_lines
            SET
                fsp = COALESCE(e.point, fsp),
                ts0 = COALESCE(e.tstamp, ts0)
            FROM e
            WHERE planned_lines.sequence = _last_sequence.sequence;
            -- Observed shot interval of the online sequence.
            _shotinterval := (_last_sequence.ts1 - _last_sequence.ts0) / abs(_last_sequence.lsp - _last_sequence.fsp);
            RAISE NOTICE 'Estimating EOL from current shot interval: %', _shotinterval;
            SELECT (abs(lsp-fsp) * _shotinterval + ts0) - ts1
            INTO _deltatime
            FROM planned_lines
            WHERE sequence = _last_sequence.sequence;
            RAISE NOTICE 'Adjustment is %', _deltatime;
            -- BUGFIX: compare the *magnitude* of the adjustment.  Without
            -- abs(), any negative adjustment (line running ahead of plan)
            -- satisfied "< 8" and was silently dropped.  This also brings
            -- the dump in line with the 0983abac -> 81d9ea19 upgrade script.
            IF abs(EXTRACT(EPOCH FROM _deltatime)) < 8 THEN
                RAISE NOTICE 'Adjustment too small (< 8 s), so not applying it';
                RETURN;
            END IF;
            -- Adjust ts1 for the current sequence
            UPDATE planned_lines
            SET ts1 = ts1 + _deltatime
            WHERE sequence = _last_sequence.sequence;
            -- Now shift all sequences after
            UPDATE planned_lines
            SET ts0 = ts0 + _deltatime, ts1 = ts1 + _deltatime
            WHERE sequence > _last_sequence.sequence;
            RAISE NOTICE 'Deleting planned sequences before %', _planned_line.sequence;
            -- Remove all previous planner entries.
            DELETE
            FROM planned_lines
            WHERE sequence < _last_sequence.sequence;
        ELSE
            -- No it isn't
            RAISE NOTICE 'Sequence % is offline', _last_sequence.sequence;
            -- We were supposed to finish at _planned_line.ts1 but we finished at:
            _tstamp := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1), current_timestamp);
            -- So we need to adjust timestamps by:
            _deltatime := _tstamp - _planned_line.ts1;
            RAISE NOTICE 'Planned end: %, actual end: % (%, %)', _planned_line.ts1, _tstamp, _planned_line.sequence, _last_sequence.sequence;
            RAISE NOTICE 'Shifting times by % for sequences > %', _deltatime, _planned_line.sequence;
            -- NOTE: This won't work if sequences are not, err… sequential.
            -- NOTE: This has been known to happen in 2020.
            UPDATE planned_lines
            SET
                ts0 = ts0 + _deltatime,
                ts1 = ts1 + _deltatime
            WHERE sequence > _planned_line.sequence;
            RAISE NOTICE 'Deleting planned sequences up to %', _planned_line.sequence;
            -- Remove all previous planner entries.
            DELETE
            FROM planned_lines
            WHERE sequence <= _last_sequence.sequence;
        END IF;
    END IF;
END;
$$;
ALTER PROCEDURE _SURVEY__TEMPLATE_.adjust_planner() OWNER TO postgres;
--
-- Name: assoc_tstamp(); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
@@ -1884,7 +2063,7 @@ CREATE TRIGGER events_seq_labels_single_tg AFTER INSERT OR UPDATE ON _SURVEY__TE
-- Name: events_timed_labels events_seq_labels_single_tg; Type: TRIGGER; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
CREATE TRIGGER events_timed_labels_single_tg AFTER INSERT OR UPDATE ON _SURVEY__TEMPLATE_.events_timed_labels FOR EACH ROW EXECUTE FUNCTION _SURVEY__TEMPLATE_.events_seq_labels_single();
CREATE TRIGGER events_seq_labels_single_tg AFTER INSERT OR UPDATE ON _SURVEY__TEMPLATE_.events_timed_labels FOR EACH ROW EXECUTE FUNCTION _SURVEY__TEMPLATE_.events_seq_labels_single();
--
@@ -1936,6 +2115,13 @@ CREATE TRIGGER final_shots_qc_tg AFTER INSERT OR DELETE OR UPDATE ON _SURVEY__TE
CREATE TRIGGER final_shots_tg AFTER INSERT OR DELETE OR UPDATE ON _SURVEY__TEMPLATE_.final_shots FOR EACH STATEMENT EXECUTE FUNCTION public.notify('final_shots');
--
-- Name: info info_tg; Type: TRIGGER; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--
CREATE TRIGGER info_tg AFTER INSERT OR DELETE OR UPDATE ON _SURVEY__TEMPLATE_.info FOR EACH ROW EXECUTE FUNCTION public.notify('info');
--
-- Name: planned_lines planned_lines_tg; Type: TRIGGER; Schema: _SURVEY__TEMPLATE_; Owner: postgres
--

View File

@@ -0,0 +1,207 @@
-- Upgrade the database from commit 0983abac to 81d9ea19.
--
-- NOTE: This upgrade must be applied to every schema in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This defines a new procedure adjust_planner() which resolves some
-- conflicts between shot sequences and the planner, such as removing
-- sequences that have been shot, renumbering, or adjusting the planned
-- times.
--
-- It is meant to be called at regular intervals by an external process,
-- such as the runner (software/bin/runner.sh).
--
-- A trigger for changes to the schema's `info` table is also added.
--
-- To apply, run as the dougal user, for every schema in the database:
--
-- psql <<EOF
-- SET search_path TO survey_*,public;
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
BEGIN;
CREATE OR REPLACE PROCEDURE adjust_planner ()
LANGUAGE plpgsql
AS $$
-- Reconcile the line planner with what has actually been shot: renumber,
-- re-time or delete planned lines based on the latest sequence summary.
-- Invoked periodically by the runner; safe to re-apply (see file header).
DECLARE
_planner_config jsonb; -- "planner" object from file_data
_planned_line planned_lines%ROWTYPE; -- planned line matching the last shot sequence
_lag interval; -- line change allowance (see NOTE below)
_last_sequence sequences_summary%ROWTYPE; -- most recently shot sequence
_deltatime interval; -- correction applied to planned timestamps
_shotinterval interval; -- observed time per shotpoint of the online sequence
_tstamp timestamptz; -- actual (or current) end time of the last sequence
_incr integer; -- shooting direction: sign(lsp - fsp)
BEGIN
-- Renumbering below can transiently collide on the primary key.
SET CONSTRAINTS planned_lines_pkey DEFERRED;
SELECT data->'planner'
INTO _planner_config
FROM file_data
WHERE data ? 'planner';
-- Most recently shot sequence.
SELECT *
INTO _last_sequence
FROM sequences_summary
ORDER BY sequence DESC
LIMIT 1;
-- Planned line (if any) matching that sequence.
SELECT *
INTO _planned_line
FROM planned_lines
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
-- NOTE(review): the window below runs over a single-row selection, so
-- lead(ts0) is always NULL and _lag always takes the configured default;
-- _lag is not referenced after this point.
SELECT
COALESCE(
((lead(ts0) OVER (ORDER BY sequence)) - ts1),
make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer)
)
INTO _lag
FROM planned_lines
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
_incr = sign(_last_sequence.lsp - _last_sequence.fsp);
RAISE NOTICE '_planner_config: %', _planner_config;
RAISE NOTICE '_last_sequence: %', _last_sequence;
RAISE NOTICE '_planned_line: %', _planned_line;
RAISE NOTICE '_incr: %', _incr;
-- Does the latest sequence match a planned sequence?
IF _planned_line IS NULL THEN -- No it doesn't
RAISE NOTICE 'Latest sequence shot does not match a planned sequence';
SELECT * INTO _planned_line FROM planned_lines ORDER BY sequence ASC LIMIT 1;
RAISE NOTICE '_planned_line: %', _planned_line;
IF _planned_line.sequence <= _last_sequence.sequence THEN
RAISE NOTICE 'Renumbering the planned sequences starting from %', _planned_line.sequence + 1;
-- Renumber the planned sequences starting from last shot sequence number + 1
UPDATE planned_lines
SET sequence = sequence + _last_sequence.sequence - _planned_line.sequence + 1;
END IF;
-- The correction to make to the first planned line's ts0 will be based on either the last
-- sequence's EOL + default line change time or the current time, whichever is later.
_deltatime := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1) + make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer), current_timestamp) - _planned_line.ts0;
-- Is the first of the planned lines start time in the past? (±5 mins)
IF _planned_line.ts0 < (current_timestamp - make_interval(mins => 5)) THEN
RAISE NOTICE 'First planned line is in the past. Adjusting times by %', _deltatime;
-- Adjust the start / end time of the planned lines by assuming that we are at
-- `defaultLineChangeDuration` minutes away from SOL of the first planned line.
-- Intentionally unfiltered: shifts the whole plan.
UPDATE planned_lines
SET
ts0 = ts0 + _deltatime,
ts1 = ts1 + _deltatime;
END IF;
ELSE -- Yes it does
RAISE NOTICE 'Latest sequence does match a planned sequence: %, %', _planned_line.sequence, _planned_line.line;
-- Is it online?
IF EXISTS(SELECT 1 FROM raw_lines_files WHERE sequence = _last_sequence.sequence AND hash = '*online*') THEN
-- Yes it is
RAISE NOTICE 'Sequence % is online', _last_sequence.sequence;
-- Let us get the SOL from the events log if we can
RAISE NOTICE 'Trying to set fsp, ts0 from events log FSP, FGSP';
WITH e AS (
SELECT * FROM events
WHERE
sequence = _last_sequence.sequence
AND ('FSP' = ANY(labels) OR 'FGSP' = ANY(labels))
ORDER BY tstamp LIMIT 1
)
UPDATE planned_lines
SET
fsp = COALESCE(e.point, fsp),
ts0 = COALESCE(e.tstamp, ts0)
FROM e
WHERE planned_lines.sequence = _last_sequence.sequence;
-- Shot interval
_shotinterval := (_last_sequence.ts1 - _last_sequence.ts0) / abs(_last_sequence.lsp - _last_sequence.fsp);
RAISE NOTICE 'Estimating EOL from current shot interval: %', _shotinterval;
SELECT (abs(lsp-fsp) * _shotinterval + ts0) - ts1
INTO _deltatime
FROM planned_lines
WHERE sequence = _last_sequence.sequence;
---- Set ts1 for the current sequence
--UPDATE planned_lines
--SET
--ts1 = (abs(lsp-fsp) * _shotinterval) + ts0
--WHERE sequence = _last_sequence.sequence;
RAISE NOTICE 'Adjustment is %', _deltatime;
-- Dead-band: ignore corrections smaller than 8 seconds either way.
IF abs(EXTRACT(EPOCH FROM _deltatime)) < 8 THEN
RAISE NOTICE 'Adjustment too small (< 8 s), so not applying it';
RETURN;
END IF;
-- Adjust ts1 for the current sequence
UPDATE planned_lines
SET ts1 = ts1 + _deltatime
WHERE sequence = _last_sequence.sequence;
-- Now shift all sequences after
UPDATE planned_lines
SET ts0 = ts0 + _deltatime, ts1 = ts1 + _deltatime
WHERE sequence > _last_sequence.sequence;
RAISE NOTICE 'Deleting planned sequences before %', _planned_line.sequence;
-- Remove all previous planner entries.
DELETE
FROM planned_lines
WHERE sequence < _last_sequence.sequence;
ELSE
-- No it isn't
RAISE NOTICE 'Sequence % is offline', _last_sequence.sequence;
-- We were supposed to finish at _planned_line.ts1 but we finished at:
_tstamp := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1), current_timestamp);
-- WARNING Next line is for testing only
--_tstamp := COALESCE(_last_sequence.ts1_final, _last_sequence.ts1);
-- So we need to adjust timestamps by:
_deltatime := _tstamp - _planned_line.ts1;
RAISE NOTICE 'Planned end: %, actual end: % (%, %)', _planned_line.ts1, _tstamp, _planned_line.sequence, _last_sequence.sequence;
RAISE NOTICE 'Shifting times by % for sequences > %', _deltatime, _planned_line.sequence;
-- NOTE: This won't work if sequences are not, err… sequential.
-- NOTE: This has been known to happen in 2020.
UPDATE planned_lines
SET
ts0 = ts0 + _deltatime,
ts1 = ts1 + _deltatime
WHERE sequence > _planned_line.sequence;
RAISE NOTICE 'Deleting planned sequences up to %', _planned_line.sequence;
-- Remove all previous planner entries.
DELETE
FROM planned_lines
WHERE sequence <= _last_sequence.sequence;
END IF;
END IF;
END;
$$;
-- (Re)install the info-change notification trigger; DROP first so the
-- script can be re-applied without error (see the idempotence note above).
DROP TRIGGER IF EXISTS info_tg ON info;
CREATE TRIGGER info_tg AFTER INSERT OR DELETE OR UPDATE ON info FOR EACH ROW EXECUTE FUNCTION public.notify('info');
--
--NOTE Run `COMMIT;` now if all went well
--

File diff suppressed because one or more lines are too long

View File

@@ -4,6 +4,39 @@
<v-card-title>
<v-toolbar flat>
<v-toolbar-title>Plan</v-toolbar-title>
<v-menu v-if="items">
<template v-slot:activator="{on, attrs}">
<v-btn class="ml-5" small v-on="on" v-bind="attrs">
<span class="d-none d-lg-inline">Download as</span>
<v-icon right small>mdi-cloud-download</v-icon>
</v-btn>
</template>
<v-list>
<v-list-item
:href="`/api/project/${$route.params.project}/plan/?mime=text%2Fcsv&download`"
title="Download as a comma-separated values file."
>CSV</v-list-item>
<v-list-item
:href="`/api/project/${$route.params.project}/plan/?mime=application%2Fgeo%2Bjson&download`"
title="Download as a QGIS-compatible GeoJSON file"
>GeoJSON</v-list-item>
<v-list-item
:href="`/api/project/${$route.params.project}/plan/?mime=application%2Fjson&download`"
title="Download as a generic JSON file"
>JSON</v-list-item>
<v-list-item
:href="`/api/project/${$route.params.project}/plan/?mime=text%2Fhtml&download`"
title="Download as an HTML formatted file"
>HTML</v-list-item>
<v-list-item
:href="`/api/project/${$route.params.project}/plan/?mime=application%2Fpdf&download`"
title="Download as a Portable Document File"
>PDF</v-list-item>
</v-list>
</v-menu>
<v-spacer></v-spacer>
<v-text-field
v-model="filter"
@@ -31,6 +64,47 @@
</v-list>
</v-menu>
<v-card class="mb-5" flat>
<v-card-title class="text-overline">
Comments
<template v-if="writeaccess">
<v-btn v-if="!editRemarks"
class="ml-3"
small
icon
title="Edit comments"
@click="editRemarks=true"
>
<v-icon small>mdi-square-edit-outline</v-icon>
</v-btn>
<v-btn v-else
class="ml-3"
small
icon
title="Save comments"
@click="saveRemarks"
>
<v-icon>mdi-content-save-edit-outline</v-icon>
</v-btn>
</template>
</v-card-title>
<v-card-text v-if="editRemarks">
<v-textarea
v-model="remarks"
class="markdown"
placeholder="Plan comments"
dense
auto-grow
rows="1"
></v-textarea>
</v-card-text>
<v-card-text v-else v-html="$options.filters.markdown(remarks || '*(nil)*')"></v-card-text>
</v-card>
<v-data-table
:headers="headers"
:items="items"
@@ -336,6 +410,8 @@ export default {
}
],
items: [],
remarks: null,
editRemarks: false,
filter: null,
num_lines: null,
activeItem: null,
@@ -409,6 +485,10 @@ export default {
} else {
this.queuedReload = true;
}
} else if (event.channel == "info" && event.payload.pid == this.$route.params.project) {
if (event.payload?.new?.key == "plan" && ("remarks" in (event.payload?.new?.value || {}))) {
this.remarks = event.payload?.new.value.remarks;
}
}
},
@@ -613,6 +693,27 @@ export default {
}
},
async saveRemarks () {
const url = `/project/${this.$route.params.project}/info/plan/remarks`;
let res;
if (this.remarks) {
const init = {
method: "PUT",
headers: { "Content-Type": "text/plain" },
body: this.remarks
};
await this.api([url, init, (e, r) => res = r]);
} else {
const init = {
method: "DELETE"
};
await this.api([url, init, (e, r) => res = r]);
}
if (res && res.ok) {
this.editRemarks = false;
}
},
async getPlannedLines () {
const url = `/project/${this.$route.params.project}/plan`;
@@ -635,6 +736,11 @@ export default {
}
},
async getPlannerRemarks () {
const url = `/project/${this.$route.params.project}/info/plan/remarks`;
this.remarks = await this.api([url]) || "";
},
async getSequences () {
const url = `/project/${this.$route.params.project}/sequence`;
this.sequences = await this.api([url]) || [];
@@ -652,6 +758,7 @@ export default {
// Component start-up: the planner configuration is awaited first
// (presumably because the subsequent fetches depend on it — confirm),
// then the lines and remarks loads run fire-and-forget in parallel.
async mounted () {
await this.getPlannerConfig();
this.getPlannedLines(); // not awaited; resolves into reactive state
this.getPlannerRemarks(); // not awaited; resolves into reactive state
}
}

View File

@@ -158,7 +158,9 @@ app.map({
},
'/project/:project/info/:path(*)': {
get: [ mw.info.get ],
// post: [ mw.info.post ],
post: [ mw.auth.access.write, mw.info.post ],
put: [ mw.auth.access.write, mw.info.put ],
delete: [ mw.auth.access.write, mw.info.delete ]
},
'/project/:project/meta/': {
put: [ mw.auth.access.write, mw.meta.put ],

View File

@@ -0,0 +1,42 @@
const { AsyncParser } = require('json2csv');
const { plan } = require('../../../../lib/db');
// Express handler: stream the project's planned lines as CSV.
// index.js negotiates the MIME type and sets Content-Type before
// dispatching here.
const csv = async function (req, res, next) {
  try {
    const response = await plan.list(req.params.project, req.query);
    if ("download" in req.query || "d" in req.query) {
      // BUGFIX: this is the CSV handler, so advertise a .csv attachment
      // (was "html"), and actually interpolate the computed filename
      // (the header previously contained a literal placeholder).
      const extension = "csv";
      const filename = `${req.params.project.toUpperCase()}-Plan.${extension}`;
      res.set("Content-Disposition", `attachment; filename="${filename}"`);
    }
    // Flatten each row for tabular output: promote the two line endpoints
    // to scalar lon/lat columns, reduce the duration interval to seconds,
    // and drop the non-scalar fields.
    const transforms = (i) => {
      i.lon0 = Number(((i?.geometry?.coordinates||[])[0]||[])[0]).toFixed(6)*1;
      i.lat0 = Number(((i?.geometry?.coordinates||[])[0]||[])[1]).toFixed(6)*1;
      i.lon1 = Number(((i?.geometry?.coordinates||[])[1]||[])[0]).toFixed(6)*1;
      i.lat1 = Number(((i?.geometry?.coordinates||[])[1]||[])[1]).toFixed(6)*1;
      // BUGFIX: a missing duration used to produce NaN; keep it null and
      // treat absent h/m/s components as zero.
      i.duration = i.duration
        ? (i.duration.hours || 0)*3600 + (i.duration.minutes || 0)*60 + (i.duration.seconds || 0)
        : null;
      delete i.class;
      delete i.geometry;
      delete i.meta;
      return i;
    };
    // Renamed from `csv` to avoid shadowing the handler itself.
    const csvParser = new AsyncParser({transforms}, {objectMode: true});
    // NOTE(review): throwing inside an event handler will not reach the
    // catch below — confirm whether parser errors should go to next(err).
    csvParser.processor.on('error', (err) => { throw err; });
    csvParser.processor.on('end', () => {
      res.end();
      next();
    });
    res.status(200);
    csvParser.processor.pipe(res);
    response.forEach(row => csvParser.input.push(row));
    csvParser.input.push(null); // signal end of input to the parser
  } catch (err) {
    next(err);
  }
};
module.exports = csv;

View File

@@ -0,0 +1,83 @@
// const { configuration } = require('../../../../lib/db');
const { plan, gis, info } = require('../../../../lib/db');
const leafletMap = require('../../../../lib/map');
const render = require('../../../../lib/render');
// FIXME Refactor when able
const defaultTemplatePath = require('path').resolve(__dirname, "../../../../../../../etc/default/templates/plan.html.njk");
// Express handler: render the project plan as an HTML report (line table
// plus a rasterized overview map), optionally as a download attachment.
const html = async function (req, res, next) {
  try {
    const planInfo = await info.get(req.params.project, "plan", req.query);
    const lines = await plan.list(req.params.project, req.query);
    const preplotGeoJSON = await gis.project.preplot.lines(req.params.project, {class: "V", ...req.query});
    // Planned lines that carry a geometry, converted to GeoJSON features;
    // the geometry is removed from the properties to avoid duplication.
    const linesGeoJSON = lines.filter(plan => plan.geometry).map(plan => {
      const feature = {
        type: "Feature",
        geometry: plan.geometry,
        properties: plan
      };
      delete feature.properties.geometry;
      return feature;
    });
    // const template = (await configuration.get(req.params.project, "sse/templates/0/template")) || defaultTemplatePath;
    const template = defaultTemplatePath;
    const mapConfig = {
      size: { width: 500, height: 500 },
      layers: [
        {
          // Preplot lines: thin grey background, dimmed further when flagged ntba.
          features: preplotGeoJSON,
          options: {
            style (feature) {
              return {
                opacity: feature.properties.ntba ? 0.2 : 0.5,
                color: "gray",
                weight: 1
              }
            }
          }
        },
        {
          // Planned lines: highlighted on top of the preplot.
          features: linesGeoJSON,
          options: {
            style (feature) {
              return {
                color: "magenta",
                weight: 2
              }
            }
          }
        }
      ]
    }
    const map = leafletMap(mapConfig);
    const data = {
      projectId: req.params.project,
      info: planInfo,
      lines,
      map: await map.getImageData()
    }
    const response = await render(data, template);
    if ("download" in req.query || "d" in req.query) {
      const extension = "html";
      const filename = `${req.params.project.toUpperCase()}-Plan.${extension}`;
      // BUGFIX: interpolate the computed filename (the header previously
      // contained a literal placeholder instead of the template variable).
      res.set("Content-Disposition", `attachment; filename="${filename}"`);
    }
    res.status(200).send(response);
    next();
  } catch (err) {
    // Template errors are wrapped so only their message propagates.
    if (err.message.startsWith("template")) {
      next({message: err.message});
    } else {
      next(err);
    }
  }
};
module.exports = html;

View File

@@ -1,14 +1,20 @@
const json = require('./json');
const geojson = require('./geojson');
const html = require('./html');
const pdf = require('./pdf');
const csv = require('./csv');
module.exports = async function (req, res, next) {
try {
const handlers = {
"application/json": json,
"application/geo+json": geojson,
"text/csv": csv,
"text/html": html,
"application/pdf": pdf
};
const mimetype = req.accepts(Object.keys(handlers));
const mimetype = (handlers[req.query.mime] && req.query.mime) || req.accepts(Object.keys(handlers));
if (mimetype) {
res.set("Content-Type", mimetype);

View File

@@ -0,0 +1,97 @@
const fs = require('fs/promises');
const Path = require('path');
const crypto = require('crypto');
const { configuration } = require('../../../../lib/db');
const { plan, gis, info } = require('../../../../lib/db');
const leafletMap = require('../../../../lib/map');
const render = require('../../../../lib/render');
const { url2pdf } = require('../../../../lib/selenium');
// FIXME Refactor when able
const defaultTemplatePath = require('path').resolve(__dirname, "../../../../../../../etc/default/templates/plan.html.njk");
function tmpname (tmpdir="/dev/shm") {
return Path.join(tmpdir, crypto.randomBytes(16).toString('hex')+".tmp");
}
// Express handler: render the project plan as a PDF report.  The HTML
// report is written to a temporary file and converted via Selenium.
const pdf = async function (req, res, next) {
  const fname = tmpname();
  try {
    const planInfo = await info.get(req.params.project, "plan", req.query);
    const lines = await plan.list(req.params.project, req.query);
    const preplotGeoJSON = await gis.project.preplot.lines(req.params.project, {class: "V", ...req.query});
    // Planned lines that carry a geometry, converted to GeoJSON features;
    // the geometry is removed from the properties to avoid duplication.
    const linesGeoJSON = lines.filter(plan => plan.geometry).map(plan => {
      const feature = {
        type: "Feature",
        geometry: plan.geometry,
        properties: plan
      };
      delete feature.properties.geometry;
      return feature;
    });
    // const template = (await configuration.get(req.params.project, "sse/templates/0/template")) || defaultTemplatePath;
    const template = defaultTemplatePath;
    const mapConfig = {
      size: { width: 500, height: 500 },
      layers: [
        {
          // Preplot lines: thin grey background, dimmed further when flagged ntba.
          features: preplotGeoJSON,
          options: {
            style (feature) {
              return {
                opacity: feature.properties.ntba ? 0.2 : 0.5,
                color: "gray",
                weight: 1
              }
            }
          }
        },
        {
          // Planned lines: highlighted on top of the preplot.
          features: linesGeoJSON,
          options: {
            style (feature) {
              return {
                color: "magenta",
                weight: 2
              }
            }
          }
        }
      ]
    }
    const map = leafletMap(mapConfig);
    const data = {
      projectId: req.params.project,
      info: planInfo,
      lines,
      map: await map.getImageData()
    }
    const html = await render(data, template);
    await fs.writeFile(fname, html);
    // Renamed from `pdf` to avoid shadowing the handler itself.
    const pdfBuffer = Buffer.from(await url2pdf("file://"+fname), "base64");
    if ("download" in req.query || "d" in req.query) {
      const extension = "pdf";
      const filename = `${req.params.project.toUpperCase()}-Plan.${extension}`;
      // BUGFIX: interpolate the computed filename (the header previously
      // contained a literal placeholder instead of the template variable).
      res.set("Content-Disposition", `attachment; filename="${filename}"`);
    }
    res.status(200).send(pdfBuffer);
    next();
  } catch (err) {
    // Template errors are wrapped so only their message propagates.
    if (err.message.startsWith("template")) {
      next({message: err.message});
    } else {
      next(err);
    }
  } finally {
    // BUGFIX: the temp file may not exist if we failed before writing it;
    // ignore the unlink error so the original failure propagates instead.
    await fs.unlink(fname).catch(() => {});
  }
};
module.exports = pdf;

View File

@@ -0,0 +1,22 @@
const L = require('leaflet-headless');
// Build a headless Leaflet map from a declarative configuration.
//
// cfg:
//   size   – { width, height } in pixels (both default to 500)
//   bbox   – optional bounds; when absent, derived from all layers' features
//   layers – [{ features: <GeoJSON>, options: <L.geoJSON options> }, ...]
//
// Returns the Leaflet map instance.
// NOTE(review): relies on the global `document` that leaflet-headless
// installs — confirm the module is loaded before this is called.
function leafletMap (cfg) {
const container = document.createElement("div");
// Transparent background — presumably so the rendered image composites
// cleanly over the report page; confirm against the report template.
container.style.setProperty("background-color", "transparent");
let map = L.map(container);
// Fit to the caller's bbox, or to the combined extent of every layer.
const bbox = cfg.bbox || L.geoJSON(cfg.layers.map(i => i.features)).getBounds();
map.fitBounds(bbox);
map.setSize(cfg.size?.width || 500, cfg.size?.height || 500);
for (let layer of cfg.layers) {
L.geoJSON(layer.features, layer.options).addTo(map);
}
map.fitBounds(bbox); // again — setSize changed the viewport, so re-fit
return map;
}
module.exports = leafletMap;

View File

@@ -29,6 +29,10 @@ function njkCollect (entries, key, collectables) {
return out;
}
// Nunjucks filter: pluck the given key from every entry.
function njkGet (entries, key) {
  return entries.map(function (entry) {
    return entry[key];
  });
}
// Nunjucks filter: keep only the first occurrence of each element.
// (Deliberately indexOf-based, matching the original's strict-equality
// semantics.)
function njkUnique (entries) {
  return entries.filter(function (value, position, all) {
    return all.indexOf(value) === position;
  });
}
@@ -37,14 +41,31 @@ function njkPadStart (str, len, chr) {
return String(str).padStart(len, chr);
}
function njkTimestamp (arg) {
function njkTimestamp (arg, precision = "seconds") {
let str;
if (arg) {
if (typeof arg.toISOString === "function") {
return arg.toISOString();
str = arg.toISOString();
} else if (arg == "now") {
str = (new Date()).toISOString();
}
const ts = new Date(arg);
if (!isNaN(ts)) {
return ts.toISOString();
str = ts.toISOString();
}
if (str) {
str = str.replace("T", " ");
if (precision.toLowerCase().startsWith("s")) {
str = str.substr(0, 19)+"Z";
} else if (precision.toLowerCase().startsWith("m")) {
str = str.substr(0, 16)+"Z";
} else if (precision.toLowerCase().startsWith("h")) {
str = str.substr(0, 13)+"Z";
} else if (precision.toLowerCase().startsWith("d")) {
str = str.substr(0, 10)+"Z";
}
return str;
}
}
return arg;
@@ -64,6 +85,7 @@ async function render (data, template) {
nenv.addFilter('find', njkFind);
nenv.addFilter('unique', njkUnique);
nenv.addFilter('collect', njkCollect);
nenv.addFilter('get', njkGet);
nenv.addFilter('padStart', njkPadStart);
nenv.addFilter('timestamp', njkTimestamp);
nenv.addFilter('markdown', njkMarkdown);

File diff suppressed because it is too large Load Diff

View File

@@ -13,7 +13,9 @@
"cookie-parser": "^1.4.5",
"express": "^4.17.1",
"express-jwt": "^6.0.0",
"json2csv": "^5.0.6",
"jsonwebtoken": "^8.5.1",
"leaflet-headless": "gitlab:aaltronav/contrib/leaflet-headless#devel",
"marked": "^2.0.3",
"netmask": "^1.0.6",
"node-fetch": "^2.6.1",