dougal-software/lib/www/server/lib/db/event/import.js
D. Berge 09e4cd2467 Add CSV event import.
Closes #336
2025-08-14 13:33:30 +02:00

const { DEBUG, ERROR } = require('DOUGAL_ROOT/debug')(__filename);
const { setSurvey, transaction } = require('../connection');
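
// `setSurvey` is assumed to check out a pooled client scoped to the given
// project's survey, and `transaction` to wrap BEGIN/COMMIT on a client
// (both from ../connection); that is how they are used below.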

/** Remove a previous import from the database.
 *
 * ATTENTION!
 *
 * This will not just mark the events as deleted but actually
 * remove them.
 */
async function bulk_unimport (projectId, filename, opts = {}) {
  const client = opts.client ?? await setSurvey(projectId);
  try {
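    // Imported events record their provenance in the `meta` JSONB column:
    // `meta.author` names a key whose object holds the source `filename`
    // (see bulk_import below; the key name varies by importer). `meta ? 'author'`
    // keeps only rows that carry that marker, and
    // `meta->(meta->>'author')->>'filename'` follows the indirection to the
    // filename being unimported.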
    const text = `
      DELETE
      FROM event_log
      WHERE meta ? 'author'
        AND meta->(meta->>'author')->>'filename' = $1;
    `;
    const values = [ filename ];
    DEBUG("Removing all event data imported from filename '%s'", filename);
    await client.query(text, values);
  } catch (err) {
    err.origin = __filename;
    throw err;
  } finally {
    if (client !== opts.client) client.release();
  }
}
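
/** Import a batch of events in a single statement.
 *
 * Events previously imported from the same file are removed first, and
 * both steps run inside one transaction, so re-importing a file
 * atomically replaces its events.
 *
 * @returns {Promise<Array>} ids of the newly inserted event_log rows
 */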
async function bulk_import (projectId, payload, opts = {}) {
  const client = opts.client ?? await setSurvey(projectId);
  try {
    if (!payload.length) {
      DEBUG("Called with no rows to be imported. Returning");
      return [];
    }
    const filename = payload[0].meta[payload[0].meta.author].filename;

    // Delete previous data from this file
    await transaction.begin(client);
    await bulk_unimport(projectId, filename, { client });

    // Prepare one array per column so the whole batch can be sent as a
    // single parameterised query.
    const tstamps = [];
    const sequences = [];
    const points = [];
    const remarks = [];
    const labels = [];
    const metas = [];
    for (const event of payload) {
      tstamps.push(event.tstamp ? new Date(event.tstamp) : null);
      sequences.push(Number.isInteger(event.sequence) ? event.sequence : null);
      points.push(Number.isInteger(event.point) ? event.point : null);
      remarks.push(event.remarks || '');
      // Serialise labels as a PostgreSQL array literal. Inside a quoted
      // element, backslashes and double quotes are escaped with a
      // backslash; doubling the quote (CSV-style) is not valid here.
      labels.push(Array.isArray(event.labels) && event.labels.length
        ? `{${event.labels.map(l => `"${l.replace(/\\/g, '\\\\').replace(/"/g, '\\"')}"`).join(',')}}`
        : '{}'
      );
      metas.push(event.meta ? JSON.stringify(event.meta) : '{}');
    }
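
    // For example, labels ['depth', 'QC "final"'] serialise to the literal
    // '{"depth","QC \"final\""}', which the UNNEST($5...)::TEXT[] cast below
    // turns back into a text array.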
    const text = `
      INSERT INTO event_log (tstamp, sequence, point, remarks, labels, meta)
      SELECT
        UNNEST($1::TIMESTAMP[]) AS tstamp,
        UNNEST($2::INTEGER[]) AS sequence,
        UNNEST($3::INTEGER[]) AS point,
        replace_placeholders(UNNEST($4::TEXT[]), UNNEST($1::TIMESTAMP[]),
                             UNNEST($2::INTEGER[]), UNNEST($3::INTEGER[])) AS remarks,
        UNNEST($5::TEXT[])::TEXT[] AS labels,
        UNNEST($6::JSONB[]) AS meta
      RETURNING id;
    `;
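    // Multiple UNNEST calls in a SELECT list advance in lockstep, and all
    // six arrays have the same length, so each output row recombines one
    // event. replace_placeholders is presumably a server-side function that
    // substitutes the event's tstamp/sequence/point into its remarks text.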
    const values = [ tstamps, sequences, points, remarks, labels, metas ];
    DEBUG("Importing %d rows from filename '%s'", payload.length, filename);
    const res = await client.query(text, values);
    await transaction.commit(client);
    return res.rows.map(row => row.id);
  } catch (err) {
    // Roll back so the connection is not handed back mid-transaction.
    // (Assumes transaction.rollback exists alongside begin/commit; any
    // rollback error is ignored in favour of the original one.)
    await transaction.rollback(client).catch(() => {});
    err.origin = __filename;
    throw err;
  } finally {
    if (client !== opts.client) client.release();
  }
}
module.exports = { import: bulk_import, unimport: bulk_unimport };
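
// Usage sketch (hypothetical values; the require path and event shape are
// assumptions based on the fields this module reads):
//
//   const events = require('DOUGAL_ROOT/lib/db/event/import');
//
//   const rows = [{
//     tstamp: '2025-08-14T11:00:00Z',
//     sequence: 12,
//     point: 1041,
//     remarks: 'Start of line',
//     labels: ['imported'],
//     meta: { author: 'csv', csv: { filename: 'events.csv' } }
//   }];
//
//   const ids = await events.import(projectId, rows);  // insert, returns new ids
//   await events.unimport(projectId, 'events.csv');    // hard-delete them again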