Refactor events middleware.

This refactoring accommodates Multiseis / client sequence
exports, which will be served by this endpoint under a
dedicated Content-Type.

In the process, the cache has been fixed and redesigned.

Related to #12.
This commit is contained in:
D. Berge
2020-09-26 17:41:47 +02:00
parent ae8a25f240
commit b76f1f166b
7 changed files with 148 additions and 46 deletions

View File

@@ -105,7 +105,7 @@ app.map({
// },
//
'/project/:project/event/': {
get: [ mw.event.list ],
get: [ mw.event.cache.get, mw.event.list, mw.event.cache.save ],
post: [ mw.event.post ],
put: [ mw.event.put ],
delete: [ mw.event.delete ],

View File

@@ -0,0 +1,83 @@
const { listen } = require('../../../ws/db');
// Event responses take a long time as we are querying a view
// which is the union of other views and non-optimised tables,
// so to speed things up a bit for the user we cache the
// results here.
//
// Each cached entry is indexed by the ETag of the response it
// stands for and records the ID of the project it belongs to
// as well as the timestamp of the request. If the events for a
// project are modified in any way (addition/deletion/change) we
// immediately invalidate all cached responses for that project
// (via the "event" notification channel below); otherwise we
// delete entries once they're older than maxAge (plus up to one
// sweep interval of delay).
//
// When the user sends a request with an ETag, we search for
// the ETag in our cache and return that, if present, instead
// of hitting the database.
const cache = {};
const maxAge = 90*60*1000; // 1.5 hours
// Periodic sweep: evict entries older than maxAge. Because the
// sweep runs every five minutes, an entry may outlive maxAge by
// up to that interval.
setInterval(() => {
    const now = Date.now();
    for (const key in cache) {
        const value = cache[key];
        if ((now - value.tstamp) > maxAge) {
            // console.log("CLEARING", key);
            delete cache[key];
        }
    }
}, 5*60*1000); // Run every five minutes
// Invalidate on write: any "event" change notification drops
// every cached response belonging to the affected project.
listen(["event"], (data) => {
    for (const key in cache) {
        const value = cache[key];
        if (value.pid == data.payload.pid) {
            delete cache[key];
        }
    }
});
function get (req, res, next) {
    // Answer a conditional request from the cache. On a hit we
    // reply 304 and skip the rest of the route (next('route'));
    // on a miss we fall through to the regular handler chain.
    try {
        const etag = req.get('if-none-match');
        const hit = etag ? cache[etag] : undefined;
        if (!hit) {
            // Not cached (or no If-None-Match sent): let the list
            // handler produce a full response.
            next();
            return;
        }
        // Replay the headers recorded when the entry was saved.
        if (hit.headers) {
            for (const [name, value] of Object.entries(hit.headers)) {
                if (name && value) {
                    res.set(name, value);
                }
            }
        }
        // 304s have no body
        // https://tools.ietf.org/html/rfc7232#section-4.1
        res.status(304).send();
        next('route');
    } catch (err) {
        next(err);
    }
}
function save (req, res, next) {
    // Record the response we just produced, keyed by its ETag, so
    // a later conditional request can be answered from the cache
    // (see `get`). The project ID is kept for invalidation and the
    // timestamp for expiry.
    const etag = res.getHeader("etag");
    if (etag) {
        cache[etag] = {
            // Headers replayed on a 304. RFC 7232 §4.1 requires a
            // 304 to carry any ETag (and other representation
            // headers) that the 200 would have carried, so store
            // the ETag alongside the Content-Type.
            headers: {
                "Content-Type": res.getHeader("content-type") || "application/json",
                "ETag": etag
            },
            pid: req.params.project,
            tstamp: Date.now()
        };
        // console.log("CACHE", cache);
    }
    next();
}
module.exports = { get, save };

View File

@@ -4,5 +4,6 @@ module.exports = {
get: require('./get'),
post: require('./post'),
put: require('./put'),
delete: require('./delete')
delete: require('./delete'),
cache: require('./cache')
}

View File

@@ -1,44 +0,0 @@
const { event } = require('../../../lib/db');
const { listen } = require('../../../ws/db');
const cache = {};
const maxAge = 1.5*60*1000;
setInterval(() => {
const now = Date.now();
for (const key in cache) {
const value = cache[key];
if ((now - value.tstamp) > maxAge) {
// console.log("CLEARING", key);
delete cache[key];
}
}
}, 90*60*1000);
listen(["event"], (data) => {
for (const key in cache) {
const value = cache[key];
if (value.pid == data.payload.pid) {
delete cache[key];
}
}
});
module.exports = async function (req, res, next) {
try {
// console.log(cache);
const etag = req.get('if-none-match');
if (etag && cache[etag]) {
res.status(304).send(cache[etag].response);
} else {
const response = await event.list(req.params.project, req.query);
res.status(200).send(response);
cache[res.getHeaders()["etag"]] = {response, pid: req.params.project, tstamp: Date.now()}
}
next();
} catch (err) {
next(err);
}
};

View File

@@ -0,0 +1,23 @@
const { event } = require('../../../../lib/db');
const geojson = async function (req, res, next) {
    // GeoJSON representation of the project's event list.
    //
    // Events without a geometry are dropped; each remaining event
    // becomes a Feature whose `geometry` is the event's geometry
    // and whose `properties` are all of its other fields.
    try {
        const events = await event.list(req.params.project, req.query);
        // Rest-destructuring builds a fresh `properties` object
        // instead of `delete`-ing the geometry off the row, so the
        // objects returned by `event.list` are never mutated.
        const response = events
            .filter((evt) => evt.geometry)
            .map(({ geometry, ...properties }) => ({
                type: "Feature",
                geometry,
                properties
            }));
        res.status(200).send(response);
        next();
    } catch (err) {
        next(err);
    }
}
module.exports = geojson;

View File

@@ -0,0 +1,25 @@
const json = require('./json');
const geojson = require('./geojson');
// const seis = require('./seis');
module.exports = async function (req, res, next) {
try {
const handlers = {
"application/json": json,
"application/geo+json": geojson,
// "application/vnd.seis+json": seis
};
const mimetype = req.accepts(Object.keys(handlers));
if (mimetype) {
res.set("Content-Type", mimetype);
await handlers[mimetype](req, res, next);
} else {
res.status(406).send();
next();
}
} catch (err) {
next(err);
}
}

View File

@@ -0,0 +1,14 @@
const { event } = require('../../../../lib/db');
const json = async function (req, res, next) {
    // Plain JSON representation: forward the event list verbatim.
    try {
        const events = await event.list(req.params.project, req.query);
        res.status(200).send(events);
        next();
    } catch (err) {
        next(err);
    }
};
module.exports = json;