Compare commits

...

339 Commits

Author SHA1 Message Date
D. Berge
673c60a359 Add error handling 2025-08-22 16:40:06 +02:00
D. Berge
99e425270c getGroup() returns all comparisons.
Just like saveGroup() saves all.
2025-08-22 16:39:22 +02:00
D. Berge
63633715e2 Guard against underpopulated groups 2025-08-22 16:16:10 +02:00
D. Berge
8afac5c150 Fix indentation 2025-08-22 16:01:20 +02:00
D. Berge
11168def68 Fix typos 2025-08-22 16:01:20 +02:00
D. Berge
0f477b8e65 Replace tilt icons 2025-08-22 16:01:20 +02:00
D. Berge
03b00a4ea7 Remove dead code 2025-08-22 16:01:20 +02:00
D. Berge
c5faa53bee Add more view controls to group map 2025-08-22 16:01:20 +02:00
D. Berge
46b2512530 Add more view controls to map 2025-08-22 16:01:20 +02:00
D. Berge
db4c9a0235 Add script to update comparison groups.
This should be run at regular intervals (via cron or so) to keep
the comparisons up to date.

It is not necessarily a good idea to run this as part of the
runner.sh script as it will delay other tasks trying to
update the active project every time.

Probably OK to put it on a cronjob every 2‒24 hours. If two
copies are running concurrently, that should not break anything,
but it will increase the server load.
2025-08-22 16:01:20 +02:00
D. Berge
1a12ea13ed Return project timestamps 2025-08-22 16:01:20 +02:00
D. Berge
81717c37f1 Add option to return project timestamp 2025-08-22 16:01:20 +02:00
D. Berge
6377e8854c Updated wanted db schema 2025-08-22 16:01:20 +02:00
D. Berge
d3446d03bd Add database upgrade file 44 2025-08-22 16:01:20 +02:00
D. Berge
a52f7811f2 Clean up dead code 2025-08-22 16:01:20 +02:00
D. Berge
ef2bd4888e Update the required schema version.
This is necessary for the comparisons code to work.
2025-08-22 16:01:20 +02:00
D. Berge
8801442c92 Don't show monitor lines by default 2025-08-22 16:01:20 +02:00
D. Berge
30f65dbeaa Make loading indicator spin when 0% 2025-08-22 16:01:20 +02:00
D. Berge
c2f53ac150 Remove unneeded dependency 2025-08-22 16:01:20 +02:00
D. Berge
4328fc4d2a Fix typo 2025-08-22 16:01:20 +02:00
D. Berge
2c2eb8fceb Add group map view 2025-08-22 16:01:20 +02:00
D. Berge
767c2f2cb1 Add support for type 4 decoding 2025-08-22 16:01:20 +02:00
D. Berge
57a73f7d1c Fix component paths 2025-08-22 16:01:20 +02:00
D. Berge
9f299056d8 Move components to subdirectory 2025-08-22 16:01:20 +02:00
D. Berge
5d3c59867c Return type 4 sequence data 2025-08-22 16:01:20 +02:00
D. Berge
76b8355ede Add encoding type 4 to bundle 2025-08-22 16:01:20 +02:00
D. Berge
76b55f514d Link from group summary to individual projects 2025-08-22 16:01:20 +02:00
D. Berge
4e1d3209df Don't request summaries in ProjectList.
Those will be populated directly by Vuex.
2025-08-22 16:01:20 +02:00
D. Berge
f21ff7ee38 Try to improve responsiveness when refreshing project list 2025-08-22 16:01:20 +02:00
D. Berge
2446b42785 Expand groups router definition 2025-08-22 16:01:20 +02:00
D. Berge
196e772004 Make event handler more specific 2025-08-22 16:01:20 +02:00
D. Berge
674d818fee Rework comparison components.
More focused on error ellipses.
2025-08-22 16:01:20 +02:00
D. Berge
5527576679 Refresh comparisons when notified of changes 2025-08-22 16:01:20 +02:00
D. Berge
fe7c016dea Add control to reset comparisons view 2025-08-22 16:01:20 +02:00
D. Berge
b7543aa6c4 Add overlays when loading / data error 2025-08-22 16:01:20 +02:00
D. Berge
b48a060dc0 Don't cache comparisons in the API 2025-08-22 16:01:20 +02:00
D. Berge
c0f9a2de5a Don't save comparison samples 2025-08-22 16:01:20 +02:00
D. Berge
32a9c7a5f2 Add comparisons channel to notifications 2025-08-22 16:01:20 +02:00
D. Berge
f1f74080f6 Add database upgrade file 43 2025-08-22 16:01:20 +02:00
D. Berge
c5eb8e45f1 Add database upgrade file 42 2025-08-22 16:01:20 +02:00
D. Berge
caab968fd6 Add database upgrade file 41 2025-08-22 16:01:20 +02:00
D. Berge
5f28d1be7b Don't overwrite existing comparisons unless forced.
opts.overwrite = true will cause existing comparisons to be
recomputed.
2025-08-22 16:01:20 +02:00
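A hedged sketch of how the overwrite flag might be passed; the module path is taken from bin/update_comparisons.js further down in this diff, but the exact function accepting the `opts` object and the group name shown here are assumptions:

```js
// Hypothetical usage: saveGroup() taking an opts argument is an assumption.
const cmp = require('../lib/www/server/lib/comparisons');

async function recomputeGroup (groupName) {
  // Without opts, existing comparisons would be kept as they are;
  // opts.overwrite = true forces them to be recalculated.
  await cmp.saveGroup(groupName, { overwrite: true });
}

recomputeGroup('example-group');
```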
D. Berge
22c9537889 Fix non-existent method 2025-08-22 16:01:20 +02:00
D. Berge
e95aaa7de7 Add link to group comparison from project list 2025-08-22 16:01:20 +02:00
D. Berge
4f44f5a10c Add frontend route for 4D comparisons 2025-08-22 16:01:20 +02:00
D. Berge
0ba467d34c Add 4D comparisons list Vue component 2025-08-22 16:01:20 +02:00
D. Berge
2b5b302e54 Add 4D comparisons Vue component 2025-08-22 16:01:20 +02:00
D. Berge
28938e27a9 Add utilities for transforming duration objects 2025-08-22 16:01:20 +02:00
D. Berge
97f96fdc1e Add Vue components for 4D comparisons 2025-08-22 16:01:20 +02:00
D. Berge
1e3ce35f76 Add set operations utilities 2025-08-22 16:01:20 +02:00
D. Berge
619a886781 Add comparison API endpoints 2025-08-22 16:01:20 +02:00
D. Berge
c054e63325 Add two new bundle types.
Of which 0xa is not actually used and 0xc is used for geometric
comparison data ([ line, point, δi, δj ]).
2025-08-22 16:01:20 +02:00
D. Berge
fd94b3b6f4 Add comparison functions to server/lib 2025-08-22 16:01:20 +02:00
D. Berge
7b67b4afc9 Fix bug trying to get project info for undefined 2025-08-22 16:01:20 +02:00
D. Berge
7c52ada922 Add project group info to Vuex 2025-08-22 16:01:20 +02:00
D. Berge
9072bbe389 Add iterators 2025-08-22 16:01:20 +02:00
D. Berge
6639b7110b Add sequence navigation controls to log.
Closes #135
2025-08-22 15:57:49 +02:00
D. Berge
be6652b539 Name Shotlog route 2025-08-22 15:56:59 +02:00
D. Berge
bf054d3902 Persist event log user preferences 2025-08-22 15:56:12 +02:00
D. Berge
2734870871 Fix errors when loading graphs.
Errors due to the parent element having zero width / height or
rendering too early.
2025-08-22 15:54:17 +02:00
D. Berge
52f49e6799 Fix log entries pagination.
Fixes #340
2025-08-22 12:31:19 +02:00
D. Berge
30150a8728 Kill runner if it hangs around for too long.
This gives the import processes a chance to run.
2025-08-21 15:33:05 +02:00
D. Berge
ef8466992c Add automatic event icon to log.
So that the user can visually see which events were created by
Dougal (not including QC events).
2025-08-18 11:22:58 +02:00
D. Berge
8e4e70cbdc Add server status info to help dialogue 2025-08-17 13:19:51 +02:00
D. Berge
4dadffbbe7 Refactor Selenium to make it more robust.
It should stop runaway Firefox processes.
2025-08-17 13:18:04 +02:00
D. Berge
24dcebd0d9 Remove logging statements 2025-08-17 13:17:22 +02:00
D. Berge
12a762f44f Fix typo in @dougal/binary 2025-08-16 14:55:53 +02:00
D. Berge
ebf13abc28 Merge branch '337-fix-event-queue' into 'devel'
Resolve "Automatic event detection fault: soft start on every shot during line"

Closes #337

See merge request wgp/dougal/software!61
2025-08-16 12:55:15 +00:00
D. Berge
b3552db02f Add error checking to ETag logic 2025-08-16 11:36:43 +02:00
D. Berge
cd882c0611 Add debug info to soft start detection 2025-08-16 11:36:43 +02:00
D. Berge
6fc9c020a4 Fix off-by-one error in LGSP detection 2025-08-16 11:36:43 +02:00
D. Berge
75284322f1 Modify full volume detection on Smartsource.
The Smartsource firmware seems to have changed, rendering the old
test invalid.
2025-08-16 11:36:43 +02:00
D. Berge
e849c47f01 Remove old queue implementation 2025-08-16 11:36:43 +02:00
D. Berge
387d20a4f0 Rewrite automatic event handling system 2025-08-16 11:36:43 +02:00
D. Berge
2fab06d340 Don't send timestamp when patching seq+point events.
Closes #339.
2025-08-16 11:35:35 +02:00
D. Berge
7d2fb5558a Hide switches to enable additional graphs.
All violin plots as well as position scatter plots and histograms
are shown by default. This is due to #338.

For some reason, having them enabled from the get go does not
cause any problems.
2025-08-15 18:09:51 +02:00
D. Berge
764e2cfb23 Rename endpoint 2025-08-14 13:34:36 +02:00
D. Berge
bf1af1f76c Make it explicit that :id is numeric 2025-08-14 13:34:27 +02:00
D. Berge
09e4cd2467 Add CSV event import.
Closes #336
2025-08-14 13:33:30 +02:00
D. Berge
2009d73a2b Fix action registration and unregistration 2025-08-13 17:03:00 +02:00
D. Berge
083ee812de Use cookies for authentication as a last resort.
Fixes #335
2025-08-13 16:54:38 +02:00
D. Berge
84510e8dc9 Add proper logging 2025-08-13 15:42:49 +02:00
D. Berge
7205ec42a8 Fix handler registration.
The way it was being done meant that unregisterHandlers would not
have worked.
2025-08-13 15:42:49 +02:00
D. Berge
73d85ef81f Fix scheduling of token refresh via websocket 2025-08-13 12:58:36 +02:00
D. Berge
6c4dc35461 Fix bad status on preplot lines tab
If there were no raw / final sequences on a line, planned sequences
would not show either.
2025-08-13 12:45:50 +02:00
D. Berge
a5ebff077d Fix authentication middleware erroring on IPv6 2025-08-13 11:50:20 +02:00
D. Berge
2a894692ce Throttle snack notifications 2025-08-12 00:22:09 +02:00
D. Berge
25690eeb52 Fix showSnack in main.js 2025-08-11 23:48:08 +02:00
D. Berge
3f9776b61d Let the user know when we're getting gateway errors 2025-08-11 23:47:25 +02:00
D. Berge
8c81daefc0 Move the two /configuration endpoints next to each other 2025-08-11 22:20:46 +02:00
D. Berge
c173610e87 Simplify middleware 2025-08-11 22:19:51 +02:00
D. Berge
301e5c0731 Set headers only on 304 2025-08-11 22:06:51 +02:00
D. Berge
48d9f45fe0 Clean up debug messages 2025-08-11 22:06:20 +02:00
D. Berge
cd23a78592 Merge branch '190-refactor-map' into 'devel'
Resolve "Refactor map"

Closes #190, #322, #323, #324, #325, #326, and #321

See merge request wgp/dougal/software!25
2025-08-11 13:01:00 +00:00
D. Berge
e368183bf0 Show release notes for previous versions too 2025-08-11 14:59:22 +02:00
D. Berge
02477b071b Compress across the board.
It's still subject to the compression module's filters, but now
we try to compress every response in principle.
2025-08-11 13:57:11 +02:00
D. Berge
6651868ea7 Enable compression for vessel track responses 2025-08-11 13:40:53 +02:00
D. Berge
c0b52a8245 Be more aggressive about what gets compressed 2025-08-11 12:42:48 +02:00
D. Berge
90ce6f063e Remove dead code 2025-08-11 02:31:43 +02:00
D. Berge
b2fa0c3d40 Flatten vesselTrackConfig for better reactivity 2025-08-11 02:31:12 +02:00
D. Berge
83ecaad4fa Change vessel colour 2025-08-11 01:57:40 +02:00
D. Berge
1c5fd2e34d Calculate properly first / last timestamps of vessel tracks 2025-08-11 01:56:46 +02:00
D. Berge
aabcc74891 Add compression to some endpoints.
Consideration will be given to adding (conditional) compression
to all endpoints.
2025-08-11 01:53:50 +02:00
D. Berge
2a7b51b995 Squash another cookie 2025-08-11 01:52:04 +02:00
D. Berge
5d19ca7ca7 Add authentication to vessel track request 2025-08-10 22:03:25 +02:00
D. Berge
910195fc0f Comment out "Map settings" control on map.
Not sure it will actually be used, after all.
2025-08-10 21:53:55 +02:00
D. Berge
6e5570aa7c Add missing require 2025-08-10 21:53:04 +02:00
D. Berge
595c20f504 Add vessel position to map.
Updates via websocket using the `realtime` channel notification
message.
2025-08-10 21:52:02 +02:00
D. Berge
40d0038d80 Add vessel track layer to map.
Track length may be changed by clicking on the appropriate icon.
2025-08-10 21:47:43 +02:00
D. Berge
acdf118a67 Add new /vessel/track endpoints.
This is a variation on /navdata but returns data more suitable
for plotting vessel tracks on the map.
2025-08-10 21:39:35 +02:00
D. Berge
b9e0975d3d Add clone routine to project DB lib (WIP).
This relates to #333.
2025-08-10 21:37:12 +02:00
D. Berge
39d9c9d748 Fix GeoJSON returned by /navdata endpoint 2025-08-10 21:36:37 +02:00
D. Berge
b8b25dcd62 Update IP getter script to return LAN address.
get-ip.sh internet: returns the first IP address found that has
internet access.

get-ip.sh local (or no argument): returns the list of non-loopback
IPs minus the one that has internet access.

This means that update-dns.sh now sends the first IP address that
does *not* have internet access.
2025-08-09 22:27:23 +02:00
D. Berge
db97382758 Add scripts to automatically update the LAN DNS records.
./sbin/update-dns.sh may be called at regular intervals (one hour
or so) via crontab.

It will automatically detect:
- its local host name (*.lan.dougal.aaltronav.eu); and
- which IP has internet access, if any.

Armed with that information and with the dynamic DNS API password
stored in DYNDNS_PASSWD in ~/.dougalrc, it will update the relevant
DNS record.

For this to work, the first `lan.dougal` hostname in the Nginx
configuration must be the one that is set up for dynamic update.
Other `lan.dougal` hostnames should be CNAME records pointing to
the first one.
2025-08-09 18:37:15 +02:00
D. Berge
ae8e5d4ef6 Do not use cookies for backend authentication 2025-08-09 12:43:17 +02:00
D. Berge
2c1a24e4a5 Do not store JWT in document.cookie 2025-08-09 12:14:17 +02:00
D. Berge
0b83187372 Provide authorisation details to Deck.gl layers.
Those layers that call API endpoints directly no longer need to
rely on cookies as they use the JWT token directly via the
`Authorization` header.
2025-08-09 12:12:24 +02:00
D. Berge
3dd51c82ea Adapt map links to new format 2025-08-08 18:54:25 +02:00
D. Berge
17e6564e70 Implement map crosshairs.
These are coordinates that are supplied in the fragment part of the
URL. When available, a marker is shown at the given positions.
Labels may also be given and are also shown.
2025-08-08 18:51:54 +02:00
D. Berge
3a769e7fd0 Adapt to new map implementation.
Note: if we implement a fallback to the old Leaflet code, the new
hash format will need to be accepted in Leaflet too.
2025-08-08 16:10:17 +02:00
D. Berge
7dde0a15c6 Fix handling of view state and layers in URL hash 2025-08-08 16:09:32 +02:00
D. Berge
2872af8d60 Refresh sequence line data on every render 2025-08-08 13:48:49 +02:00
D. Berge
4e581d5664 Add final-raw heatmap 2025-08-08 13:47:30 +02:00
D. Berge
a188e9a099 Tweak colour scales 2025-08-08 13:45:54 +02:00
D. Berge
cd6ad92d5c Use the same names in the user interface as in the code 2025-08-08 13:44:42 +02:00
D. Berge
08dfe7ef0a Add notification handlers to Map.
They reload any sequence data on notification of changes.
2025-08-08 12:45:15 +02:00
D. Berge
6a5238496e Add possibility to refresh points map while loading binary data 2025-08-08 12:44:21 +02:00
D. Berge
bc237cb685 Add final data points layer to map 2025-08-08 12:43:27 +02:00
D. Berge
4957142fb1 Refactor sequenceBinaryData.
It is no longer a computed property but actual data. It gets
recalculated on demand via getSequenceBinaryData().
2025-08-08 12:42:38 +02:00
D. Berge
5a19c81ed1 Unregister notification handlers.
When leaving the Project component, all its notification handlers
will be unregistered, otherwise we end up with a memory leak.
2025-08-08 12:22:56 +02:00
D. Berge
b583dc6c02 Support unregistering notification handlers 2025-08-08 12:20:58 +02:00
D. Berge
134e3bce4e Add client-side support for type 3 bundles (final data) 2025-08-08 12:20:04 +02:00
D. Berge
f5ad9d7182 Use sequenceBinaryData for raw data points layer.
Saves us from ending up with an extra copy of the data.
2025-08-08 12:18:07 +02:00
D. Berge
07874ffe0b Filter missing values out of JSON data for binary packing 2025-08-08 12:15:39 +02:00
D. Berge
695add5da6 Increase the resolution of position errors in bundle.
Note: this does not actually matter as of this commit as we are
storing those values as Float32 but it will become relevant when
we start packing them as Int16.
2025-08-08 12:15:05 +02:00
D. Berge
6a94287cba Add type 3 binary bundle.
Consisting of final positions + errors.
2025-08-08 11:24:16 +02:00
D. Berge
c2ec2970f0 Remove dead code 2025-08-08 11:20:03 +02:00
D. Berge
95d6d0054b Adapt GIS endpoint to new preplots tables structure 2025-08-07 22:02:04 +02:00
D. Berge
5070be5ff3 Handle event changes 2025-08-07 20:18:18 +02:00
D. Berge
d5e77bc946 Move API action option to the correct argument 2025-08-07 19:20:27 +02:00
D. Berge
f6faad17db Fix Python's idiotic syntax 2025-08-07 17:17:43 +02:00
D. Berge
94cdf83b13 Change access permissions to files endpoints 2025-08-07 16:23:55 +02:00
D. Berge
6a788ae28b Add logging statements 2025-08-07 16:23:14 +02:00
D. Berge
544117eec3 Fix retrieval of preplot previews 2025-08-07 16:20:00 +02:00
D. Berge
e5679ec14b Move API action option to the correct argument 2025-08-07 16:19:13 +02:00
D. Berge
a1c174994c Remove debugging statements 2025-08-07 13:03:43 +02:00
D. Berge
2db8cc3116 Tweak wording 2025-08-07 12:38:04 +02:00
D. Berge
99b1a841c5 Let the user know when using a remote frontend.
Note: this relies on the gateway Nginx server configurations
including an X-Dougal-Server header, as follows:

add_header X-Dougal-Server "remote-frontend" always;
2025-08-07 12:30:28 +02:00
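A minimal sketch of how the frontend might detect that header; the probe endpoint and the exact check are assumptions, not the actual component code:

```js
// Hypothetical probe: any API response passing through the gateway would carry the header.
async function isRemoteFrontend () {
  const res = await fetch("/api/version", { cache: "no-store" });  // endpoint name is an assumption
  return res.headers.get("X-Dougal-Server") === "remote-frontend";
}
```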
D. Berge
6629e25644 Do not error if version history is undefined 2025-08-07 11:03:07 +02:00
D. Berge
7f5f64acb1 Check for lineNameInfo when importing P1/11 2025-08-07 11:00:42 +02:00
D. Berge
8f87df1e2f Comment out debug output 2025-08-07 10:52:13 +02:00
D. Berge
8399782409 Set response auth headers conditionally 2025-08-07 10:42:37 +02:00
D. Berge
9c86018653 Auto-refresh materialised view if necessary 2025-08-07 10:42:08 +02:00
D. Berge
a15c97078b Fix typo in access middleware 2025-08-07 10:41:29 +02:00
D. Berge
d769ec48dd Request fresh responses when refreshing data from the server 2025-08-07 10:40:23 +02:00
D. Berge
fe421f545c Add data integrity check 2025-08-06 22:54:01 +02:00
D. Berge
caa8fec8cc Log warning 2025-08-06 22:52:06 +02:00
D. Berge
49fc260ace Clear cookie on logout 2025-08-06 22:51:44 +02:00
D. Berge
b7038f542c Fix storage of JWT in localStorage 2025-08-06 22:51:20 +02:00
D. Berge
40ad0e7650 Fix database upgrades 38, 39, 40.
Ensure the changes are applied to the public schema.
2025-08-06 22:50:20 +02:00
D. Berge
9006deb8be Change error notifications 2025-08-06 12:01:03 +02:00
D. Berge
6e19b8e18f Do not fail if old / new missing from notifications.
The server will actually remove those when the notification
would exceed a certain size, so it's expected that those might
be null.
2025-08-06 11:59:52 +02:00
D. Berge
3d474ad8f8 Update package-lock.json 2025-08-06 11:31:51 +02:00
D. Berge
821af18f29 Removed planned line points layer control.
Not necessary as we already have the preplots layer.
2025-08-06 11:25:44 +02:00
D. Berge
9cf15ce9dd Edit code comments 2025-08-06 11:24:39 +02:00
D. Berge
78838cbc41 Implement planned lines layer 2025-08-06 11:20:40 +02:00
D. Berge
8855da743b Handle refresh on data change for some layers.
Binary layers not included yet.
2025-08-06 11:17:37 +02:00
D. Berge
c67a60a7e6 Fix labels handling in events map layer 2025-08-06 11:14:20 +02:00
D. Berge
81e06930f0 Silence console error 2025-08-06 11:05:15 +02:00
D. Berge
0263eab6d1 Add extra mutations to plan Vuex module.
They're not actually needed though. 🙄
2025-08-06 11:03:11 +02:00
D. Berge
931219850e Fix wrong freezing of Vuex data.
It's the sequence items themselves that benefit from freezing,
not the sequence array itself.
2025-08-06 11:01:57 +02:00
D. Berge
12369d5419 Support Markdown-formatted snack messages 2025-08-06 11:01:10 +02:00
D. Berge
447003c3b5 Implement pub-sub handler system for ws notifications. 2025-08-06 10:59:17 +02:00
D. Berge
be7157b62c Downgrade gracefully if window.caches is not available.
This should not happen in production, as the Cache API is
widely implemented as of the date of this commit, but it
will not be available if the user is not in a secure
context. That should only happen during testing.
2025-08-06 10:45:05 +02:00
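A minimal sketch of the kind of guard described above, assuming a cache name of "dougal-api"; it is not the actual implementation:

```js
// Fall back gracefully when the Cache API is unavailable (e.g. outside a secure context).
async function cachedFetch (request) {
  if (!("caches" in window)) {
    return fetch(request);                          // no caching possible, plain fetch
  }
  const cache = await caches.open("dougal-api");    // cache name is an assumption
  const hit = await cache.match(request);
  if (hit) return hit;
  const response = await fetch(request);
  if (response.ok) await cache.put(request, response.clone());
  return response;
}
```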
D. Berge
8ef56f9946 Pass a clone of Response to API callback 2025-08-06 10:42:34 +02:00
D. Berge
f2df16fe55 Fix getting project configuration data 2025-08-06 10:41:42 +02:00
D. Berge
96db6b1376 Add a more helpful message if cause of failure is known 2025-08-06 10:41:08 +02:00
D. Berge
36d86c176a Only send websocket notifications to authenticated users 2025-08-06 10:40:16 +02:00
D. Berge
9c38af4bc0 Improve handling of JWT over websocket.
When a valid `token` message is received from a client, the
socket server will automatically push refreshed tokens at
about half lifetime of the received JWT.

If an invalid token is received the connection is closed.

See #304.
2025-08-06 10:26:53 +02:00
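A hedged sketch of the scheduling idea described above (decode the token's expiry and push a refresh at roughly half of the remaining lifetime); the message shape and the issueToken() helper are assumptions, not the actual server code:

```js
// Hypothetical handler for a `token` message received over the websocket (Node.js).
function scheduleTokenRefresh (ws, jwt) {
  const payload = JSON.parse(Buffer.from(jwt.split(".")[1], "base64url").toString());
  const msLeft = payload.exp * 1000 - Date.now();
  if (msLeft <= 0) {
    ws.close();                                     // expired or unusable token: drop the connection
    return;
  }
  setTimeout(() => {
    const refreshed = issueToken(payload);          // issueToken() is assumed to exist
    ws.send(JSON.stringify({ type: "token", token: refreshed }));
  }, msLeft / 2);                                   // push a fresh token at about half lifetime
}
```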
D. Berge
be5c6f1fa3 Fix user authentication.
* Use X-JWT header for sending authentication info
  both from server to client and from client to server.
* Send token in body of login response.
* Also use Set-Cookie: JWT=… so that calls that are
  not issued directly by Dougal (e.g. Deck.gl layers
  with a URL `data` property) work without having to
  jump through hoops.

Closes #321
2025-08-06 10:21:37 +02:00
D. Berge
17b9d60715 Make sourceLayer optional in getPickingInfo 2025-08-04 18:47:15 +02:00
D. Berge
e2dd563054 Save changed to package-lock.json 2025-08-03 13:50:59 +02:00
D. Berge
67dcc2922b Fix binary bundling of delta spread 2025-08-03 13:49:55 +02:00
D. Berge
11e84f47eb Fix refresh to remove only data for current project 2025-08-03 13:48:51 +02:00
D. Berge
1066a03b25 Leave layer menu open when still focused 2025-08-03 13:48:04 +02:00
D. Berge
08440e3e21 Add tooltip to heatmap control 2025-08-03 13:47:48 +02:00
D. Berge
d46eb3b455 Add gun misfire options to menu 2025-08-03 13:47:07 +02:00
D. Berge
864b430320 Fix no fire / autofire values (they're not boolean!) 2025-08-03 13:45:56 +02:00
D. Berge
61cbefd0e9 Tweak heatmap parameters 2025-08-03 13:45:31 +02:00
D. Berge
29c484affa Add misfire options to heatmap 2025-08-03 13:45:07 +02:00
D. Berge
0806b80445 Remove dead code 2025-08-03 13:43:53 +02:00
D. Berge
b5a3a22892 Add full screen control to map 2025-08-03 11:57:59 +02:00
D. Berge
c13aa23e2f Add heatmaps to map (various data facets) 2025-08-03 11:57:12 +02:00
D. Berge
3366377ab0 Use preplot point layers on map 2025-08-03 11:56:05 +02:00
D. Berge
59a90e352c Add tooltips for preplot layers 2025-08-03 11:53:55 +02:00
D. Berge
0f207f8c2d Add heatmap layer 2025-08-03 11:53:24 +02:00
D. Berge
c97eaa64f5 Add preplot point layers (sail / source line) 2025-08-03 11:52:48 +02:00
D. Berge
5b82f8540d Use DougalBinaryLoader for sequence points layers 2025-08-03 11:51:47 +02:00
D. Berge
d977d9c40b Add support for udv values 0 and 1 to DougalSequenceLayer.
udv = 0 → sail line points
udv = 1 → source line points
2025-08-03 11:44:42 +02:00
D. Berge
d16fb41f24 Add DougalBinaryLoader Deck.gl loader 2025-08-03 11:39:03 +02:00
D. Berge
c376896ea6 Also serve preplot source/sail points as binary.
This commit adds the ability to pack preplot points in Dougal
binary format. Sail line points take udv=0 and source line points
take udv=1 – udv=2 remains sequence data.

Endpoints for retrieving the data in JSON, GeoJSON and binary
formats have also been added. Data may be retrieved as a single
line or for a whole project.
2025-08-03 11:17:31 +02:00
D. Berge
2bcdee03d5 Further refactor Map component.
Map.sequencesBinaryData is now a single object instead of an
array of objects.

DougalSequenceLayer has been greatly simplified. It now
inherits from ScatterplotLayer rather than CompositeLayer.

DougalEventsLayer added. It shows either a ScatterplotLayer
or a ColumnsLayer depending on zoom level.
2025-08-02 16:00:54 +02:00
D. Berge
44113c89c0 Further refactor Map component.
Layer and tooltip definitions have been split out into different
files as mixins.

Uses Dougal binary bundles.
2025-08-01 17:18:16 +02:00
D. Berge
17c6d9d1e5 Add DougalSequenceLayer 2025-08-01 17:16:36 +02:00
D. Berge
06cc16721f Remove SequenceDataLayer 2025-08-01 17:15:27 +02:00
D. Berge
af7485370c Limit number of simultaneous requests to the API 2025-08-01 17:11:34 +02:00
D. Berge
ad013ea642 Add additional formats for sequence list endpoint.
The original and default "Accept: application/json" will return
a sequence summary.

"Accept: application/geo+json" will return a GeoJSON of the
entire project.

"Accept: application/vnd.aaltronav.dougal+octet-stream" will
return the entire project in Dougal's binary format.
2025-08-01 17:07:37 +02:00
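A hedged client-side illustration of the three content negotiation options listed above; the Accept values are quoted from the commit message, but the endpoint path and project identifier are assumptions:

```js
// Hypothetical endpoint path for a project's sequence list.
const url = "/api/projects/example/sequences";

async function fetchSequences () {
  // Default: sequence summary as JSON.
  const summary = await fetch(url, { headers: { Accept: "application/json" } })
    .then(r => r.json());

  // Whole project as GeoJSON.
  const geojson = await fetch(url, { headers: { Accept: "application/geo+json" } })
    .then(r => r.json());

  // Whole project in Dougal's binary format.
  const bundle = await fetch(url, {
    headers: { Accept: "application/vnd.aaltronav.dougal+octet-stream" }
  }).then(r => r.arrayBuffer());

  return { summary, geojson, bundle };
}
```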
D. Berge
48d5986415 Change handling of sequence parameter.
Allow `null` to be used in addition to `0` in
db.sequence.get() to return all sequences.
2025-08-01 17:05:38 +02:00
D. Berge
471f4e8e64 Add synonyms to db.sequence.get() options 2025-08-01 17:05:05 +02:00
D. Berge
4be99370e6 Change the MIME type of binary responses 2025-08-01 16:50:32 +02:00
D. Berge
e464f5f887 Refactor code handling binary sequence requests.
Instead of the user giving the recipe for the payload, it now
only handles predefined payload configurations. Those are
denoted by the `type` query parameter. The only valid value
as of this commit is `type=2`.

Look at lib/binary/bundle.js for the definition of a type 2
bundle.
2025-08-01 16:47:50 +02:00
D. Berge
cc8d790ad8 Remove dead code (replaced by @dougal/binary) 2025-08-01 16:43:22 +02:00
D. Berge
32c6e2c79f Add @dougal/concurrency module 2025-08-01 11:22:30 +02:00
D. Berge
ba7221ae10 Implement getData*() functions in DougalBinaryBundle 2025-07-30 17:41:17 +02:00
D. Berge
1cb9d4b1e2 Add @dougal/binary module.
It encodes / decodes sequence / preplot data using an efficient
binary format for sending large amounts of data across the wire
and for (relatively) memory efficient client-side use.
2025-07-30 17:37:00 +02:00
D. Berge
2a0025cdbf Try to fix FSP / LSP times for the third time 2025-07-29 13:31:17 +02:00
D. Berge
f768f31b62 Aesthetic changes to map layers control 2025-07-28 12:09:02 +02:00
D. Berge
9f91b1317f Add map settings control (mock up).
This is not yet implemented but left visible for demo purposes.

Intended to configure things such as vessel track length, possibly
whether the latest track or the track within the current prospect
is shown, etc.
2025-07-28 12:06:56 +02:00
D. Berge
3b69a15703 Add manual refresh control to map.
It may or may not be permanent, once tasks #322, #323, #324, #325
are implemented.

Closes #326
2025-07-28 12:05:10 +02:00
D. Berge
cd3bd8ab79 Fix FSP/LSP times (again) 2025-07-28 12:04:27 +02:00
D. Berge
df193a99cd Add sleep() method to main.js.
Useful when the UI needs to "pause" for UX reasons. Can be called
from any component with `this.$root.sleep(ms)`.
2025-07-28 12:02:49 +02:00
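A minimal sketch of what such a helper could look like; the real implementation lives on the Vue root instance and may differ:

```js
// Promise-based pause; per the commit message it is exposed on the root
// instance so any component can call `await this.$root.sleep(ms)`.
function sleep (ms) {
  return new Promise(resolve => setTimeout(resolve, ms));
}
```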
D. Berge
580e94a591 Await on binary data download requests 2025-07-28 11:09:55 +02:00
D. Berge
3413641c10 Fix first and last shotpoint times in map tooltip 2025-07-28 11:01:38 +02:00
D. Berge
f092aff015 Fix navdata URL 2025-07-28 11:01:08 +02:00
D. Berge
94c6406ea2 Add missing dependency 2025-07-28 10:37:58 +02:00
D. Berge
244d84a3bd Add more layers to Map component.
This commit adds back the vessel track as well as other layers,
gives the option to load both point and line versions of the plan,
raw, and final sequences, and adds heatmaps showing positioning
error of raw and final data relative to preplots.

The implementation in this commit relies on translating the binary
sequence data into JSON (for the heatmaps) which is inefficient
but adequate as an initial proof of concept.
2025-07-28 10:14:41 +02:00
D. Berge
89c565a0f5 Protect against out of bounds array condition 2025-07-28 10:10:05 +02:00
D. Berge
31ac8d3c01 Add toJSON() function to binary decoder 2025-07-28 10:07:49 +02:00
D. Berge
3bb78040b0 Set correct Content-Type 2025-07-28 10:06:21 +02:00
D. Berge
1433bda14e Make the iterator more robust against failures.
If a sequence fails to be fetched, it will keep iterating rather
than throwing an error or returning invalid data.
2025-07-27 11:16:47 +02:00
D. Berge
c0ae033de8 Use Cache API to cache binary sequence data.
This speeds up loading when the user moves away from and then
revisits the map tab.

NOTE: As of this commit, there is no way to refresh or invalidate
the cache.
2025-07-27 11:15:09 +02:00
D. Berge
05eed7ef26 Comment out Norwegian nautical charts layer.
It has apparently become inaccessible in recent times.
2025-07-27 11:07:08 +02:00
D. Berge
5d2ca513a6 Add check for WebGL support.
The intention is to fall back to the legacy map if WebGL is not
supported on a client.
2025-07-27 11:06:12 +02:00
D. Berge
b9c8069828 Add an error overlay.
Assigning to `error` on the Map component will cause an overlay
with an error <v-alert/> to be shown.
2025-07-27 11:03:26 +02:00
D. Berge
b80b8ffb52 Add cache option to api Vuex action.
It allows the caching and retrieval of requests using Cache API.
2025-07-27 11:01:34 +02:00
D. Berge
c2eb82ffe7 Modify view on map link 2025-07-26 19:14:29 +02:00
D. Berge
e517e2f771 Refactor map component.
Uses Deck.gl rather than Leaflet.
2025-07-26 19:13:58 +02:00
D. Berge
0afd54447f Add SequenceDataLayer Deck.gl class.
It takes the typed arrays returned by the binary-encoded
endpoints.
2025-07-26 19:06:56 +02:00
D. Berge
e6004dd62f Add link to binary library.
Same library is used server and client side.
2025-07-26 19:06:56 +02:00
D. Berge
f623954399 Adapt to new calling convention for Vuex action 2025-07-26 19:06:56 +02:00
D. Berge
f8d882da5d Replace text parameter by format in Vuex API call.
Instead of { text: true } as a Fetch option, one can
now specify { format: "text" }, as well as any of these
other options, which call the corresponding Fetch method:

* "arrayBuffer",
* "blob",
* "formData",
* "json",
* "text"
2025-07-26 19:06:56 +02:00
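A hedged sketch of how the option might be resolved inside the action; the actual Vuex action code is not shown in this diff:

```js
// Hypothetical core of the `api` action: pick the Response body method by name.
async function readBody (response, format = "json") {
  const allowed = ["arrayBuffer", "blob", "formData", "json", "text"];
  if (!allowed.includes(format)) {
    throw new Error(`Unsupported format: ${format}`);
  }
  return response[format]();   // e.g. response.json(), response.text(), …
}
```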
D. Berge
808c9987af Add binary format middleware for sequence data.
It responds to the MIME type:
application/dougal-map-sequence+octet-stream
2025-07-26 19:05:00 +02:00
D. Berge
4db6d8dd7a Add custom binary format packing / unpacking.
This series of custom binary messages are an alternative to JSON /
GeoJSON when huge amounts of data needs to be transferred to and
processed by the client, such as a GPU-based map view showing all
the points for a prospect, or QC graphs, etc.
2025-07-26 19:05:00 +02:00
D. Berge
9a47977f5f Improve help dialogue.
- Shows frontend and backend versions
- Shows version release notes
2025-07-26 10:59:40 +02:00
D. Berge
a58cce8565 Add /version/history endpoint to API.
Retrieves Git tag annotations.
2025-07-26 10:58:42 +02:00
D. Berge
5487a3a49b Catch JWT expiration.
Closes #321
2025-07-26 10:56:23 +02:00
D. Berge
731778206c Show front and backend version on help dialogue 2025-07-25 23:15:07 +02:00
D. Berge
08e65b512d Inject frontend version as environment variable 2025-07-25 23:14:30 +02:00
D. Berge
9b05388113 Add database upgrade file 40 2025-07-25 21:17:20 +02:00
D. Berge
1b44389a1a Allow configuring the API URL via environment variable.
The environment variable DOUGAL_API_URL takes precedence
over the hard-coded value. For instance:

DOUGAL_API_URL=http://127.0.0.1:2999 will cause /api to
be proxied to the above URL (websockets are correctly
handled too) instead of the default.
2025-07-25 20:08:38 +02:00
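A hedged sketch of how a dev-server proxy could honour that variable; the config file layout and the default target are assumptions:

```js
// vue.config.js-style sketch; not the actual file contents.
const target = process.env.DOUGAL_API_URL || "http://127.0.0.1:3000"; // default is an assumption

module.exports = {
  devServer: {
    proxy: {
      "/api": {
        target,
        ws: true,            // per the commit message, websockets are proxied too
        changeOrigin: true
      }
    }
  }
};
```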
D. Berge
0b3711b759 Fix typo 2025-07-25 20:08:08 +02:00
D. Berge
5a523d4941 Make projects table sorted by default 2025-07-25 20:07:40 +02:00
D. Berge
122951e3a2 Fix expected DB version for upgrade 38 2025-07-25 18:11:19 +02:00
D. Berge
90216c12e4 Rename database upgrades 2025-07-25 18:08:47 +02:00
D. Berge
9c26909a59 Fix npm run scripts 2025-07-25 17:54:56 +02:00
D. Berge
0427a3c18c Use Node workspaces to manage repo dependencies 2025-07-25 17:48:30 +02:00
D. Berge
c32e6f2b38 Make map API calls silent.
Otherwise we get spurious 404s and such.
2025-07-25 17:17:36 +02:00
D. Berge
546d199c52 Remove annoying Leaflet attribution control 2025-07-25 17:17:36 +02:00
D. Berge
6562de97b9 Make the CSS import from package not relative 2025-07-25 17:17:36 +02:00
D. Berge
c666a6368e Fix copy/paste logic for lineNameInfo widget 2025-07-25 14:41:21 +02:00
D. Berge
d5af6df052 Merge branch '177-refactor-users-code' into 'devel'
Refactor users code

Closes #177 and #176

See merge request wgp/dougal/software!57
2025-07-25 12:26:39 +00:00
D. Berge
0c5ea7f30a Merge branch '178-add-api-endpoints-for-user-management' into '177-refactor-users-code'
Add API endpoints for user management

See merge request wgp/dougal/software!58
2025-07-25 12:25:45 +00:00
D. Berge
302642f88d Fix JWT renewal over websocket 2025-07-25 14:21:26 +02:00
D. Berge
48e1369088 Fix host based authentication 2025-07-25 14:03:43 +02:00
D. Berge
daa700e7dc Add (temporarily disabled) menu option for vessel config.
The idea is to give frontend access to a screen where duly
authorised users can modify vessel-wide configuration parameters.
2025-07-25 14:01:49 +02:00
D. Berge
8db2c8ce25 Use access rights mixin in Equipment view 2025-07-25 13:36:16 +02:00
D. Berge
890e48e078 Revert "Don't refresh projects if no user is logged in."
This reverts commit 3a0f720f2f.
2025-07-25 13:35:35 +02:00
D. Berge
11829555cf Add <v-tooltip/> showing permissions.
Hovering over the user avatar or a project name in the breadcrumbs
shows a tooltip with the relevant permissions.
2025-07-25 13:33:59 +02:00
D. Berge
07d8e97f74 Fix Markdown functions in root component 2025-07-25 13:32:30 +02:00
D. Berge
fc379aba14 Silence errors when refreshing projects.
We use this endpoint also to do autologins, so HTTP 403's are not
unexpected.
2025-07-25 13:31:28 +02:00
D. Berge
8cbacb9aa7 Allow silencing API request errors.
The {silent: true} option in the new `opts` argument to the
`api` action does the trick.
2025-07-25 13:30:26 +02:00
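A hedged usage sketch from a component; the exact dispatch payload of the `api` action is an assumption:

```js
// Hypothetical component method; the dispatch payload shape and path are assumptions.
async function refreshProjects () {
  await this.$store.dispatch("api", {
    url: "/projects",              // path is an assumption
    opts: { silent: true }         // do not surface errors (e.g. an expected 403) to the user
  });
}
```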
D. Berge
acb59035e4 Add missing file 2025-07-25 13:29:39 +02:00
D. Berge
b7d0ee7da7 Remove dead code from the frontend 2025-07-25 11:02:24 +02:00
D. Berge
3a0f720f2f Don't refresh projects if no user is logged in.
Avoids a 403.
2025-07-25 10:43:08 +02:00
D. Berge
6cf6fe29f4 Improve presentation of organisation component in project settings 2025-07-24 23:04:44 +02:00
D. Berge
6f0f2dadcc Add "actions" slot to DougalOrganisations component 2025-07-24 23:04:15 +02:00
D. Berge
64fba1adc3 Add project permissions tooltip to breadcrumbs 2025-07-24 23:03:41 +02:00
D. Berge
3ea82cb660 Fix reading of credentials for issuing JWT 2025-07-24 23:03:05 +02:00
D. Berge
84c1385f88 Refactor class User (clean up) 2025-07-24 23:02:30 +02:00
D. Berge
b1b7332216 Add access mixin to Project and use in child component 2025-07-24 20:43:22 +02:00
D. Berge
8e7451e17a Adapt the access rights mixin to new user management code 2025-07-24 20:42:25 +02:00
D. Berge
bdeb2b8742 Show organisation membership in user avatar 2025-07-24 20:41:07 +02:00
D. Berge
ccfabf84f7 Add user management page to frontend 2025-07-24 20:40:18 +02:00
D. Berge
5d4e219403 Refactor Vuex store to adapt to new User class 2025-07-24 20:38:51 +02:00
D. Berge
3b7e4c9f0b Add client-side User class derived from @dougal/user.
Adds methods to communicate with the backend.
2025-07-24 20:37:50 +02:00
D. Berge
683f5680b1 Add organisations configuration section to project settings UI 2025-07-24 20:36:45 +02:00
D. Berge
ce901a03a1 Add component for editing users 2025-07-24 20:35:46 +02:00
D. Berge
f8e5b74c1a Add components for editing organisations settings 2025-07-24 20:35:17 +02:00
D. Berge
ec41d26a7a Use @dougal/user, @dougal/organisations modules in frontend 2025-07-24 20:32:25 +02:00
D. Berge
386fd59900 Update API to handle permissions checks on most endpoints 2025-07-24 19:24:40 +02:00
D. Berge
e47020a21e Add /user endpoints to API 2025-07-24 19:23:43 +02:00
D. Berge
b8f58ac67c Add FIXME 2025-07-24 19:20:58 +02:00
D. Berge
b3e27ed1b9 Refactor auth.authentify.
We now get the user's details directly from the JWT token.
2025-07-24 19:15:36 +02:00
D. Berge
f5441d186f Refactor auth.access middleware.
It uses the @dougal/user and @dougal/organisations classes.
2025-07-24 19:14:19 +02:00
D. Berge
d58bc4d62e Remove unused code 2025-07-24 19:13:17 +02:00
D. Berge
01d1691def Fix login endpoint (checkValidCredentials is now async) 2025-07-24 19:09:39 +02:00
D. Berge
bc444fc066 Add dependency to project organisations cache 2025-07-24 18:48:22 +02:00
D. Berge
989ec84852 Refactor JWT credentials check to use class User 2025-07-24 18:36:34 +02:00
D. Berge
065f6617af Add class ServerUser derived from User.
Used on the backend. Adds methods to hash and check passwords and
to read from and save user data to the database.
2025-07-24 18:31:51 +02:00
D. Berge
825530c1fe Use @dougal/user, @dougal/organisations modules in backend 2025-07-24 18:27:59 +02:00
D. Berge
1ef8eb871f Add @dougal/user NodeJS module.
Abstracts the concept of User in the new permissions model.
2025-07-24 18:22:44 +02:00
D. Berge
2e9c603ab8 Add @dougal/organisations NodeJS module.
Abstracts the concept of Organisations in the new permissions model.
2025-07-24 18:21:02 +02:00
D. Berge
7f067ff760 Add contextual info about sailline CSV files.
The information that has to go on those and their layout is not
very obvious, so adding a contextual help dialogue and an example
file puts the user on the right track.

Closes #319
2025-07-20 11:03:10 +02:00
D. Berge
487c297747 Add database upgrade file 37.
Fixes database upgrade file 35.
2025-07-19 12:20:55 +02:00
D. Berge
cfa771a830 Skip info for saillines with no preplot.
It may happen that the sailline info file has data for more lines
than are actually in the preplot (e.g., if importing a reduced
preplot file). In this case, we don't want a constraint violation
error due to missing corresponding lines in `preplot_lines` so we
check for that and only import lines that do exist in `preplot_lines`
2025-07-19 11:31:52 +02:00
D. Berge
3905e6f5d8 Update OpenAPI specification 2025-07-13 11:15:41 +02:00
D. Berge
2657c42dcc Fix export statement 2025-07-13 11:13:31 +02:00
D. Berge
63e6af545a Fix typo 2025-07-13 11:13:09 +02:00
D. Berge
d6fb7404b1 Adapt version.get middleware to new permissions approach 2025-07-13 00:07:52 +02:00
D. Berge
8188766a81 Refactor access to info table.
To adapt to new permissions system.
2025-07-13 00:07:05 +02:00
D. Berge
b7ae657137 Add auth.operations middleware.
Adds an array of allowed operations on given context to the request
under `req.user.operations`.
2025-07-13 00:02:48 +02:00
D. Berge
1295ec2ee3 Add function to return allowed operations in a given context 2025-07-13 00:01:15 +02:00
D. Berge
7c6d3fe5ee Check permissions against vessel if not on a project endpoint 2025-07-12 16:49:10 +02:00
D. Berge
15570e0f3d orgAccess(user, null, op) returns vessel access permissions.
If instead of a project ID, orgAccess receives `null`, it will
check permissions against the installation's own vessel rather
than against a specific project.
2025-07-12 16:47:39 +02:00
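A hedged usage sketch based on the description above; whether the call is asynchronous and the operation name used here are assumptions:

```js
// Vessel-level check: pass null instead of a project ID, per the commit message.
const mayWrite = await orgAccess(user, null, "write");
if (!mayWrite) {
  // the user has no write permission on the installation's own vessel
}
```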
D. Berge
d551e67042 Add vesselOrganisations() function 2025-07-12 16:47:10 +02:00
D. Berge
6b216f7406 Add library function to retrieve vessel information.
In the `keystore` table, we now store information for our own
vessel (usually, where the Dougal server is installed). This
is an access function to retrieve that information.

The info stored for the vessel looks like this:

```yaml
type: vessel
key: ego
data:
    imo: 9631890
    mmsi: 257419000
    name: Havila Charisma
    contacts:
        -
            name: HC OM
            phone: tel:+47123456789
            email: hc.om@magseisfairfield.com
    organisations:
        Havila Charisma:
            read: true
            write: true
            edit: true
```
2025-07-12 16:42:28 +02:00
D. Berge
a7e02c526b Add function argument defaults.
This will cause the function to return a safe (false) value
rather than erroring.
2025-07-12 16:40:18 +02:00
D. Berge
55855d66e9 Remove dead code 2025-07-12 12:14:12 +02:00
D. Berge
ae79d90fef Remove obsolete Vuex getters 2025-07-12 11:31:38 +02:00
D. Berge
c8b2047483 Refactor client-side access checks.
Go from a Vuex based to a mixin based approach.
2025-07-12 11:31:38 +02:00
D. Berge
d21cde20fc Add mixin to check access rights client-side.
This replaces the Vuex getters approach (writeaccess, adminaccess)
which, as access rights are no longer global but dependent on each
project's settings, are no longer appropriate.
2025-07-12 11:31:38 +02:00
D. Berge
10580ea3ec Create server-side organisations module 2025-07-12 11:31:38 +02:00
D. Berge
25f83d1eb3 Share access() function between front and back end.
This is so that any changes to the code are reflected on both sides.
2025-07-12 11:31:38 +02:00
D. Berge
dc294b5b50 Change prefix used for storing user preferences.
The `role` value no longer exists; we're replacing that with the
user ID.
2025-07-12 11:31:38 +02:00
D. Berge
b035d3481c Ensure users have at least read access to most endpoints 2025-07-11 22:49:28 +02:00
D. Berge
ca4a14ffd9 Use new orgs based method for authorisation 2025-07-11 22:48:44 +02:00
D. Berge
d77f7f66db Refresh organisations cache on project update 2025-07-11 22:48:06 +02:00
D. Berge
6b6f545b9f Filter list of projects to only those readable by user 2025-07-11 22:47:32 +02:00
D. Berge
bdf62e2d8b Show project orgs in projects list 2025-07-11 22:46:47 +02:00
D. Berge
1895168889 Show user orgs in avatar 2025-07-11 22:46:47 +02:00
D. Berge
8c875ea2f9 Return organisations as part of the projects listing 2025-07-11 22:46:47 +02:00
D. Berge
addbe2d572 Refactor user authentication code to use database 2025-07-11 22:46:47 +02:00
D. Berge
85f092b9e1 Upgrade minimum required database version 2025-07-11 22:46:47 +02:00
D. Berge
eb99d74e4a Add database upgrade file 38.
Adds default user (superuser).
2025-07-11 22:46:47 +02:00
D. Berge
e65afdcaa1 Add database upgrade file 37.
Creates `keystore` table.
2025-07-11 22:46:47 +02:00
D. Berge
0b7e9e1d01 Add functions to check operation access via organisations 2025-07-11 22:46:47 +02:00
D. Berge
9ad17de4cb Merge branch '76-add-configuration-gui' into 'devel'
Resolve "Add configuration GUI"

Closes #294, #295, #296, #298, #76, #297, #129, #313, #312, #305, #264, #307, #303, #300, #301, #302, #290, #291, #292, and #293

See merge request wgp/dougal/software!17
2025-07-09 18:11:50 +00:00
D. Berge
dc22bb95fd Disable 'no_fire' test due to changes in Smartsource software 2025-07-03 11:48:42 +02:00
D. Berge
3189a06d75 Change tcpdump flags to capture on any interface 2025-06-27 00:05:23 +02:00
230 changed files with 442481 additions and 32895 deletions

View File

@@ -274,11 +274,27 @@ class Datastore:
with self.conn.cursor() as cursor:
cursor.execute("BEGIN;")
# Check which preplot lines we actually have already imported,
# as the line info file may contain extra lines.
qry = """
SELECT line, class
FROM preplot_lines
ORDER BY line, class;
"""
cursor.execute(qry)
preplot_lines = cursor.fetchall()
hash = self.add_file(filepath, cursor)
count=0
for line in lines:
count += 1
print(f"\u001b[2KSaving line {count} / {len(lines)}", end="\r", flush=True)
if not (line["sail_line"], "V") in preplot_lines:
print(f"\u001b[2KSkipping line {count} / {len(lines)}", end="\n", flush=True)
continue
print(f"\u001b[2KSaving line {count} / {len(lines)} ", end="\n", flush=True)
sail_line = line["sail_line"]
incr = line.get("incr", True)

View File

@@ -23,6 +23,7 @@ transform = {
}
def parse_line (line, fields, fixed = None):
# print("parse_line", line, fields, fixed)
data = dict()
if fixed:
@@ -51,6 +52,7 @@ def parse_line (line, fields, fixed = None):
data[key] = value
# print("parse_line data =", data)
return data

View File

@@ -73,6 +73,12 @@ if __name__ == '__main__':
lineNameInfo = final_p111.get("lineNameInfo")
pattern = final_p111.get("pattern")
if not lineNameInfo:
if not pattern:
print("ERROR! Missing final.p111.lineNameInfo in project configuration. Cannot import final P111")
raise Exception("Missing final.p111.lineNameInfo")
else:
print("WARNING! No `lineNameInfo` in project configuration (final.p111). You should add it to the settings.")
rx = None
if pattern and pattern.get("regex"):
rx = re.compile(pattern["regex"])
@@ -114,27 +120,27 @@ if __name__ == '__main__':
file_info = dict(zip(pattern["captures"], match.groups()))
file_info["meta"] = {}
if lineNameInfo:
basename = os.path.basename(physical_filepath)
fields = lineNameInfo.get("fields", {})
fixed = lineNameInfo.get("fixed")
try:
parsed_line = fwr.parse_line(basename, fields, fixed)
except ValueError as err:
parsed_line = "Line format error: " + str(err)
if type(parsed_line) == str:
print(parsed_line, file=sys.stderr)
print("This file will be ignored!")
continue
if lineNameInfo:
basename = os.path.basename(physical_filepath)
fields = lineNameInfo.get("fields", {})
fixed = lineNameInfo.get("fixed")
try:
parsed_line = fwr.parse_line(basename, fields, fixed)
except ValueError as err:
parsed_line = "Line format error: " + str(err)
if type(parsed_line) == str:
print(parsed_line, file=sys.stderr)
print("This file will be ignored!")
continue
file_info = {}
file_info["sequence"] = parsed_line["sequence"]
file_info["line"] = parsed_line["line"]
del(parsed_line["sequence"])
del(parsed_line["line"])
file_info["meta"] = {
"fileInfo": parsed_line
}
file_info = {}
file_info["sequence"] = parsed_line["sequence"]
file_info["line"] = parsed_line["line"]
del(parsed_line["sequence"])
del(parsed_line["line"])
file_info["meta"] = {
"fileInfo": parsed_line
}
if pending:
print("Skipping / removing final file because marked as PENDING", logical_filepath)

View File

@@ -41,6 +41,12 @@ if __name__ == '__main__':
lineNameInfo = raw_p111.get("lineNameInfo")
pattern = raw_p111.get("pattern")
if not lineNameInfo:
if not pattern:
print("ERROR! Missing raw.p111.lineNameInfo in project configuration. Cannot import raw P111")
raise Exception("Missing raw.p111.lineNameInfo")
else:
print("WARNING! No `lineNameInfo` in project configuration (raw.p111). You should add it to the settings.")
rx = None
if pattern and pattern.get("regex"):
rx = re.compile(pattern["regex"])
@@ -96,14 +102,15 @@ if __name__ == '__main__':
print("This file will be ignored!")
continue
file_info = {}
file_info["sequence"] = parsed_line["sequence"]
file_info["line"] = parsed_line["line"]
del(parsed_line["sequence"])
del(parsed_line["line"])
file_info["meta"] = {
"fileInfo": parsed_line
}
file_info = {}
file_info["sequence"] = parsed_line["sequence"]
file_info["line"] = parsed_line["line"]
del(parsed_line["sequence"])
del(parsed_line["line"])
file_info["meta"] = {
"fileInfo": parsed_line
}
p111_data = p111.from_file(physical_filepath)
print("Saving")

View File

@@ -1,5 +1,7 @@
#!/bin/bash
# Maximum runtime in seconds before killing an overdue instance (here 15 minutes)
MAX_RUNTIME_SECONDS=$((15 * 60))
DOUGAL_ROOT=${DOUGAL_ROOT:-$(dirname "$0")/..}
@@ -80,8 +82,9 @@ function run () {
# DESCRIPTION=""
SERVICE="deferred_imports"
$BINDIR/send_alert.py -t "$TITLE" -s "$SERVICE" -l "critical" \
-O "$(cat $STDOUTLOG)" -E "$(cat $STDERRLOG)"
# Disable GitLab alerts. They're just not very practical
# $BINDIR/send_alert.py -t "$TITLE" -s "$SERVICE" -l "critical" \
# -O "$(cat $STDOUTLOG)" -E "$(cat $STDERRLOG)"
exit 2
}
@@ -97,14 +100,37 @@ function cleanup () {
}
if [[ -f $LOCKFILE ]]; then
PID=$(cat "$LOCKFILE")
if pgrep -F "$LOCKFILE"; then
print_warning $(printf "The previous process is still running (%d)" $PID)
exit 1
else
rm "$LOCKFILE"
print_warning $(printf "Previous process (%d) not found. Must have died unexpectedly" $PID)
fi
PID=$(cat "$LOCKFILE")
if kill -0 "$PID" 2>/dev/null; then # Check if process is running
# Get elapsed time in D-HH:MM:SS format and convert to seconds
ELAPSED_STR=$(ps -p "$PID" -o etime= | tr -d '[:space:]')
if [ -n "$ELAPSED_STR" ]; then
# Convert D-HH:MM:SS to seconds
ELAPSED_SECONDS=$(echo "$ELAPSED_STR" | awk -F'[-:]' '{
seconds = 0
if (NF == 4) { seconds += $1 * 86400 } # Days
if (NF >= 3) { seconds += $(NF-2) * 3600 } # Hours
if (NF >= 2) { seconds += $(NF-1) * 60 } # Minutes
seconds += $NF # Seconds
print seconds
}')
if [ "$ELAPSED_SECONDS" -gt "$MAX_RUNTIME_SECONDS" ]; then
# Kill the overdue process (SIGTERM; use -9 for SIGKILL if needed)
kill "$PID" 2>/dev/null
print_warning $(printf "Killed overdue process (%d) that ran for %s (%d seconds)" "$PID" "$ELAPSED_STR" "$ELAPSED_SECONDS")
rm "$LOCKFILE"
else
print_warning $(printf "Previous process is still running (%d) for %s (%d seconds)" "$PID" "$ELAPSED_STR" "$ELAPSED_SECONDS")
exit 1
fi
else
print_warning $(printf "Could not retrieve elapsed time for process (%d)" "$PID")
exit 1
fi
else
rm "$LOCKFILE"
print_warning $(printf "Previous process (%d) not found. Must have died unexpectedly" "$PID")
fi
fi
echo "$$" > "$LOCKFILE" || {

89
bin/update_comparisons.js Executable file
View File

@@ -0,0 +1,89 @@
#!/usr/bin/node
const cmp = require('../lib/www/server/lib/comparisons');
async function purgeComparisons () {
const groups = await cmp.groups();
const comparisons = await cmp.getGroup();
const pids = new Set(Object.values(groups).flat().map( p => p.pid ));
const comparison_pids = new Set(comparisons.map( c => [ c.baseline_pid, c.monitor_pid ] ).flat());
for (const pid of comparison_pids) {
if (!pids.has(pid)) {
console.log(`${pid} no longer part of a group. Deleting comparisons`);
const staleComps = comparisons.filter( c => c.baseline_pid == pid || c.monitor_pid == pid );
for (const c of staleComps) {
console.log(`Deleting comparison ${c.baseline_pid}${c.monitor_pid}`);
await cmp.remove(c.baseline_pid, c.monitor_pid);
}
}
}
}
async function main () {
console.log("Looking for unreferenced comparisons to purge");
await purgeComparisons();
console.log("Retrieving project groups");
const groups = await cmp.groups();
if (!Object.keys(groups??{})?.length) {
console.log("No groups found");
return 0;
}
console.log(`Found ${Object.keys(groups)?.length} groups: ${Object.keys(groups).join(", ")}`);
for (const groupName of Object.keys(groups)) {
const projects = groups[groupName];
console.log(`Fetching saved comparisons for ${groupName}`);
const comparisons = await cmp.getGroup(groupName);
if (!comparisons || !comparisons.length) {
console.log(`No comparisons found for ${groupName}`);
continue;
}
// Check if there are any projects that have been modified since last comparison
// or if there are any pairs that are no longer part of the group
const outdated = comparisons.filter( c => {
const baseline_tstamp = projects.find( p => p.pid === c.baseline_pid )?.tstamp;
const monitor_tstamp = projects.find( p => p.pid === c.monitor_pid )?.tstamp;
return (c.tstamp < baseline_tstamp) || (c.tstamp < monitor_tstamp) ||
baseline_tstamp == null || monitor_tstamp == null;
});
for (const comparison of outdated) {
console.log(`Removing stale comparison: ${comparison.baseline_pid}${comparison.monitor_pid}`);
await cmp.remove(comparison.baseline_pid, comparison.monitor_pid);
}
if (projects?.length < 2) {
console.log(`Group ${groupName} has less than two projects. No comparisons are possible`);
continue;
}
// Re-run the comparisons that are not in the database. They may
// be missing either because they were not there to start with
// or because we just removed them due to being stale
console.log(`Recalculating group ${groupName}`);
await cmp.saveGroup(groupName);
}
console.log("Comparisons update done");
return 0;
}
if (require.main === module) {
main();
} else {
module.exports = main;
}

View File

@@ -2,6 +2,9 @@
--
-- New schema version: 0.5.2
--
-- WARNING: This update is buggy and does not give the desired
-- results. Schema version 0.5.4 fixes this.
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.

View File

@@ -0,0 +1,145 @@
-- Fix preplot_lines_summary view
--
-- New schema version: 0.5.4
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects all schemas in the database.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- Fixes upgrade 35 (0.5.2). The original description of 0.5.2 is included
-- below for ease of reference:
--
-- Following introduction of `preplot_saillines` (0.5.0), the incr and
-- ntba statuses are stored in a separate table, not in `preplot_lines`
-- (TODO: a future upgrade should remove those columns from `preplot_lines`)
--
-- Now any views referencing `incr` and `ntba` must be updated to point to
-- the new location of those attributes.
--
-- This update fixes #312.
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--
BEGIN;
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
BEGIN
RAISE NOTICE 'Updating schema %', schema_name;
-- We need to set the search path because some of the trigger
-- functions reference other tables in survey schemas assuming
-- they are in the search path.
EXECUTE format('SET search_path TO %I,public', schema_name);
CREATE OR REPLACE VIEW preplot_lines_summary
AS
WITH summary AS (
SELECT DISTINCT pp.line,
pp.class,
first_value(pp.point) OVER w AS p0,
last_value(pp.point) OVER w AS p1,
count(pp.point) OVER w AS num_points,
st_distance(first_value(pp.geometry) OVER w, last_value(pp.geometry) OVER w) AS length,
st_azimuth(first_value(pp.geometry) OVER w, last_value(pp.geometry) OVER w) * 180::double precision / pi() AS azimuth0,
st_azimuth(last_value(pp.geometry) OVER w, first_value(pp.geometry) OVER w) * 180::double precision / pi() AS azimuth1
FROM preplot_points pp
WHERE pp.class = 'V'::bpchar
WINDOW w AS (PARTITION BY pp.line ORDER BY pp.point ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
)
SELECT DISTINCT psl.sailline AS line,
CASE
WHEN psl.incr THEN s.p0
ELSE s.p1
END AS fsp,
CASE
WHEN psl.incr THEN s.p1
ELSE s.p0
END AS lsp,
s.num_points,
s.length,
CASE
WHEN psl.incr THEN s.azimuth0
ELSE s.azimuth1
END AS azimuth,
psl.incr,
psl.remarks
FROM summary s
JOIN preplot_saillines psl ON psl.sailline_class = s.class AND s.line = psl.sailline
ORDER BY psl.sailline, psl.incr;
ALTER TABLE preplot_lines_summary
OWNER TO postgres;
COMMENT ON VIEW preplot_lines_summary
IS 'Summarises ''V'' (vessel sailline) preplot lines.';
END;
$outer$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
current_db_version TEXT;
BEGIN
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
IF current_db_version >= '0.5.4' THEN
RAISE EXCEPTION
USING MESSAGE='Patch already applied';
END IF;
IF current_db_version != '0.5.3' THEN
RAISE EXCEPTION
USING MESSAGE='Invalid database version: ' || current_db_version,
HINT='Ensure all previous patches have been applied.';
END IF;
FOR row IN
SELECT schema_name FROM information_schema.schemata
WHERE schema_name LIKE 'survey_%'
ORDER BY schema_name
LOOP
CALL pg_temp.upgrade_survey_schema(row.schema_name);
END LOOP;
END;
$outer$ LANGUAGE plpgsql;
CALL pg_temp.upgrade();
CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
DROP PROCEDURE pg_temp.upgrade ();
CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.4"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.5.4"}' WHERE public.info.key = 'version';
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);
--
--NOTE Run `COMMIT;` now if all went well
--

View File

@@ -0,0 +1,110 @@
-- Add `keystore` table
--
-- New schema version: 0.6.0
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade only affects the `public` schema.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This update adds a `keystore` table, intended for storing arbitrary
-- key / value pairs which, unlike the `info` tables, is not meant to
-- be directly accessible via the API. Its main purpose as of this writing
-- is to store user definitions (see #176, #177, #180).
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--
BEGIN;
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN
RAISE NOTICE 'Updating schema %', 'public';
SET search_path TO public;
CREATE TABLE IF NOT EXISTS keystore (
type TEXT NOT NULL, -- A class of data to be stored
key TEXT NOT NULL, -- A key that is unique for the class and access type
last_modified TIMESTAMP -- To detect update conflicts
DEFAULT CURRENT_TIMESTAMP,
data jsonb,
PRIMARY KEY (type, key) -- Composite primary key
);
-- Create a function to update the last_modified timestamp
CREATE OR REPLACE FUNCTION update_last_modified()
RETURNS TRIGGER AS $$
BEGIN
NEW.last_modified = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Create a trigger that calls the function before each update
CREATE OR REPLACE TRIGGER update_keystore_last_modified
BEFORE UPDATE ON keystore
FOR EACH ROW
EXECUTE FUNCTION update_last_modified();
END;
$outer$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
current_db_version TEXT;
BEGIN
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
IF current_db_version >= '0.6.0' THEN
RAISE EXCEPTION
USING MESSAGE='Patch already applied';
END IF;
IF current_db_version != '0.5.4' THEN
RAISE EXCEPTION
USING MESSAGE='Invalid database version: ' || current_db_version,
HINT='Ensure all previous patches have been applied.';
END IF;
CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;
CALL pg_temp.upgrade();
CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();
CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.0"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.0"}' WHERE public.info.key = 'version';
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);
--
--NOTE Run `COMMIT;` now if all went well
--

View File

@@ -0,0 +1,108 @@
-- Add default user
--
-- New schema version: 0.6.1
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade only affects the `public` schema.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This update adds a default user to the system (see #176, #177, #180).
-- The default user can only be invoked by connecting from localhost.
--
-- This user has full access to every project via the organisations
-- permissions wildcard: `{"*": {"read": true, "write": true, "edit": true}}`
-- and can be used to bootstrap the system by creating other users
-- and assigning organisational permissions.
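--
-- For illustration only (hypothetical, not executed by this patch), a further
-- user restricted to a single organisation could be added in the same way:
--
--   INSERT INTO public.keystore (type, key, data)
--   VALUES ('user', 'another-uuid', '{"id": "another-uuid", "name": "ops", "active": true,
--            "organisations": {"WGP": {"read": true, "write": true, "edit": false}}}'::jsonb)
--   ON CONFLICT (type, key) DO NOTHING;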
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--
BEGIN;
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN
RAISE NOTICE 'Updating schema %', 'public';
SET search_path TO public;
INSERT INTO keystore (type, key, data)
VALUES ('user', '6f1e7159-4ca0-4ae4-ab4e-89078166cc10', '
{
"id": "6f1e7159-4ca0-4ae4-ab4e-89078166cc10",
"ip": "127.0.0.0/24",
"name": "☠️",
"colour": "red",
"active": true,
"organisations": {
"*": {
"read": true,
"write": true,
"edit": true
}
}
}
'::jsonb)
ON CONFLICT (type, key) DO NOTHING;
END;
$outer$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
current_db_version TEXT;
BEGIN
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
IF current_db_version >= '0.6.1' THEN
RAISE EXCEPTION
USING MESSAGE='Patch already applied';
END IF;
IF current_db_version != '0.6.0' THEN
RAISE EXCEPTION
USING MESSAGE='Invalid database version: ' || current_db_version,
HINT='Ensure all previous patches have been applied.';
END IF;
CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;
CALL pg_temp.upgrade();
CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();
CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.1"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.1"}' WHERE public.info.key = 'version';
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);
--
--NOTE Run `COMMIT;` now if all went well
--

View File

@@ -0,0 +1,106 @@
-- Add organisations to project configurations
--
-- New schema version: 0.6.2
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade only affects the `public` schema.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This update adds an "organisations" section to the configuration,
-- with a default configured organisation of "WGP" with full access.
-- This is so that projects can be made accessible after migrating
-- to the new permissions architecture.
--
-- In addition, projects with an id starting with "eq" are assumed to
-- be Equinor projects, and an additional organisation is added with
-- read-only access. This is intended for clients, which should be
-- assigned to the "Equinor" organisation.
--
-- Finally, we assign the vessel to the "WGP" organisation (full access)
-- so that we can actually use administrative endpoints.
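--
-- For illustration only (hypothetical, not executed by this patch), access for
-- a given organisation can later be adjusted in the same way, e.g. to grant
-- Equinor write access on a single project:
--
--   UPDATE projects
--   SET meta = jsonb_set(meta, '{organisations, Equinor, write}', 'true'::jsonb, true)
--   WHERE pid = 'eq_example';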
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--
BEGIN;
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN
RAISE NOTICE 'Updating schema %', 'public';
SET search_path TO public;
-- Add "organisations" section to configurations, if not already present
UPDATE projects
SET
meta = jsonb_set(meta, '{organisations}', '{"WGP": {"read": true, "write": true, "edit": true}}'::jsonb, true)
WHERE meta->'organisations' IS NULL;
-- Add (or overwrite!) "organisations.Equinor" giving read-only access (can be changed later via API)
UPDATE projects
SET
meta = jsonb_set(meta, '{organisations, Equinor}', '{"read": true, "write": false, "edit": false}'::jsonb, true)
WHERE pid LIKE 'eq%';
END;
$outer$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
current_db_version TEXT;
BEGIN
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
IF current_db_version >= '0.6.2' THEN
RAISE EXCEPTION
USING MESSAGE='Patch already applied';
END IF;
IF current_db_version != '0.6.1' THEN
RAISE EXCEPTION
USING MESSAGE='Invalid database version: ' || current_db_version,
HINT='Ensure all previous patches have been applied.';
END IF;
CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;
CALL pg_temp.upgrade();
CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();
CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.2"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.2"}' WHERE public.info.key = 'version';
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);
--
--NOTE Run `COMMIT;` now if all went well
--

View File

@@ -0,0 +1,109 @@
-- Add comparisons schema and table
--
-- New schema version: 0.6.3
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade creates a new schema called `comparisons`.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This update adds a `comparisons` table to a `comparisons` schema.
-- The `comparisons.comparisons` table holds 4D prospect comparison data.
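--
-- For illustration only (hypothetical, not executed by this patch), stored
-- comparisons can be listed with, e.g.:
--
--   SELECT type, baseline_pid, monitor_pid, octet_length(data) AS bytes, meta
--   FROM comparisons.comparisons;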
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--
BEGIN;
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN
RAISE NOTICE 'Updating schema %', 'public';
SET search_path TO public;
-- BEGIN
CREATE SCHEMA IF NOT EXISTS comparisons
AUTHORIZATION postgres;
COMMENT ON SCHEMA comparisons
IS 'Holds 4D comparison data and logic';
CREATE TABLE IF NOT EXISTS comparisons.comparisons
(
type text COLLATE pg_catalog."default" NOT NULL,
baseline_pid text COLLATE pg_catalog."default" NOT NULL,
monitor_pid text COLLATE pg_catalog."default" NOT NULL,
data bytea,
meta jsonb NOT NULL DEFAULT '{}'::jsonb,
CONSTRAINT comparisons_pkey PRIMARY KEY (baseline_pid, monitor_pid, type)
)
TABLESPACE pg_default;
ALTER TABLE IF EXISTS comparisons.comparisons
OWNER to postgres;
-- END
END;
$outer$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
current_db_version TEXT;
BEGIN
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
IF current_db_version >= '0.6.3' THEN
RAISE EXCEPTION
USING MESSAGE='Patch already applied';
END IF;
IF current_db_version != '0.6.2' THEN
RAISE EXCEPTION
USING MESSAGE='Invalid database version: ' || current_db_version,
HINT='Ensure all previous patches have been applied.';
END IF;
CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;
CALL pg_temp.upgrade();
CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();
CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.3"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.3"}' WHERE public.info.key = 'version';
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);
--
--NOTE Run `COMMIT;` now if all went well
--

View File

@@ -0,0 +1,169 @@
-- Allow notify() to exclude columns from its payload
--
-- New schema version: 0.6.4
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This update modifies notify() to accept, as optional arguments, the
-- names of columns that are to be *excluded* from the notification.
-- It is intended for tables with large columns which are however of
-- no particular interest in a notification.
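--
-- For illustration only (hypothetical, not executed by this patch), a trigger
-- on a table with a large `blob` column could exclude it like so:
--
--   CREATE TRIGGER some_table_tg
--     AFTER INSERT OR DELETE OR UPDATE ON some_schema.some_table
--     FOR EACH ROW
--     EXECUTE FUNCTION public.notify('some_channel', 'blob');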
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--
BEGIN;
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN
RAISE NOTICE 'Updating schema %', 'public';
SET search_path TO public;
-- BEGIN
CREATE OR REPLACE FUNCTION public.notify()
RETURNS trigger
LANGUAGE 'plpgsql'
COST 100
VOLATILE NOT LEAKPROOF
AS $BODY$
DECLARE
channel text := TG_ARGV[0];
pid text;
payload text;
notification text;
payload_id integer;
old_json jsonb;
new_json jsonb;
excluded_col text;
i integer;
BEGIN
-- Fetch pid
SELECT projects.pid INTO pid FROM projects WHERE schema = TG_TABLE_SCHEMA;
-- Build old and new as jsonb, excluding specified columns if provided
IF OLD IS NOT NULL THEN
old_json := row_to_json(OLD)::jsonb;
FOR i IN 1 .. TG_NARGS - 1 LOOP
excluded_col := TG_ARGV[i];
old_json := old_json - excluded_col;
END LOOP;
ELSE
old_json := NULL;
END IF;
IF NEW IS NOT NULL THEN
new_json := row_to_json(NEW)::jsonb;
FOR i IN 1 .. TG_NARGS - 1 LOOP
excluded_col := TG_ARGV[i];
new_json := new_json - excluded_col;
END LOOP;
ELSE
new_json := NULL;
END IF;
-- Build payload
payload := json_build_object(
'tstamp', CURRENT_TIMESTAMP,
'operation', TG_OP,
'schema', TG_TABLE_SCHEMA,
'table', TG_TABLE_NAME,
'old', old_json,
'new', new_json,
'pid', pid
)::text;
-- Handle large payloads
IF octet_length(payload) < 1000 THEN
PERFORM pg_notify(channel, payload);
ELSE
-- Store large payload and notify with ID (as before)
INSERT INTO notify_payloads (payload) VALUES (payload) RETURNING id INTO payload_id;
notification := json_build_object(
'tstamp', CURRENT_TIMESTAMP,
'operation', TG_OP,
'schema', TG_TABLE_SCHEMA,
'table', TG_TABLE_NAME,
'pid', pid,
'payload_id', payload_id
)::text;
PERFORM pg_notify(channel, notification);
RAISE INFO 'Payload over limit';
END IF;
RETURN NULL;
END;
$BODY$;
ALTER FUNCTION public.notify()
OWNER TO postgres;
-- END
END;
$outer$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
current_db_version TEXT;
BEGIN
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
IF current_db_version >= '0.6.4' THEN
RAISE EXCEPTION
USING MESSAGE='Patch already applied';
END IF;
IF current_db_version != '0.6.3' THEN
RAISE EXCEPTION
USING MESSAGE='Invalid database version: ' || current_db_version,
HINT='Ensure all previous patches have been applied.';
END IF;
CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;
CALL pg_temp.upgrade();
CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();
CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.4"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.4"}' WHERE public.info.key = 'version';
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);
--
--NOTE Run `COMMIT;` now if all went well
--

View File

@@ -0,0 +1,96 @@
-- Add notification trigger for comparisons
--
-- New schema version: 0.6.5
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade only affects the `comparisons` schema.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This update adds a trigger to `comparisons.comparisons` so that inserts,
-- updates and deletes are broadcast on the `comparisons` notification
-- channel. The large `data` column is excluded from the payload using the
-- notify() arguments introduced in schema version 0.6.4.
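--
-- For illustration only (hypothetical), a client can subscribe to these
-- notifications with:
--
--   LISTEN comparisons;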
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--
BEGIN;
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN
RAISE NOTICE 'Updating schema %', 'public';
SET search_path TO public;
-- BEGIN
CREATE OR REPLACE TRIGGER comparisons_tg
AFTER INSERT OR DELETE OR UPDATE
ON comparisons.comparisons
FOR EACH ROW
EXECUTE FUNCTION public.notify('comparisons', 'data');
-- END
END;
$outer$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
current_db_version TEXT;
BEGIN
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
IF current_db_version >= '0.6.5' THEN
RAISE EXCEPTION
USING MESSAGE='Patch already applied';
END IF;
IF current_db_version != '0.6.4' THEN
RAISE EXCEPTION
USING MESSAGE='Invalid database version: ' || current_db_version,
HINT='Ensure all previous patches have been applied.';
END IF;
CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;
CALL pg_temp.upgrade();
CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();
CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.5"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.5"}' WHERE public.info.key = 'version';
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);
--
--NOTE Run `COMMIT;` now if all went well
--

View File

@@ -0,0 +1,157 @@
-- Add last_project_update() function
--
-- New schema version: 0.6.6
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
-- or rolled back.
--
-- This adds a last_project_update(pid) function. It takes a project ID
-- and returns the last known timestamp from that project. Timestamps
-- are derived from multiple sources:
--
-- - raw_shots table
-- - final_shots table
-- - event_log_full table
-- - info table where key = 'qc'
-- - files table, from the hashes (which contain the file's mtime)
-- - project configuration, looking for an _updatedOn property
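--
-- For illustration only (hypothetical, not executed by this patch):
--
--   SELECT pid, public.last_project_update(pid) AS last_update
--   FROM public.projects
--   ORDER BY last_update DESC NULLS LAST;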
--
-- To apply, run as the dougal user:
--
-- psql <<EOF
-- \i $THIS_FILE
-- COMMIT;
-- EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--
BEGIN;
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN
RAISE NOTICE 'Updating schema %', 'public';
SET search_path TO public;
-- BEGIN
CREATE OR REPLACE FUNCTION public.last_project_update(p_pid text)
RETURNS timestamp with time zone
LANGUAGE plpgsql
AS $function$
DECLARE
v_last_ts timestamptz := NULL;
v_current_ts timestamptz;
v_current_str text;
v_current_unix numeric;
v_sid_rec record;
BEGIN
-- From raw_shots, final_shots, info, and files
FOR v_sid_rec IN SELECT schema FROM public.projects WHERE pid = p_pid
LOOP
-- From raw_shots
EXECUTE 'SELECT max(tstamp) FROM ' || v_sid_rec.schema || '.raw_shots' INTO v_current_ts;
IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
v_last_ts := v_current_ts;
END IF;
-- From final_shots
EXECUTE 'SELECT max(tstamp) FROM ' || v_sid_rec.schema || '.final_shots' INTO v_current_ts;
IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
v_last_ts := v_current_ts;
END IF;
-- From info where key = 'qc'
EXECUTE 'SELECT value->>''updatedOn'' FROM ' || v_sid_rec.schema || '.info WHERE key = ''qc''' INTO v_current_str;
IF v_current_str IS NOT NULL THEN
v_current_ts := v_current_str::timestamptz;
IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
v_last_ts := v_current_ts;
END IF;
END IF;
-- From files hash second part, only for valid colon-separated hashes
EXECUTE 'SELECT max( split_part(hash, '':'', 2)::numeric ) FROM ' || v_sid_rec.schema || '.files WHERE hash ~ ''^[0-9]+:[0-9]+\.[0-9]+:[0-9]+\.[0-9]+:[0-9a-f]+$''' INTO v_current_unix;
IF v_current_unix IS NOT NULL THEN
v_current_ts := to_timestamp(v_current_unix);
IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
v_last_ts := v_current_ts;
END IF;
END IF;
-- From event_log_full
EXECUTE 'SELECT max(tstamp) FROM ' || v_sid_rec.schema || '.event_log_full' INTO v_current_ts;
IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
v_last_ts := v_current_ts;
END IF;
END LOOP;
-- From projects.meta->_updatedOn
SELECT (meta->>'_updatedOn')::timestamptz FROM public.projects WHERE pid = p_pid INTO v_current_ts;
IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
v_last_ts := v_current_ts;
END IF;
RETURN v_last_ts;
END;
$function$;
-- END
END;
$outer$ LANGUAGE plpgsql;
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
row RECORD;
current_db_version TEXT;
BEGIN
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
IF current_db_version >= '0.6.6' THEN
RAISE EXCEPTION
USING MESSAGE='Patch already applied';
END IF;
IF current_db_version != '0.6.5' THEN
RAISE EXCEPTION
USING MESSAGE='Invalid database version: ' || current_db_version,
HINT='Ensure all previous patches have been applied.';
END IF;
CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;
CALL pg_temp.upgrade();
CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();
CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.6"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.6"}' WHERE public.info.key = 'version';
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);
--
--NOTE Run `COMMIT;` now if all went well
--

View File

@@ -45,11 +45,13 @@
name: "No fire"
id: no_fire
check: |
const currentShot = currentItem;
const gunData = currentItem._("raw_meta.smsrc");
(gunData && gunData.guns && gunData.guns.length != gunData.num_active)
? `Source ${gunData.src_number}: No fire (${gunData.guns.length - gunData.num_active} guns)`
: true;
// const currentShot = currentItem;
// const gunData = currentItem._("raw_meta.smsrc");
// (gunData && gunData.guns && gunData.guns.length != gunData.num_active)
// ? `Source ${gunData.src_number}: No fire (${gunData.guns.length - gunData.num_active} guns)`
// : true;
// Disabled due to changes in Smartsource software. It now returns all guns on every shot, not just active ones.
true
-
name: "Pressure errors"

View File

@@ -0,0 +1,968 @@
const codeToType = {
0: Int8Array,
1: Uint8Array,
2: Int16Array,
3: Uint16Array,
4: Int32Array,
5: Uint32Array,
7: Float32Array,
8: Float64Array,
9: BigInt64Array,
10: BigUint64Array
};
const typeToBytes = {
Int8Array: 1,
Uint8Array: 1,
Int16Array: 2,
Uint16Array: 2,
Int32Array: 4,
Uint32Array: 4,
Float32Array: 4,
Float64Array: 8,
BigInt64Array: 8,
BigUint64Array: 8
};
function readTypedValue(view, offset, type) {
switch (type) {
case Int8Array: return view.getInt8(offset);
case Uint8Array: return view.getUint8(offset);
case Int16Array: return view.getInt16(offset, true);
case Uint16Array: return view.getUint16(offset, true);
case Int32Array: return view.getInt32(offset, true);
case Uint32Array: return view.getUint32(offset, true);
case Float32Array: return view.getFloat32(offset, true);
case Float64Array: return view.getFloat64(offset, true);
case BigInt64Array: return view.getBigInt64(offset, true);
case BigUint64Array: return view.getBigUint64(offset, true);
default: throw new Error(`Unsupported type: ${type.name}`);
}
}
function writeTypedValue(view, offset, value, type) {
switch (type) {
case Int8Array: view.setInt8(offset, value); break;
case Uint8Array: view.setUint8(offset, value); break;
case Int16Array: view.setInt16(offset, value, true); break;
case Uint16Array: view.setUint16(offset, value, true); break;
case Int32Array: view.setInt32(offset, value, true); break;
case Uint32Array: view.setUint32(offset, value, true); break;
case Float32Array: view.setFloat32(offset, value, true); break;
case Float64Array: view.setFloat64(offset, value, true); break;
case BigInt64Array: view.setBigInt64(offset, BigInt(value), true); break;
case BigUint64Array: view.setBigUint64(offset, BigInt(value), true); break;
default: throw new Error(`Unsupported type: ${type.name}`);
}
}
class DougalBinaryBundle extends ArrayBuffer {
static HEADER_LENGTH = 4; // Length of a bundle header
/** Clone an existing ByteArray into a DougalBinaryBundle
*/
static clone (buffer) {
const clone = new DougalBinaryBundle(buffer.byteLength);
const uint8Array = new Uint8Array(buffer);
const uint8ArrayClone = new Uint8Array(clone);
uint8ArrayClone.set(uint8Array);
return clone;
}
constructor (length, options) {
super (length, options);
}
/** Get the count of bundles in this ByteArray.
*
* Stops at the first non-bundle looking offset
*/
get bundleCount () {
let count = 0;
let currentBundleOffset = 0;
const view = new DataView(this);
while (currentBundleOffset < this.byteLength) {
const currentBundleHeader = view.getUint32(currentBundleOffset, true);
if ((currentBundleHeader & 0xff) !== 0x1c) {
// This is not a bundle
return count;
}
let currentBundleLength = currentBundleHeader >>> 8;
currentBundleOffset += currentBundleLength + DougalBinaryBundle.HEADER_LENGTH;
count++;
}
return count;
}
/** Get the number of chunks in the bundles of this ByteArray
*/
get chunkCount () {
let count = 0;
let bundleOffset = 0;
const view = new DataView(this);
while (bundleOffset < this.byteLength) {
const header = view.getUint32(bundleOffset, true);
if ((header & 0xFF) !== 0x1C) break;
const length = header >>> 8;
if (bundleOffset + 4 + length > this.byteLength) break;
let chunkOffset = bundleOffset + 4; // relative to buffer start
while (chunkOffset < bundleOffset + 4 + length) {
const chunkType = view.getUint8(chunkOffset);
if (chunkType !== 0x11 && chunkType !== 0x12) break;
const cCount = view.getUint16(chunkOffset + 2, true);
const ΔelemC = view.getUint8(chunkOffset + 10);
const elemC = view.getUint8(chunkOffset + 11);
let localOffset = 12; // header size
localOffset += ΔelemC + elemC; // preface
// initial values
for (let k = 0; k < ΔelemC; k++) {
const typeByte = view.getUint8(chunkOffset + 12 + k);
const baseCode = typeByte & 0xF;
const baseType = codeToType[baseCode];
if (!baseType) throw new Error('Invalid base type code');
localOffset += typeToBytes[baseType.name];
}
// pad after initial
while (localOffset % 4 !== 0) localOffset++;
if (chunkType === 0x11) { // Sequential
// record data: Δelems incrs
for (let k = 0; k < ΔelemC; k++) {
const typeByte = view.getUint8(chunkOffset + 12 + k);
const incrCode = typeByte >> 4;
const incrType = codeToType[incrCode];
if (!incrType) throw new Error('Invalid incr type code');
localOffset += cCount * typeToBytes[incrType.name];
}
// elems
for (let k = 0; k < elemC; k++) {
const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
const type = codeToType[typeCode];
if (!type) throw new Error('Invalid elem type code');
localOffset += cCount * typeToBytes[type.name];
}
} else { // Interleaved
// Compute exact stride for interleaved record data
let ΔelemStride = 0;
for (let k = 0; k < ΔelemC; k++) {
const typeByte = view.getUint8(chunkOffset + 12 + k);
const incrCode = typeByte >> 4;
const incrType = codeToType[incrCode];
if (!incrType) throw new Error('Invalid incr type code');
ΔelemStride += typeToBytes[incrType.name];
}
let elemStride = 0;
for (let k = 0; k < elemC; k++) {
const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
const type = codeToType[typeCode];
if (!type) throw new Error('Invalid elem type code');
elemStride += typeToBytes[type.name];
}
const recordStride = ΔelemStride + elemStride;
localOffset += cCount * recordStride;
}
// pad after record
while (localOffset % 4 !== 0) localOffset++;
chunkOffset += localOffset;
count++;
}
bundleOffset += 4 + length;
}
return count;
}
/** Return an array of DougalBinaryChunkSequential or DougalBinaryChunkInterleaved instances
*/
chunks () {
const chunks = [];
let bundleOffset = 0;
const view = new DataView(this);
while (bundleOffset < this.byteLength) {
const header = view.getUint32(bundleOffset, true);
if ((header & 0xFF) !== 0x1C) break;
const length = header >>> 8;
if (bundleOffset + 4 + length > this.byteLength) break;
let chunkOffset = bundleOffset + 4;
while (chunkOffset < bundleOffset + 4 + length) {
const chunkType = view.getUint8(chunkOffset);
if (chunkType !== 0x11 && chunkType !== 0x12) break;
const cCount = view.getUint16(chunkOffset + 2, true);
const ΔelemC = view.getUint8(chunkOffset + 10);
const elemC = view.getUint8(chunkOffset + 11);
let localOffset = 12;
localOffset += ΔelemC + elemC;
// initial values
for (let k = 0; k < ΔelemC; k++) {
const typeByte = view.getUint8(chunkOffset + 12 + k);
const baseCode = typeByte & 0xF;
const baseType = codeToType[baseCode];
if (!baseType) throw new Error('Invalid base type code');
localOffset += typeToBytes[baseType.name];
}
// pad after initial
while (localOffset % 4 !== 0) localOffset++;
if (chunkType === 0x11) { // Sequential
// record data: Δelems incrs
for (let k = 0; k < ΔelemC; k++) {
const typeByte = view.getUint8(chunkOffset + 12 + k);
const incrCode = typeByte >> 4;
const incrType = codeToType[incrCode];
if (!incrType) throw new Error('Invalid incr type code');
localOffset += cCount * typeToBytes[incrType.name];
}
// elems
for (let k = 0; k < elemC; k++) {
const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
const type = codeToType[typeCode];
if (!type) throw new Error('Invalid elem type code');
localOffset += cCount * typeToBytes[type.name];
}
} else { // Interleaved
// Compute exact stride for interleaved record data
let ΔelemStride = 0;
for (let k = 0; k < ΔelemC; k++) {
const typeByte = view.getUint8(chunkOffset + 12 + k);
const incrCode = typeByte >> 4;
const incrType = codeToType[incrCode];
if (!incrType) throw new Error('Invalid incr type code');
ΔelemStride += typeToBytes[incrType.name];
}
let elemStride = 0;
for (let k = 0; k < elemC; k++) {
const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
const type = codeToType[typeCode];
if (!type) throw new Error('Invalid elem type code');
elemStride += typeToBytes[type.name];
}
const recordStride = ΔelemStride + elemStride;
localOffset += cCount * recordStride;
}
// pad after record
while (localOffset % 4 !== 0) localOffset++;
switch (chunkType) {
case 0x11:
chunks.push(new DougalBinaryChunkSequential(this, chunkOffset, localOffset));
break;
case 0x12:
chunks.push(new DougalBinaryChunkInterleaved(this, chunkOffset, localOffset));
break;
default:
throw new Error('Invalid chunk type');
}
chunkOffset += localOffset;
}
bundleOffset += 4 + length;
}
return chunks;
}
/** Return a ByteArray containing all data from all
* chunks including reconstructed i, j and incremental
* values as follows:
*
* <i_0> <i_1> … <i_x> // i values (constant)
* <j_0> <j_1> … <j_x> // j values (j0 + Δj*i)
* <Δelem_0_0> <Δelem_0_1> … <Δelem_0_x> // reconstructed Δelem0 (uses baseType)
* <Δelem_1_0> <Δelem_1_1> … <Δelem_1_x> // reconstructed Δelem1
* …
* <Δelem_y_0> <Δelem_y_1> … <Δelem_y_x> // reconstructed Δelem_y (last Δelem)
* <elem_0_0> <elem_0_1> … <elem_0_x> // First elem
* <elem_1_0> <elem_1_1> … <elem_1_x> // Second elem
* …
* <elem_z_0> <elem_z_1> … <elem_z_x> // Last elem
*
* It does not matter whether the underlying chunks are
* sequential or interleaved. This function will transform
* as necessary.
*
*/
getDataSequentially () {
const chunks = this.chunks();
if (chunks.length === 0) return new ArrayBuffer(0);
const firstChunk = chunks[0];
const ΔelemC = firstChunk.ΔelemCount;
const elemC = firstChunk.elemCount;
// Check consistency across chunks
for (const chunk of chunks) {
if (chunk.ΔelemCount !== ΔelemC || chunk.elemCount !== elemC) {
throw new Error('Inconsistent chunk structures');
}
}
// Get types from first chunk
const view = new DataView(firstChunk);
const ΔelemBaseTypes = [];
for (let k = 0; k < ΔelemC; k++) {
const typeByte = view.getUint8(12 + k);
const baseCode = typeByte & 0xF;
const baseType = codeToType[baseCode];
if (!baseType) throw new Error('Invalid base type code');
ΔelemBaseTypes.push(baseType);
}
const elemTypes = [];
for (let k = 0; k < elemC; k++) {
const typeCode = view.getUint8(12 + ΔelemC + k);
const type = codeToType[typeCode];
if (!type) throw new Error('Invalid elem type code');
elemTypes.push(type);
}
// Compute total records
const totalN = chunks.reduce((sum, c) => sum + c.jCount, 0);
// Compute sizes
const size_i = totalN * 2; // Uint16 for i
const size_j = totalN * 4; // Int32 for j
let size_Δelems = 0;
for (const t of ΔelemBaseTypes) {
size_Δelems += totalN * typeToBytes[t.name];
}
let size_elems = 0;
for (const t of elemTypes) {
size_elems += totalN * typeToBytes[t.name];
}
const totalSize = size_i + size_j + size_Δelems + size_elems;
const ab = new ArrayBuffer(totalSize);
const dv = new DataView(ab);
// Write i's
let off = 0;
for (const chunk of chunks) {
const i = chunk.i;
for (let idx = 0; idx < chunk.jCount; idx++) {
dv.setUint16(off, i, true);
off += 2;
}
}
// Write j's
off = size_i;
for (const chunk of chunks) {
const j0 = chunk.j0;
const Δj = chunk.Δj;
for (let idx = 0; idx < chunk.jCount; idx++) {
const j = j0 + idx * Δj;
dv.setInt32(off, j, true);
off += 4;
}
}
// Write Δelems
off = size_i + size_j;
for (let m = 0; m < ΔelemC; m++) {
const type = ΔelemBaseTypes[m];
const bytes = typeToBytes[type.name];
for (const chunk of chunks) {
const arr = chunk.Δelem(m);
for (let idx = 0; idx < chunk.jCount; idx++) {
writeTypedValue(dv, off, arr[idx], type);
off += bytes;
}
}
}
// Write elems
for (let m = 0; m < elemC; m++) {
const type = elemTypes[m];
const bytes = typeToBytes[type.name];
for (const chunk of chunks) {
const arr = chunk.elem(m);
for (let idx = 0; idx < chunk.jCount; idx++) {
writeTypedValue(dv, off, arr[idx], type);
off += bytes;
}
}
}
return ab;
}
/** Return a ByteArray containing all data from all
* chunks including reconstructed i, j and incremental
* values, interleaved as follows:
*
* <i_0> <j_0> <Δelem_0_0> <Δelem_1_0> … <Δelem_y_0> <elem_0_0> <elem_1_0> … <elem_z_0>
* <i_1> <j_1> <Δelem_0_1> <Δelem_1_1> … <Δelem_y_1> <elem_0_1> <elem_1_1> … <elem_z_1>
* …
* <i_x> <j_x> <Δelem_0_x> <Δelem_1_x> … <Δelem_y_x> <elem_0_x> <elem_1_x> … <elem_z_x>
*
* It does not matter whether the underlying chunks are
* sequential or interleaved. This function will transform
* as necessary.
*
*/
getDataInterleaved () {
const chunks = this.chunks();
if (chunks.length === 0) return new ArrayBuffer(0);
const firstChunk = chunks[0];
const ΔelemC = firstChunk.ΔelemCount;
const elemC = firstChunk.elemCount;
// Check consistency across chunks
for (const chunk of chunks) {
if (chunk.ΔelemCount !== ΔelemC || chunk.elemCount !== elemC) {
throw new Error('Inconsistent chunk structures');
}
}
// Get types from first chunk
const view = new DataView(firstChunk);
const ΔelemBaseTypes = [];
for (let k = 0; k < ΔelemC; k++) {
const typeByte = view.getUint8(12 + k);
const baseCode = typeByte & 0xF;
const baseType = codeToType[baseCode];
if (!baseType) throw new Error('Invalid base type code');
ΔelemBaseTypes.push(baseType);
}
const elemTypes = [];
for (let k = 0; k < elemC; k++) {
const typeCode = view.getUint8(12 + ΔelemC + k);
const type = codeToType[typeCode];
if (!type) throw new Error('Invalid elem type code');
elemTypes.push(type);
}
// Compute total records
const totalN = chunks.reduce((sum, c) => sum + c.jCount, 0);
// Compute record size
const recordSize = 2 + 4 + // i (Uint16) + j (Int32)
ΔelemBaseTypes.reduce((sum, t) => sum + typeToBytes[t.name], 0) +
elemTypes.reduce((sum, t) => sum + typeToBytes[t.name], 0);
const totalSize = totalN * recordSize;
const ab = new ArrayBuffer(totalSize);
const dv = new DataView(ab);
let off = 0;
for (const chunk of chunks) {
const i = chunk.i;
const j0 = chunk.j0;
const Δj = chunk.Δj;
for (let idx = 0; idx < chunk.jCount; idx++) {
dv.setUint16(off, i, true);
off += 2;
const j = j0 + idx * Δj;
dv.setInt32(off, j, true);
off += 4;
for (let m = 0; m < ΔelemC; m++) {
const type = ΔelemBaseTypes[m];
const bytes = typeToBytes[type.name];
const arr = chunk.Δelem(m);
writeTypedValue(dv, off, arr[idx], type);
off += bytes;
}
for (let m = 0; m < elemC; m++) {
const type = elemTypes[m];
const bytes = typeToBytes[type.name];
const arr = chunk.elem(m);
writeTypedValue(dv, off, arr[idx], type);
off += bytes;
}
}
}
return ab;
}
get records () {
const data = [];
for (const record of this) {
data.push(record.slice(1));
}
return data;
}
[Symbol.iterator]() {
const chunks = this.chunks();
let chunkIndex = 0;
let chunkIterator = chunks.length > 0 ? chunks[0][Symbol.iterator]() : null;
return {
next() {
if (!chunkIterator) {
return { done: true };
}
let result = chunkIterator.next();
while (result.done && chunkIndex < chunks.length - 1) {
chunkIndex++;
chunkIterator = chunks[chunkIndex][Symbol.iterator]();
result = chunkIterator.next();
}
return result;
}
};
}
}
class DougalBinaryChunkSequential extends ArrayBuffer {
constructor (buffer, offset, length) {
super(length);
new Uint8Array(this).set(new Uint8Array(buffer, offset, length));
this._ΔelemCaches = new Array(this.ΔelemCount);
this._elemCaches = new Array(this.elemCount);
this._ΔelemBlockOffsets = null;
this._elemBlockOffsets = null;
this._recordOffset = null;
}
_getRecordOffset() {
if (this._recordOffset !== null) return this._recordOffset;
const view = new DataView(this);
const ΔelemC = this.ΔelemCount;
const elemC = this.elemCount;
let recordOffset = 12 + ΔelemC + elemC;
for (let k = 0; k < ΔelemC; k++) {
const tb = view.getUint8(12 + k);
const bc = tb & 0xF;
const bt = codeToType[bc];
recordOffset += typeToBytes[bt.name];
}
while (recordOffset % 4 !== 0) recordOffset++;
this._recordOffset = recordOffset;
return recordOffset;
}
_initBlockOffsets() {
if (this._ΔelemBlockOffsets !== null) return;
const view = new DataView(this);
const count = this.jCount;
const ΔelemC = this.ΔelemCount;
const elemC = this.elemCount;
const recordOffset = this._getRecordOffset();
this._ΔelemBlockOffsets = [];
let o = recordOffset;
for (let k = 0; k < ΔelemC; k++) {
this._ΔelemBlockOffsets[k] = o;
const tb = view.getUint8(12 + k);
const ic = tb >> 4;
const it = codeToType[ic];
o += count * typeToBytes[it.name];
}
this._elemBlockOffsets = [];
for (let k = 0; k < elemC; k++) {
this._elemBlockOffsets[k] = o;
const tc = view.getUint8(12 + ΔelemC + k);
const t = codeToType[tc];
o += count * typeToBytes[t.name];
}
}
/** Return the user-defined value
*/
get udv () {
return new DataView(this).getUint8(1);
}
/** Return the number of j elements in this chunk
*/
get jCount () {
return new DataView(this).getUint16(2, true);
}
/** Return the i value in this chunk
*/
get i () {
return new DataView(this).getUint16(4, true);
}
/** Return the j0 value in this chunk
*/
get j0 () {
return new DataView(this).getUint16(6, true);
}
/** Return the Δj value in this chunk
*/
get Δj () {
return new DataView(this).getInt16(8, true);
}
/** Return the Δelem_count value in this chunk
*/
get ΔelemCount () {
return new DataView(this).getUint8(10);
}
/** Return the elem_count value in this chunk
*/
get elemCount () {
return new DataView(this).getUint8(11);
}
/** Return a TypedArray (e.g., Uint16Array, …) for the n-th Δelem in the chunk
*/
Δelem (n) {
if (this._ΔelemCaches[n]) return this._ΔelemCaches[n];
if (n < 0 || n >= this.ΔelemCount) throw new Error(`Invalid Δelem index: ${n}`);
const view = new DataView(this);
const count = this.jCount;
const ΔelemC = this.ΔelemCount;
const typeByte = view.getUint8(12 + n);
const baseCode = typeByte & 0xF;
const incrCode = typeByte >> 4;
const baseType = codeToType[baseCode];
const incrType = codeToType[incrCode];
if (!baseType || !incrType) throw new Error('Invalid type codes for Δelem');
// Find offset for initial value of this Δelem
let initialOffset = 12 + ΔelemC + this.elemCount;
for (let k = 0; k < n; k++) {
const tb = view.getUint8(12 + k);
const bc = tb & 0xF;
const bt = codeToType[bc];
initialOffset += typeToBytes[bt.name];
}
let current = readTypedValue(view, initialOffset, baseType);
// Advance to start of record data (after all initials and pad)
const recordOffset = this._getRecordOffset();
// Find offset for deltas of this Δelem (skip previous Δelems' delta blocks)
this._initBlockOffsets();
const deltaOffset = this._ΔelemBlockOffsets[n];
// Reconstruct the array
const arr = new baseType(count);
const isBigInt = baseType === BigInt64Array || baseType === BigUint64Array;
arr[0] = current;
for (let idx = 1; idx < count; idx++) {
let delta = readTypedValue(view, deltaOffset + idx * typeToBytes[incrType.name], incrType);
if (isBigInt) {
delta = BigInt(delta);
current += delta;
} else {
current += delta;
}
arr[idx] = current;
}
this._ΔelemCaches[n] = arr;
return arr;
}
/** Return a TypedArray (e.g., Uint16Array, …) for the n-th elem in the chunk
*/
elem (n) {
if (this._elemCaches[n]) return this._elemCaches[n];
if (n < 0 || n >= this.elemCount) throw new Error(`Invalid elem index: ${n}`);
const view = new DataView(this);
const count = this.jCount;
const ΔelemC = this.ΔelemCount;
const elemC = this.elemCount;
const typeCode = view.getUint8(12 + ΔelemC + n);
const type = codeToType[typeCode];
if (!type) throw new Error('Invalid type code for elem');
// Find offset for this elem's data block
this._initBlockOffsets();
const elemOffset = this._elemBlockOffsets[n];
// Create and populate the array
const arr = new type(count);
const bytes = typeToBytes[type.name];
for (let idx = 0; idx < count; idx++) {
arr[idx] = readTypedValue(view, elemOffset + idx * bytes, type);
}
this._elemCaches[n] = arr;
return arr;
}
getRecord (index) {
if (index < 0 || index >= this.jCount) throw new Error(`Invalid record index: ${index}`);
const arr = [this.udv, this.i, this.j0 + index * this.Δj];
for (let m = 0; m < this.ΔelemCount; m++) {
const values = this.Δelem(m);
arr.push(values[index]);
}
for (let m = 0; m < this.elemCount; m++) {
const values = this.elem(m);
arr.push(values[index]);
}
return arr;
}
[Symbol.iterator]() {
let index = 0;
const chunk = this;
return {
next() {
if (index < chunk.jCount) {
return { value: chunk.getRecord(index++), done: false };
} else {
return { done: true };
}
}
};
}
}
class DougalBinaryChunkInterleaved extends ArrayBuffer {
constructor(buffer, offset, length) {
super(length);
new Uint8Array(this).set(new Uint8Array(buffer, offset, length));
this._incrStrides = [];
this._elemStrides = [];
this._incrOffsets = [];
this._elemOffsets = [];
this._recordStride = 0;
this._recordOffset = null;
this._initStrides();
this._ΔelemCaches = new Array(this.ΔelemCount);
this._elemCaches = new Array(this.elemCount);
}
_getRecordOffset() {
if (this._recordOffset !== null) return this._recordOffset;
const view = new DataView(this);
const ΔelemC = this.ΔelemCount;
const elemC = this.elemCount;
let recordOffset = 12 + ΔelemC + elemC;
for (let k = 0; k < ΔelemC; k++) {
const tb = view.getUint8(12 + k);
const bc = tb & 0xF;
const bt = codeToType[bc];
recordOffset += typeToBytes[bt.name];
}
while (recordOffset % 4 !== 0) recordOffset++;
this._recordOffset = recordOffset;
return recordOffset;
}
_initStrides() {
const view = new DataView(this);
const ΔelemC = this.ΔelemCount;
const elemC = this.elemCount;
// Compute incr strides and offsets
let incrOffset = 0;
for (let k = 0; k < ΔelemC; k++) {
const typeByte = view.getUint8(12 + k);
const incrCode = typeByte >> 4;
const incrType = codeToType[incrCode];
if (!incrType) throw new Error('Invalid incr type code');
this._incrOffsets.push(incrOffset);
const bytes = typeToBytes[incrType.name];
this._incrStrides.push(bytes);
incrOffset += bytes;
this._recordStride += bytes;
}
// Compute elem strides and offsets
let elemOffset = incrOffset;
for (let k = 0; k < elemC; k++) {
const typeCode = view.getUint8(12 + ΔelemC + k);
const type = codeToType[typeCode];
if (!type) throw new Error('Invalid elem type code');
this._elemOffsets.push(elemOffset);
const bytes = typeToBytes[type.name];
this._elemStrides.push(bytes);
elemOffset += bytes;
this._recordStride += bytes;
}
}
get udv() {
return new DataView(this).getUint8(1);
}
get jCount() {
return new DataView(this).getUint16(2, true);
}
get i() {
return new DataView(this).getUint16(4, true);
}
get j0() {
return new DataView(this).getUint16(6, true);
}
get Δj() {
return new DataView(this).getInt16(8, true);
}
get ΔelemCount() {
return new DataView(this).getUint8(10);
}
get elemCount() {
return new DataView(this).getUint8(11);
}
Δelem(n) {
if (this._ΔelemCaches[n]) return this._ΔelemCaches[n];
if (n < 0 || n >= this.ΔelemCount) throw new Error(`Invalid Δelem index: ${n}`);
const view = new DataView(this);
const count = this.jCount;
const ΔelemC = this.ΔelemCount;
const typeByte = view.getUint8(12 + n);
const baseCode = typeByte & 0xF;
const incrCode = typeByte >> 4;
const baseType = codeToType[baseCode];
const incrType = codeToType[incrCode];
if (!baseType || !incrType) throw new Error('Invalid type codes for Δelem');
// Find offset for initial value of this Δelem
let initialOffset = 12 + ΔelemC + this.elemCount;
for (let k = 0; k < n; k++) {
const tb = view.getUint8(12 + k);
const bc = tb & 0xF;
const bt = codeToType[bc];
initialOffset += typeToBytes[bt.name];
}
let current = readTypedValue(view, initialOffset, baseType);
// Find offset to start of record data
const recordOffset = this._getRecordOffset();
// Use precomputed offset for this Δelem
const deltaOffset = recordOffset + this._incrOffsets[n];
// Reconstruct the array
const arr = new baseType(count);
const isBigInt = baseType === BigInt64Array || baseType === BigUint64Array;
arr[0] = current;
for (let idx = 1; idx < count; idx++) {
let delta = readTypedValue(view, deltaOffset + idx * this._recordStride, incrType);
if (isBigInt) {
delta = BigInt(delta);
current += delta;
} else {
current += delta;
}
arr[idx] = current;
}
this._ΔelemCaches[n] = arr;
return arr;
}
elem(n) {
if (this._elemCaches[n]) return this._elemCaches[n];
if (n < 0 || n >= this.elemCount) throw new Error(`Invalid elem index: ${n}`);
const view = new DataView(this);
const count = this.jCount;
const ΔelemC = this.ΔelemCount;
const typeCode = view.getUint8(12 + ΔelemC + n);
const type = codeToType[typeCode];
if (!type) throw new Error('Invalid type code for elem');
// Find offset to start of record data
const recordOffset = this._getRecordOffset();
// Use precomputed offset for this elem (relative to start of record data)
const elemOffset = recordOffset + this._elemOffsets[n];
// Create and populate the array
const arr = new type(count);
const bytes = typeToBytes[type.name];
for (let idx = 0; idx < count; idx++) {
arr[idx] = readTypedValue(view, elemOffset + idx * this._recordStride, type);
}
this._elemCaches[n] = arr;
return arr;
}
getRecord (index) {
if (index < 0 || index >= this.jCount) throw new Error(`Invalid record index: ${index}`);
const arr = [this.udv, this.i, this.j0 + index * this.Δj];
for (let m = 0; m < this.ΔelemCount; m++) {
const values = this.Δelem(m);
arr.push(values[index]);
}
for (let m = 0; m < this.elemCount; m++) {
const values = this.elem(m);
arr.push(values[index]);
}
return arr;
}
[Symbol.iterator]() {
let index = 0;
const chunk = this;
return {
next() {
if (index < chunk.jCount) {
return { value: chunk.getRecord(index++), done: false };
} else {
return { done: true };
}
}
};
}
}
module.exports = { DougalBinaryBundle, DougalBinaryChunkSequential, DougalBinaryChunkInterleaved }
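// For illustration only: a hypothetical consumer of this module (the require
// path and the source of `arrayBuffer` are assumptions, not part of this file).
//
//   const { DougalBinaryBundle } = require('./DougalBinaryBundle');
//   const bundle = DougalBinaryBundle.clone(arrayBuffer);
//   console.log(bundle.bundleCount, 'bundles,', bundle.chunkCount, 'chunks');
//   for (const record of bundle) {
//     // record is [udv, i, j, ...Δelems, ...elems]
//   }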

View File

@@ -0,0 +1,327 @@
const codeToType = {
0: Int8Array,
1: Uint8Array,
2: Int16Array,
3: Uint16Array,
4: Int32Array,
5: Uint32Array,
7: Float32Array,
8: Float64Array,
9: BigInt64Array,
10: BigUint64Array
};
const typeToBytes = {
Int8Array: 1,
Uint8Array: 1,
Int16Array: 2,
Uint16Array: 2,
Int32Array: 4,
Uint32Array: 4,
Float32Array: 4,
Float64Array: 8,
BigInt64Array: 8,
BigUint64Array: 8
};
function sequential(binary) {
if (!(binary instanceof Uint8Array) || binary.length < 4) {
throw new Error('Invalid binary input');
}
const view = new DataView(binary.buffer, binary.byteOffset, binary.byteLength);
let offset = 0;
// Initialize result (assuming single i value for simplicity; extend for multiple i values if needed)
const result = { i: null, j: [], Δelems: [], elems: [] };
// Process bundles
while (offset < binary.length) {
// Read bundle header
if (offset + 4 > binary.length) throw new Error('Incomplete bundle header');
const bundleHeader = view.getUint32(offset, true);
if ((bundleHeader & 0xFF) !== 0x1C) throw new Error('Invalid bundle marker');
const bundleLength = bundleHeader >>> 8; // Unsigned shift: length is the upper 24 bits
offset += 4;
const bundleEnd = offset + bundleLength;
if (bundleEnd > binary.length) throw new Error('Bundle length exceeds input size');
// Process chunks in bundle
while (offset < bundleEnd) {
// Read chunk header
if (offset + 12 > bundleEnd) throw new Error('Incomplete chunk header');
const chunkType = view.getUint8(offset);
if (chunkType !== 0x11) throw new Error(`Unsupported chunk type: ${chunkType}`);
offset += 1; // Skip chunk type byte
offset += 1; // Skip udv
const count = view.getUint16(offset, true); offset += 2;
if (count > 65535) throw new Error('Chunk count exceeds 65535');
const iValue = view.getUint16(offset, true); offset += 2;
const j0 = view.getUint16(offset, true); offset += 2;
const Δj = view.getInt16(offset, true); offset += 2;
const ΔelemCount = view.getUint8(offset++); // Δelem_count
const elemCount = view.getUint8(offset++); // elem_count
// Set i value (assuming all chunks share the same i)
if (result.i === null) result.i = iValue;
else if (result.i !== iValue) throw new Error('Multiple i values not supported');
// Read preface (element types)
const ΔelemTypes = [];
for (let i = 0; i < ΔelemCount; i++) {
if (offset >= bundleEnd) throw new Error('Incomplete Δelem types');
const typeByte = view.getUint8(offset++);
const baseCode = typeByte & 0x0F;
const incrCode = typeByte >> 4;
if (!codeToType[baseCode] || !codeToType[incrCode]) {
throw new Error(`Invalid type code in Δelem: ${typeByte}`);
}
ΔelemTypes.push({ baseType: codeToType[baseCode], incrType: codeToType[incrCode] });
}
const elemTypes = [];
for (let i = 0; i < elemCount; i++) {
if (offset >= bundleEnd) throw new Error('Incomplete elem types');
const typeCode = view.getUint8(offset++);
if (!codeToType[typeCode]) throw new Error(`Invalid type code in elem: ${typeCode}`);
elemTypes.push(codeToType[typeCode]);
}
// Initialize Δelems and elems arrays if first chunk
if (!result.Δelems.length && ΔelemCount > 0) {
result.Δelems = Array(ΔelemCount).fill().map(() => []);
}
if (!result.elems.length && elemCount > 0) {
result.elems = Array(elemCount).fill().map(() => []);
}
// Read initial values for Δelems
const initialValues = [];
for (const { baseType } of ΔelemTypes) {
if (offset + typeToBytes[baseType.name] > bundleEnd) {
throw new Error('Incomplete initial values');
}
initialValues.push(readTypedValue(view, offset, baseType));
offset += typeToBytes[baseType.name];
}
// Skip padding
while (offset % 4 !== 0) {
if (offset >= bundleEnd) throw new Error('Incomplete padding after initial values');
offset++;
}
// Reconstruct j values
for (let idx = 0; idx < count; idx++) {
result.j.push(j0 + idx * Δj);
}
// Read record data (non-interleaved)
for (let i = 0; i < ΔelemCount; i++) {
let current = initialValues[i];
const values = result.Δelems[i];
const incrType = ΔelemTypes[i].incrType;
const isBigInt = typeof current === 'bigint';
for (let idx = 0; idx < count; idx++) {
if (offset + typeToBytes[incrType.name] > bundleEnd) {
throw new Error('Incomplete Δelem data');
}
let delta = readTypedValue(view, offset, incrType);
if (idx === 0) {
values.push(isBigInt ? Number(current) : current);
} else {
if (isBigInt) {
delta = BigInt(delta);
current += delta;
values.push(Number(current));
} else {
current += delta;
values.push(current);
}
}
offset += typeToBytes[incrType.name];
}
}
for (let i = 0; i < elemCount; i++) {
const values = result.elems[i];
const type = elemTypes[i];
const isBigInt = type === BigInt64Array || type === BigUint64Array;
for (let idx = 0; idx < count; idx++) {
if (offset + typeToBytes[type.name] > bundleEnd) {
throw new Error('Incomplete elem data');
}
let value = readTypedValue(view, offset, type);
values.push(isBigInt ? Number(value) : value);
offset += typeToBytes[type.name];
}
}
// Skip padding
while (offset % 4 !== 0) {
if (offset >= bundleEnd) throw new Error('Incomplete padding after record data');
offset++;
}
}
}
return result;
}
function interleaved(binary) {
if (!(binary instanceof Uint8Array) || binary.length < 4) {
throw new Error('Invalid binary input');
}
const view = new DataView(binary.buffer, binary.byteOffset, binary.byteLength);
let offset = 0;
// Initialize result (assuming single i value for simplicity; extend for multiple i values if needed)
const result = { i: null, j: [], Δelems: [], elems: [] };
// Process bundles
while (offset < binary.length) {
// Read bundle header
if (offset + 4 > binary.length) throw new Error('Incomplete bundle header');
const bundleHeader = view.getUint32(offset, true);
if ((bundleHeader & 0xFF) !== 0x1C) throw new Error('Invalid bundle marker');
const bundleLength = bundleHeader >>> 8; // Unsigned shift: length is the upper 24 bits
offset += 4;
const bundleEnd = offset + bundleLength;
if (bundleEnd > binary.length) throw new Error('Bundle length exceeds input size');
// Process chunks in bundle
while (offset < bundleEnd) {
// Read chunk header
if (offset + 12 > bundleEnd) throw new Error('Incomplete chunk header');
const chunkType = view.getUint8(offset);
if (chunkType !== 0x12) throw new Error(`Unsupported chunk type: ${chunkType}`);
offset += 1; // Skip chunk type byte
offset += 1; // Skip udv
const count = view.getUint16(offset, true); offset += 2;
if (count > 65535) throw new Error('Chunk count exceeds 65535');
const iValue = view.getUint16(offset, true); offset += 2;
const j0 = view.getUint16(offset, true); offset += 2;
const Δj = view.getInt16(offset, true); offset += 2;
const ΔelemCount = view.getUint8(offset++); // Δelem_count
const elemCount = view.getUint8(offset++); // elem_count
// Set i value (assuming all chunks share the same i)
if (result.i === null) result.i = iValue;
else if (result.i !== iValue) throw new Error('Multiple i values not supported');
// Read preface (element types)
const ΔelemTypes = [];
for (let i = 0; i < ΔelemCount; i++) {
if (offset >= bundleEnd) throw new Error('Incomplete Δelem types');
const typeByte = view.getUint8(offset++);
const baseCode = typeByte & 0x0F;
const incrCode = typeByte >> 4;
if (!codeToType[baseCode] || !codeToType[incrCode]) {
throw new Error(`Invalid type code in Δelem: ${typeByte}`);
}
ΔelemTypes.push({ baseType: codeToType[baseCode], incrType: codeToType[incrCode] });
}
const elemTypes = [];
for (let i = 0; i < elemCount; i++) {
if (offset >= bundleEnd) throw new Error('Incomplete elem types');
const typeCode = view.getUint8(offset++);
if (!codeToType[typeCode]) throw new Error(`Invalid type code in elem: ${typeCode}`);
elemTypes.push(codeToType[typeCode]);
}
// Initialize Δelems and elems arrays if first chunk
if (!result.Δelems.length && ΔelemCount > 0) {
result.Δelems = Array(ΔelemCount).fill().map(() => []);
}
if (!result.elems.length && elemCount > 0) {
result.elems = Array(elemCount).fill().map(() => []);
}
// Read initial values for Δelems
const initialValues = [];
for (const { baseType } of ΔelemTypes) {
if (offset + typeToBytes[baseType.name] > bundleEnd) {
throw new Error('Incomplete initial values');
}
initialValues.push(readTypedValue(view, offset, baseType));
offset += typeToBytes[baseType.name];
}
// Skip padding
while (offset % 4 !== 0) {
if (offset >= bundleEnd) throw new Error('Incomplete padding after initial values');
offset++;
}
// Reconstruct j values
for (let idx = 0; idx < count; idx++) {
result.j.push(j0 + idx * Δj);
}
// Read interleaved record data
for (let idx = 0; idx < count; idx++) {
// Read Δelems
for (let i = 0; i < ΔelemCount; i++) {
const values = result.Δelems[i];
const incrType = ΔelemTypes[i].incrType;
const isBigInt = typeof initialValues[i] === 'bigint';
if (offset + typeToBytes[incrType.name] > bundleEnd) {
throw new Error('Incomplete Δelem data');
}
let delta = readTypedValue(view, offset, incrType);
offset += typeToBytes[incrType.name];
if (idx === 0) {
values.push(isBigInt ? Number(initialValues[i]) : initialValues[i]);
} else {
if (isBigInt) {
delta = BigInt(delta);
initialValues[i] += delta;
values.push(Number(initialValues[i]));
} else {
initialValues[i] += delta;
values.push(initialValues[i]);
}
}
}
// Read elems
for (let i = 0; i < elemCount; i++) {
const values = result.elems[i];
const type = elemTypes[i];
const isBigInt = type === BigInt64Array || type === BigUint64Array;
if (offset + typeToBytes[type.name] > bundleEnd) {
throw new Error('Incomplete elem data');
}
let value = readTypedValue(view, offset, type);
values.push(isBigInt ? Number(value) : value);
offset += typeToBytes[type.name];
}
}
// Skip padding
while (offset % 4 !== 0) {
if (offset >= bundleEnd) throw new Error('Incomplete padding after record data');
offset++;
}
}
}
return result;
}
function readTypedValue(view, offset, type) {
switch (type) {
case Int8Array: return view.getInt8(offset);
case Uint8Array: return view.getUint8(offset);
case Int16Array: return view.getInt16(offset, true);
case Uint16Array: return view.getUint16(offset, true);
case Int32Array: return view.getInt32(offset, true);
case Uint32Array: return view.getUint32(offset, true);
case Float32Array: return view.getFloat32(offset, true);
case Float64Array: return view.getFloat64(offset, true);
case BigInt64Array: return view.getBigInt64(offset, true);
case BigUint64Array: return view.getBigUint64(offset, true);
default: throw new Error(`Unsupported type: ${type.name}`);
}
}
module.exports = { sequential, interleaved };
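// For illustration only: a hypothetical caller (the require path and the source
// of `bytes` are assumptions, not part of this file).
//
//   const { sequential } = require('./decode');
//   const { i, j, Δelems, elems } = sequential(bytes); // bytes: Uint8Array of 0x11 chunks
//   // j[k] lines up with Δelems[m][k] and elems[m][k] for every record k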

View File

@@ -0,0 +1,380 @@
const typeToCode = {
Int8Array: 0,
Uint8Array: 1,
Int16Array: 2,
Uint16Array: 3,
Int32Array: 4,
Uint32Array: 5,
Float32Array: 7, // Float16 not natively supported in JS, use Float32
Float64Array: 8,
BigInt64Array: 9,
BigUint64Array: 10
};
const typeToBytes = {
Int8Array: 1,
Uint8Array: 1,
Int16Array: 2,
Uint16Array: 2,
Int32Array: 4,
Uint32Array: 4,
Float32Array: 4,
Float64Array: 8,
BigInt64Array: 8,
BigUint64Array: 8
};
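// For illustration only: a hypothetical call to sequential() below. The record
// fields (line, sp, tstamp, depth) are assumptions; Δelems entries take
// { key, baseType, incrType } and elems entries take { key, type }, as read by
// the size calculations further down.
//
//   const buf = sequential(
//     shots,
//     s => s.line,  // i getter
//     s => s.sp,    // j getter
//     [{ key: s => s.tstamp, baseType: Float64Array, incrType: Float32Array }],
//     [{ key: s => s.depth, type: Float32Array }]
//   );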
function sequential(json, iGetter, jGetter, Δelems = [], elems = [], udv = 0) {
if (!Array.isArray(json) || !json.length) return new Uint8Array(0);
if (typeof iGetter !== 'function' || typeof jGetter !== 'function') throw new Error('i and j must be getter functions');
Δelems.forEach((elem, idx) => {
if (typeof elem.key !== 'function') throw new Error(`Δelems[${idx}].key must be a getter function`);
});
elems.forEach((elem, idx) => {
if (typeof elem.key !== 'function') throw new Error(`elems[${idx}].key must be a getter function`);
});
// Group records by i value
const groups = new Map();
for (const record of json) {
const iValue = iGetter(record);
if (iValue == null) throw new Error('Missing i value from getter');
if (!groups.has(iValue)) groups.set(iValue, []);
groups.get(iValue).push(record);
}
const maxBundleSize = 0xFFFFFF; // Max bundle length (24 bits)
const buffers = [];
// Process each group (i value)
for (const [iValue, records] of groups) {
// Sort records by j to ensure consistent order
records.sort((a, b) => jGetter(a) - jGetter(b));
const jValues = records.map(jGetter);
if (jValues.some(v => v == null)) throw new Error('Missing j value from getter');
// Split records into chunks based on Δj continuity
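// A new chunk starts whenever j stops following the arithmetic progression
// j0 + n * Δj, or when the current chunk reaches the 65535-record limit of the
// uint16 count field.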
const chunks = [];
let currentChunk = [records[0]];
let currentJ0 = jValues[0];
let currentΔj = records.length > 1 ? jValues[1] - jValues[0] : 0;
for (let idx = 1; idx < records.length; idx++) {
const chunkIndex = chunks.reduce((sum, c) => sum + c.records.length, 0);
const expectedJ = currentJ0 + (idx - chunkIndex) * currentΔj;
if (jValues[idx] !== expectedJ || idx - chunkIndex >= 65535) { // cap chunks at 65535 records (uint16 count)
chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
currentChunk = [records[idx]];
currentJ0 = jValues[idx];
currentΔj = idx + 1 < records.length ? jValues[idx + 1] - jValues[idx] : 0;
} else {
currentChunk.push(records[idx]);
}
}
if (currentChunk.length > 0) {
chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
}
// Calculate total size for all chunks in this group by simulating offsets
const chunkSizes = chunks.map(({ records: chunkRecords }) => {
if (chunkRecords.length > 65535) throw new Error(`Chunk size exceeds 65535 for i=${iValue}`);
let simulatedOffset = 0; // Relative to chunk start
simulatedOffset += 12; // Header
simulatedOffset += Δelems.length + elems.length; // Preface
simulatedOffset += Δelems.reduce((sum, e) => sum + typeToBytes[e.baseType.name], 0); // Initial values
while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after initial
simulatedOffset += chunkRecords.length * (
Δelems.reduce((sum, e) => sum + typeToBytes[e.incrType.name], 0) +
elems.reduce((sum, e) => sum + typeToBytes[e.type.name], 0)
); // Record data
while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after record
return simulatedOffset;
});
const totalChunkSize = chunkSizes.reduce((sum, size) => sum + size, 0);
// Start a new bundle if needed
const lastBundle = buffers[buffers.length - 1];
if (!lastBundle || lastBundle.offset + totalChunkSize > maxBundleSize) {
buffers.push({ offset: 4, buffer: null, view: null });
}
// Initialize DataView for current bundle
const currentBundle = buffers[buffers.length - 1];
if (!currentBundle.buffer) {
const requiredSize = totalChunkSize + 4;
currentBundle.buffer = new ArrayBuffer(requiredSize);
currentBundle.view = new DataView(currentBundle.buffer);
}
// Process each chunk
for (const { records: chunkRecords, j0, Δj } of chunks) {
const chunkSize = chunkSizes.shift();
// Ensure buffer is large enough
if (currentBundle.offset + chunkSize > currentBundle.buffer.byteLength) {
const newSize = currentBundle.offset + chunkSize;
const newBuffer = new ArrayBuffer(newSize);
new Uint8Array(newBuffer).set(new Uint8Array(currentBundle.buffer));
currentBundle.buffer = newBuffer;
currentBundle.view = new DataView(newBuffer);
}
// Write chunk header
let offset = currentBundle.offset;
currentBundle.view.setUint8(offset++, 0x11); // Chunk type
currentBundle.view.setUint8(offset++, udv); // udv
currentBundle.view.setUint16(offset, chunkRecords.length, true); offset += 2; // count
currentBundle.view.setUint16(offset, iValue, true); offset += 2; // i
currentBundle.view.setUint16(offset, j0, true); offset += 2; // j0
currentBundle.view.setInt16(offset, Δj, true); offset += 2; // Δj
currentBundle.view.setUint8(offset++, Δelems.length); // Δelem_count
currentBundle.view.setUint8(offset++, elems.length); // elem_count
// Write chunk preface (element types)
for (const elem of Δelems) {
const baseCode = typeToCode[elem.baseType.name];
const incrCode = typeToCode[elem.incrType.name];
currentBundle.view.setUint8(offset++, (incrCode << 4) | baseCode);
}
for (const elem of elems) {
currentBundle.view.setUint8(offset++, typeToCode[elem.type.name]);
}
// Write initial values for Δelems
for (const elem of Δelems) {
const value = elem.key(chunkRecords[0]);
if (value == null) throw new Error('Missing Δelem value from getter');
writeTypedValue(currentBundle.view, offset, value, elem.baseType);
offset += typeToBytes[elem.baseType.name];
}
// Pad to 4-byte boundary
while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);
// Write record data (non-interleaved)
for (const elem of Δelems) {
let prev = elem.key(chunkRecords[0]);
for (let idx = 0; idx < chunkRecords.length; idx++) {
const value = idx === 0 ? 0 : elem.key(chunkRecords[idx]) - prev;
writeTypedValue(currentBundle.view, offset, value, elem.incrType);
offset += typeToBytes[elem.incrType.name];
prev = elem.key(chunkRecords[idx]);
}
}
for (const elem of elems) {
for (const record of chunkRecords) {
const value = elem.key(record);
if (value == null) throw new Error('Missing elem value from getter');
writeTypedValue(currentBundle.view, offset, value, elem.type);
offset += typeToBytes[elem.type.name];
}
}
// Pad to 4-byte boundary
while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);
// Update bundle offset
currentBundle.offset = offset;
}
// Update bundle header
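// Header layout: the low byte holds the bundle type (0x1C); the upper 24 bits
// hold the payload length, excluding this 4-byte header.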
currentBundle.view.setUint32(0, 0x1C | ((currentBundle.offset - 4) << 8), true);
}
// Combine buffers into final Uint8Array
const finalLength = buffers.reduce((sum, b) => sum + b.offset, 0);
const result = new Uint8Array(finalLength);
let offset = 0;
for (const { buffer, offset: bundleOffset } of buffers) {
result.set(new Uint8Array(buffer, 0, bundleOffset), offset);
offset += bundleOffset;
}
return result;
}
function interleaved(json, iGetter, jGetter, Δelems = [], elems = [], udv = 0) {
if (!Array.isArray(json) || !json.length) return new Uint8Array(0);
if (typeof iGetter !== 'function' || typeof jGetter !== 'function') throw new Error('i and j must be getter functions');
Δelems.forEach((elem, idx) => {
if (typeof elem.key !== 'function') throw new Error(`Δelems[${idx}].key must be a getter function`);
});
elems.forEach((elem, idx) => {
if (typeof elem.key !== 'function') throw new Error(`elems[${idx}].key must be a getter function`);
});
// Group records by i value
const groups = new Map();
for (const record of json) {
const iValue = iGetter(record);
if (iValue == null) throw new Error('Missing i value from getter');
if (!groups.has(iValue)) groups.set(iValue, []);
groups.get(iValue).push(record);
}
const maxBundleSize = 0xFFFFFF; // Max bundle length (24 bits)
const buffers = [];
// Process each group (i value)
for (const [iValue, records] of groups) {
// Sort records by j to ensure consistent order
records.sort((a, b) => jGetter(a) - jGetter(b));
const jValues = records.map(jGetter);
if (jValues.some(v => v == null)) throw new Error('Missing j value from getter');
// Split records into chunks based on Δj continuity
const chunks = [];
let currentChunk = [records[0]];
let currentJ0 = jValues[0];
let currentΔj = records.length > 1 ? jValues[1] - jValues[0] : 0;
for (let idx = 1; idx < records.length; idx++) {
const chunkIndex = chunks.reduce((sum, c) => sum + c.records.length, 0);
const expectedJ = currentJ0 + (idx - chunkIndex) * currentΔj;
if (jValues[idx] !== expectedJ || idx - chunkIndex >= 65535) { // cap chunks at 65535 records (uint16 count)
chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
currentChunk = [records[idx]];
currentJ0 = jValues[idx];
currentΔj = idx + 1 < records.length ? jValues[idx + 1] - jValues[idx] : 0;
} else {
currentChunk.push(records[idx]);
}
}
if (currentChunk.length > 0) {
chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
}
// Calculate total size for all chunks in this group by simulating offsets
const chunkSizes = chunks.map(({ records: chunkRecords }) => {
if (chunkRecords.length > 65535) throw new Error(`Chunk size exceeds 65535 for i=${iValue}`);
let simulatedOffset = 0; // Relative to chunk start
simulatedOffset += 12; // Header
simulatedOffset += Δelems.length + elems.length; // Preface
simulatedOffset += Δelems.reduce((sum, e) => sum + typeToBytes[e.baseType.name], 0); // Initial values
while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after initial
simulatedOffset += chunkRecords.length * (
Δelems.reduce((sum, e) => sum + typeToBytes[e.incrType.name], 0) +
elems.reduce((sum, e) => sum + typeToBytes[e.type.name], 0)
); // Interleaved record data
while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after record
return simulatedOffset;
});
const totalChunkSize = chunkSizes.reduce((sum, size) => sum + size, 0);
// Start a new bundle if needed
const lastBundle = buffers[buffers.length - 1];
if (!lastBundle || lastBundle.offset + totalChunkSize > maxBundleSize) {
buffers.push({ offset: 4, buffer: null, view: null });
}
// Initialize DataView for current bundle
const currentBundle = buffers[buffers.length - 1];
if (!currentBundle.buffer) {
const requiredSize = totalChunkSize + 4;
currentBundle.buffer = new ArrayBuffer(requiredSize);
currentBundle.view = new DataView(currentBundle.buffer);
}
// Process each chunk
for (const { records: chunkRecords, j0, Δj } of chunks) {
const chunkSize = chunkSizes.shift();
// Ensure buffer is large enough
if (currentBundle.offset + chunkSize > currentBundle.buffer.byteLength) {
const newSize = currentBundle.offset + chunkSize;
const newBuffer = new ArrayBuffer(newSize);
new Uint8Array(newBuffer).set(new Uint8Array(currentBundle.buffer));
currentBundle.buffer = newBuffer;
currentBundle.view = new DataView(newBuffer);
}
// Write chunk header
let offset = currentBundle.offset;
currentBundle.view.setUint8(offset++, 0x12); // Chunk type
currentBundle.view.setUint8(offset++, udv); // udv
currentBundle.view.setUint16(offset, chunkRecords.length, true); offset += 2; // count
currentBundle.view.setUint16(offset, iValue, true); offset += 2; // i
currentBundle.view.setUint16(offset, j0, true); offset += 2; // j0
currentBundle.view.setInt16(offset, Δj, true); offset += 2; // Δj
currentBundle.view.setUint8(offset++, Δelems.length); // Δelem_count
currentBundle.view.setUint8(offset++, elems.length); // elem_count
// Write chunk preface (element types)
for (const elem of Δelems) {
const baseCode = typeToCode[elem.baseType.name];
const incrCode = typeToCode[elem.incrType.name];
currentBundle.view.setUint8(offset++, (incrCode << 4) | baseCode);
}
for (const elem of elems) {
currentBundle.view.setUint8(offset++, typeToCode[elem.type.name]);
}
// Write initial values for Δelems
for (const elem of Δelems) {
const value = elem.key(chunkRecords[0]);
if (value == null) throw new Error('Missing Δelem value from getter');
writeTypedValue(currentBundle.view, offset, value, elem.baseType);
offset += typeToBytes[elem.baseType.name];
}
// Pad to 4-byte boundary
while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);
// Write interleaved record data
const prevValues = Δelems.map(elem => elem.key(chunkRecords[0]));
for (let idx = 0; idx < chunkRecords.length; idx++) {
// Write Δelems increments
for (let i = 0; i < Δelems.length; i++) {
const elem = Δelems[i];
const value = idx === 0 ? 0 : elem.key(chunkRecords[idx]) - prevValues[i];
writeTypedValue(currentBundle.view, offset, value, elem.incrType);
offset += typeToBytes[elem.incrType.name];
prevValues[i] = elem.key(chunkRecords[idx]);
}
// Write elems
for (const elem of elems) {
const value = elem.key(chunkRecords[idx]);
if (value == null) throw new Error('Missing elem value from getter');
writeTypedValue(currentBundle.view, offset, value, elem.type);
offset += typeToBytes[elem.type.name];
}
}
// Pad to 4-byte boundary
while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);
// Update bundle offset
currentBundle.offset = offset;
}
// Update bundle header
currentBundle.view.setUint32(0, 0x1C | ((currentBundle.offset - 4) << 8), true);
}
// Combine buffers into final Uint8Array
const finalLength = buffers.reduce((sum, b) => sum + b.offset, 0);
const result = new Uint8Array(finalLength);
let offset = 0;
for (const { buffer, offset: bundleOffset } of buffers) {
result.set(new Uint8Array(buffer, 0, bundleOffset), offset);
offset += bundleOffset;
}
return result;
}
function writeTypedValue(view, offset, value, type) {
switch (type) {
case Int8Array: view.setInt8(offset, value); break;
case Uint8Array: view.setUint8(offset, value); break;
case Int16Array: view.setInt16(offset, value, true); break;
case Uint16Array: view.setUint16(offset, value, true); break;
case Int32Array: view.setInt32(offset, value, true); break;
case Uint32Array: view.setUint32(offset, value, true); break;
case Float32Array: view.setFloat32(offset, value, true); break;
case Float64Array: view.setFloat64(offset, value, true); break;
case BigInt64Array: view.setBigInt64(offset, BigInt(value), true); break;
case BigUint64Array: view.setBigUint64(offset, BigInt(value), true); break;
default: throw new Error(`Unsupported type: ${type.name}`);
}
}
module.exports = { sequential, interleaved };
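// Usage sketch (illustrative only, not part of the original file). The record
// fields `sequence`, `point`, `tstamp` and `depth` are hypothetical; any
// getters returning numeric values will do.
//
// const { sequential } = require('./encode');
// const bundle = sequential(
//   records,                                   // array of plain objects
//   r => r.sequence,                           // i getter (constant per group)
//   r => r.point,                              // j getter (arithmetic progression)
//   [{ key: r => r.tstamp, baseType: BigUint64Array, incrType: Int16Array }], // delta-encoded
//   [{ key: r => r.depth, type: Float32Array }]                               // stored as-is
// );
// // `bundle` is a Uint8Array of one or more 0x1C bundles, ready to send.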

View File

@@ -0,0 +1,139 @@
/** Binary encoder
*
* This module encodes scalar data from a grid-like source
* into a packed binary format for bandwidth efficiency and
* speed of access.
*
* Data are indexed by i & j values, with "i" being constant
* (e.g., a sequence or line number) and "j" expected to change
* by a constant, linear amount (e.g., point numbers). All data
* from consecutive "j" values will be encoded as a single array
* (or series of arrays if multiple values are encoded).
* If there is a jump in the "j" progression, a new "chunk" will
* be started with a new array (or series of arrays).
*
* Multiple values may be encoded per (i, j) pair, using any of
* the types supported by JavaScript's TypedArray except for
* Float16 and Uint8Clamped. Each variable can be encoded with
* a different size.
*
* Values may be encoded directly or as deltas from an initial
* value. The latter is particularly efficient when dealing with
* monotonically incrementing data, such as timestamps.
*
* The conceptual packet format for sequentially encoded data
* looks like this:
*
* <msg-type> <count: x> <i> <j0> <Δj>
*
* <Δelement_count: y>
* <element_count: z>
*
* <Δelement_1_type_base> … <Δelement_y_type_base>
* <Δelement_1_type_incr> … <Δelement_y_type_incr>
* <elem_1_type> … <elem_z_type>
*
* <Δelement_1_first> … <Δelement_y_first>
*
* <Δelem_1_0> … <Δelem_1_x>
* …
* <Δelem_y_0> … <Δelem_y_x>
* <elem_1_0> … <elem_1_x>
* …
* <elem_z_0> … <elem_z_x>
*
*
* The conceptual packet format for interleaved encoded data
* looks like this:
*
*
* <msg-type> <count: x> <i> <j0> <Δj>
*
* <Δelement_count: y>
* <element_count: z>
*
* <Δelement_1_type_base> … <Δelement_y_type_base>
* <Δelement_1_type_incr> … <Δelement_y_type_incr>
* <elem_1_type> … <elem_z_type>
*
* <Δelement_1_first> … <Δelement_y_first>
*
* <Δelem_1_0> <Δelem_2_0> … <Δelem_y_0> <elem_1_0> <elem_2_0> … <elem_z_0>
* <Δelem_1_1> <Δelem_2_1> … <Δelem_y_1> <elem_1_1> <elem_2_1> … <elem_z_1>
* …
* <Δelem_1_x> <Δelem_2_x> … <Δelem_y_x> <elem_1_x> <elem_2_x> … <elem_z_x>
*
*
* Usage example:
*
* json = [
* {
* sequence: 7,
* sailline: 5354,
* line: 5356,
* point: 1068,
* tstamp: 1695448704372,
* objrefraw: 3,
* objreffinal: 4
* },
* {
* sequence: 7,
* sailline: 5354,
* line: 5352,
* point: 1070,
* tstamp: 1695448693612,
* objrefraw: 2,
* objreffinal: 3
* },
* {
* sequence: 7,
* sailline: 5354,
* line: 5356,
* point: 1072,
* tstamp: 1695448684624,
* objrefraw: 3,
* objreffinal: 4
* }
* ];
*
* deltas = [
* { key: el => el.tstamp, baseType: BigUint64Array, incrType: Int16Array }
* ];
*
* elems = [
* { key: el => el.objrefraw, type: Uint8Array },
* { key: el => el.objreffinal, type: Uint8Array }
* ];
*
* i = el => el.sequence;
*
* j = el => el.point;
*
* bundle = encode(json, i, j, deltas, elems);
*
* // bundle:
*
* Uint8Array(40) [
* 36, 0, 0, 28, 17, 0, 3, 0, 7, 0,
* 44, 4, 2, 0, 1, 2, 42, 1, 1, 116,
* 37, 158, 192, 138, 1, 0, 0, 0, 0, 0,
* 248, 213, 228, 220, 3, 2, 3, 4, 3, 4
* ]
*
* decode(bundle);
*
* {
* i: 7,
* j: [ 1068, 1070, 1072 ],
* 'Δelems': [ [ 1695448704372, 1695448693612, 1695448684624 ] ],
* elems: [ [ 3, 2, 3 ], [ 4, 3, 4 ] ]
* }
*
*/
module.exports = {
encode: {...require('./encode')},
decode: {...require('./decode')},
...require('./classes')
};

View File

@@ -0,0 +1,12 @@
{
"name": "@dougal/binary",
"version": "1.0.0",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"description": ""
}

View File

@@ -0,0 +1,25 @@
class ConcurrencyLimiter {
constructor(maxConcurrent) {
this.maxConcurrent = maxConcurrent;
this.active = 0;
this.queue = [];
}
async enqueue(task) {
// Re-check after waking: another caller may have claimed the freed slot first
while (this.active >= this.maxConcurrent) {
await new Promise(resolve => this.queue.push(resolve));
}
this.active++;
try {
return await task();
} finally {
this.active--;
if (this.queue.length > 0) {
this.queue.shift()();
}
}
}
}
module.exports = ConcurrencyLimiter;
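// Usage sketch (illustrative only, not part of the original file). It assumes
// this file is the package entry point of @dougal/concurrency.
//
// const ConcurrencyLimiter = require('@dougal/concurrency');
// const limiter = new ConcurrencyLimiter(3);
// // At most three fetches run at once; the rest wait in the queue.
// const pages = await Promise.all(
//   urls.map(url => limiter.enqueue(() => fetch(url)))
// );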

View File

@@ -0,0 +1,12 @@
{
"name": "@dougal/concurrency",
"version": "1.0.0",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"description": ""
}

View File

@@ -0,0 +1,75 @@
class Organisation {
constructor (data) {
this.read = !!data?.read;
this.write = !!data?.write;
this.edit = !!data?.edit;
this.other = {};
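// Return a Proxy so that the well-known operations (read/write/edit) live as
// own properties while any other operation name is kept in `other`; every
// assignment is coerced to a boolean.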
return new Proxy(this, {
get (target, prop) {
if (prop in target) {
return target[prop]
} else {
return target.other[prop];
}
},
set (target, prop, value) {
const newValue = Boolean(value);
if (["read", "write", "edit"].includes(prop)) {
target[prop] = newValue;
} else {
target.other[prop] = newValue;
}
return true;
}
});
}
toJSON () {
return {
read: this.read,
write: this.write,
edit: this.edit,
...this.other
}
}
toString (replacer, space) {
return JSON.stringify(this.toJSON(), replacer, space);
}
/** Limit the operations to only those allowed by `other`
*/
filter (other) {
const filteredOrganisation = new Organisation();
filteredOrganisation.read = this.read && other.read;
filteredOrganisation.write = this.write && other.write;
filteredOrganisation.edit = this.edit && other.edit;
return filteredOrganisation;
}
intersect (other) {
return this.filter(other);
}
}
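// Usage sketch (illustrative only, not part of the original file); "approve"
// stands in for any operation name beyond read/write/edit.
//
// const org = new Organisation({ read: true });
// org.write = 1;          // coerced to boolean true
// org.approve = true;     // unknown operations are kept in `other` via the Proxy
// JSON.stringify(org);    // '{"read":true,"write":true,"edit":false,"approve":true}'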
if (typeof module !== 'undefined' && module.exports) {
module.exports = Organisation; // CJS export
}
// ESM export
if (typeof exports !== 'undefined' && !exports.default) {
exports.default = Organisation; // ESM export
}

View File

@@ -0,0 +1,225 @@
const Organisation = require('./Organisation');
class Organisations {
#values = {}
#overlord
static entries (orgs) {
return orgs.names().map(name => [name, orgs.get(name)]);
}
constructor (data, overlord) {
if (data instanceof Organisations) {
for (const [name, value] of Organisations.entries(data)) {
this.set(name, new Organisation(value));
}
} else if (data instanceof Object) {
for (const [name, value] of Object.entries(data)) {
this.set(name, new Organisation(value));
}
} else if (typeof data === "string" || data instanceof String) {
this.set(data, new Organisation());
} else if (typeof data !== "undefined") {
throw new Error("Invalid constructor argument");
}
if (overlord) {
this.#overlord = overlord;
}
}
get values () {
return this.#values;
}
get length () {
return this.names().length;
}
get overlord () {
return this.#overlord;
}
set overlord (v) {
this.#overlord = new Organisations(v);
}
/** Get the operations for `name`
*/
get (name) {
const key = Object.keys(this.values).find( k => k.toLowerCase() == name.toLowerCase() ) ?? name;
return this.values[key];
}
/** Set the operations for `name` to `value`
*
* If we have an overlord, ensure we cannot:
*
* 1. Add new organisations which the overlord
* is not a member of
* 2. Access operations that the overlord is not
* allowed to access
*/
set (name, value) {
name = String(name).trim();
const key = Object.keys(this.values).find( k => k.toLowerCase() == name.toLowerCase() ) ?? name;
const org = new Organisation(value);
if (this.overlord) {
const parent = this.overlord.get(key) ?? this.overlord.get("*");
if (parent) {
this.values[key] = parent.filter(org);
}
} else {
this.values[key] = new Organisation(value);
}
return this;
}
/** Enable the operation `op` in all organisations
*/
enableOperation (op) {
if (this.overlord) {
Object.keys(this.#values)
.filter( key => (this.overlord.get(key) ?? this.overlord.get("*"))?.[op] )
.forEach( key => this.#values[key][op] = true );
} else {
Object.values(this.#values).forEach( org => org[op] = true );
}
return this;
}
/** Disable the operation `op` in all organisations
*/
disableOperation (op) {
Object.values(this.#values).forEach( org => org[op] = false );
return this;
}
/** Create a new organisation object limited by the caller's rights
*
* The spawned Organisations instance will have the same organisations
* and rights as the caller minus the applied `mask`. With the default
* mask, the spawned object will inherit all rights except for `edit`
* rights.
*
* The "*" organisation must be explicitly assigned. It is not inherited.
*/
spawn (mask = {read: true, write: true, edit: false}) {
const parent = new Organisations();
const wildcard = this.get("*")?.edit; // If true, we can spawn everywhere
this.entries().forEach( ([k, v]) => {
// if (k != "*") { // This organisation is not inherited
if (v.edit || wildcard) { // We have the right to spawn in this organisation
const o = new Organisation({
read: v.read && mask.read,
write: v.write && mask.write,
edit: v.edit && mask.edit
});
parent.set(k, o);
}
// }
});
return new Organisations({}, parent);
}
remove (name) {
const key = Object.keys(this.values).find( k => k.toLowerCase() == name.toLowerCase() ) ?? name;
delete this.values[key];
}
/** Return the list of organisation names
*/
names () {
return Object.keys(this.values);
}
/** Like this.get(name), but without the case-insensitive key lookup
*/
value (name) {
return this.values[name];
}
/** Same as Object.entries()
*/
entries () {
return this.names().map( name => [ name, this.value(name) ] );
}
/** Return true if the named organisation is present
*/
has (name) {
return Boolean(this.value(name));
}
/** Return only those of our organisations
* and operations present in `other`
*/
filter (other) {
const filteredOrganisations = new Organisations();
const wildcard = other.value("*");
for (const [name, org] of this.entries()) {
const ownOrg = other.value(name) ?? wildcard;
if (ownOrg) {
filteredOrganisations.set(name, org.filter(ownOrg))
}
}
return filteredOrganisations;
}
/** Return only those organisations
* that have access to the required
* operation
*/
accessToOperation (op) {
const filteredOrganisations = new Organisations();
for (const [name, org] of this.entries()) {
if (org[op]) {
filteredOrganisations.set(name, org);
}
}
return filteredOrganisations;
}
toJSON () {
const obj = {};
for (const key in this.values) {
obj[key] = this.values[key].toJSON();
}
return obj;
}
toString (replacer, space) {
return JSON.stringify(this.toJSON(), replacer, space);
}
*[Symbol.iterator] () {
for (const [name, operations] of this.entries()) {
yield {name, operations};
}
}
}
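// Usage sketch (illustrative only, not part of the original file); the
// organisation name "acme" is hypothetical. An overlord caps the rights
// that can be granted downstream.
//
// const overlord = new Organisations({ acme: { read: true, write: true, edit: false } });
// const orgs = new Organisations({}, overlord);
// orgs.set("acme", { read: true, write: true, edit: true });
// orgs.get("acme").write;   // true
// orgs.get("acme").edit;    // false (capped by the overlord)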
if (typeof module !== 'undefined' && module.exports) {
module.exports = Organisations; // CJS export
}
// ESM export
if (typeof exports !== 'undefined' && !exports.default) {
exports.default = Organisations; // ESM export
}

View File

@@ -0,0 +1,5 @@
module.exports = {
Organisation: require('./Organisation'),
Organisations: require('./Organisations')
}

View File

@@ -0,0 +1,12 @@
{
"name": "@dougal/organisations",
"version": "1.0.0",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"description": ""
}

View File

@@ -0,0 +1,364 @@
const EventEmitter = require('events');
const { Organisations } = require('@dougal/organisations');
function randomUUID () {
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
const r = Math.random() * 16 | 0;
const v = c === 'x' ? r : (r & 0x3 | 0x8);
return v.toString(16);
});
}
class User extends EventEmitter {
// Valid field names
static fields = [ "ip", "host", "name", "email", "description", "colour", "active", "organisations", "meta" ]
static validUUID (str) {
const uuidv4Rx = /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
return uuidv4Rx.test(str);
}
static validIPv4 (str) {
const ipv4Rx = /^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\/([0-9]|[1-2][0-9]|3[0-2]))?$/;
return ipv4Rx.test(str);
}
static validIPv6 (str) {
const ipv6Rx = /^(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:){1,7}:|(?:[0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:){1,5}(?::[0-9a-fA-F]{1,4}){1,2}|(?:[0-9a-fA-F]{1,4}:){1,4}(?::[0-9a-fA-F]{1,4}){1,3}|(?:[0-9a-fA-F]{1,4}:){1,3}(?::[0-9a-fA-F]{1,4}){1,4}|(?:[0-9a-fA-F]{1,4}:){1,2}(?::[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:(?::[0-9a-fA-F]{1,4}){1,6}|:((?::[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(?::[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(?:ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?))|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?))))$/;
return ipv6Rx.test(str);
}
static validHostname (str) {
const hostnameRx = /^(?=.{1,253}$)(?:(?!-)[A-Za-z0-9-]{1,63}(?<!-)\.)+[A-Za-z]{2,}$/;
return hostnameRx.test(str);
}
#setString (k, v) {
if (typeof v === "undefined") {
this.values[k] = v;
} else {
this.values[k] = String(v).trim();
}
this.emit("changed", k, v);
this.#updateTimestamp();
}
#updateTimestamp (v) {
if (typeof v === "undefined") {
this.#timestamp = (new Date()).valueOf();
} else {
this.#timestamp = (new Date(v)).valueOf();
}
this.emit("last_modified", this.#timestamp);
}
// Create a new instance of `other`, where `other` is
// an instance of User or of a derived class
#clone (other = this) {
const clone = new this.constructor();
Object.assign(clone.values, other.values);
clone.organisations = new Organisations(other.organisations);
return clone;
}
values = {}
#timestamp
constructor (data) {
super();
User.fields.forEach( f => this[f] = data?.[f] );
this.values.id = data?.id ?? randomUUID();
this.values.active = !!this.active;
this.values.hash = data?.hash;
this.values.password = data?.password;
this.values.organisations = new Organisations(data?.organisations);
this.#updateTimestamp(data?.last_modified);
}
/*
* Getters
*/
get id () { return this.values.id }
get ip () { return this.values.ip }
get host () { return this.values.host }
get name () { return this.values.name }
get email () { return this.values.email }
get description () { return this.values.description }
get colour () { return this.values.colour }
get active () { return this.values.active }
get organisations () { return this.values.organisations }
get password () { return this.values.password }
get timestamp () { return new Date(this.#timestamp) }
/*
* Setters
*/
set id (v) {
if (typeof v === "undefined") {
this.values.id = randomUUID();
} else if (User.validUUID(v)) {
this.values.id = v;
} else {
throw new Error("Invalid ID format (must be UUIDv4)");
}
this.emit("changed", "id", this.values.id);
this.#updateTimestamp();
}
set ip (v) {
if (User.validIPv4(v) || User.validIPv6(v) || typeof v === "undefined") {
this.values.ip = v;
} else {
throw new Error("Invalid IP address or subnet");
}
this.emit("changed", "ip", this.values.ip);
this.#updateTimestamp();
}
set host (v) {
if (User.validHostname(v) || typeof v === "undefined") {
this.values.host = v;
} else {
throw new Error("Invalid hostname");
}
this.emit("changed", "host", this.values.host);
this.#updateTimestamp();
}
set name (v) {
this.#setString("name", v);
}
set email (v) {
// TODO should validate, but hey!
this.#setString("email", v);
}
set description (v) {
this.#setString("description", v);
}
set colour (v) {
this.#setString("colour", v);
}
set active (v) {
this.values.active = !!v;
this.emit("changed", "active", this.values.active);
this.#updateTimestamp();
}
set organisations (v) {
this.values.organisations = new Organisations(v);
this.emit("changed", "organisations", this.values.organisations);
this.#updateTimestamp();
}
set password (v) {
this.values.password = v;
this.emit("changed", "password", this.values.password);
this.#updateTimestamp();
}
/*
* Validation methods
*/
get errors () {
let err = [];
if (!this.id) err.push("ERR_NO_ID");
if (!this.name) err.push("ERR_NO_NAME");
if (!this.organisations.length) err.push("ERR_NO_ORG");
return err;
}
get isValid () {
return this.errors.length == 0;
}
/*
* Filtering methods
*/
filter (other) {
// const filteredUser = new User(this);
const filteredUser = this.#clone();
filteredUser.organisations = this.organisations.filter(other.organisations);
return filteredUser;
}
/** Return users that are visible to me.
*
* These are users with whom we share at least one organisation
* to which we have read access.
*
* If we are wildcarded ("*"), we see everyone.
*
* If a peer is wildcarded, they can be seen by everyone.
*/
peers (list) {
if (this.organisations.value("*")) {
return list;
} else {
return list.filter( user => this.canRead(user) );
// return list.filter( user =>
// user.organisations.value("*") ||
// user.organisations.filter(this.organisations).length > 0
// this.organisations.filter(user.organisations).length > 0
// );
}
}
/** Return users that I can edit
*
* These users must belong to an organisation
* over which I have edit rights.
*
* If we are edit wildcarded, we can edit everyone.
*/
editablePeers (list) {
const editableOrgs = this.organisations.accessToOperation("edit");
if (editableOrgs.value("*")) {
return list;
} else {
return list.filter( user => this.canEdit(user) );
// editableOrgs.filter(user.organisations).length > 0
// );
}
}
/*
* General methods
*/
/** Return `true` if we are `other`
*/
is (other) {
return this.id == other.id;
}
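/** Return `true` if we may perform `operation` on `other`
 *
 * A wildcard ("*") grant on our side allows the operation everywhere;
 * otherwise we must hold `operation` on at least one organisation
 * associated with `other`. The argument may be a User, an Organisations
 * instance or a plain object (see the branches below for the exact rules).
 */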
canDo (operation, other) {
if (this.organisations.get('*')?.[operation])
return true;
if (other instanceof User) {
return other.organisations.names().some(name => this.organisations.get(name)?.[operation]);
} else if (other instanceof Organisations) {
return other.accessToOperation(operation).names().some(name => this.organisations.get(name)?.[operation]);
} else if (other?.organisations) {
return this.canDo(operation, new Organisations(other.organisations));
} else if (other instanceof Object) {
return this.canDo(operation, new Organisations(other));
}
return false;
}
canRead (other) {
return this.canDo("read", other);
}
canWrite (other) {
return this.canDo("write", other);
}
canEdit (other) {
return this.canDo("edit", other);
}
/** Perform an edit on another user
*
* Syntax: user.edit(other).to(another);
*
* Applies to `other` the changes described in `another`
* that are permitted to `user`. The argument `another`
* must be a plain object (not a `User` instance) with
* only the properties that are to be changed.
*
* NOTE: Organisations are not merged, they are overwritten
* and then filtered to ensure that the edited user does not
* gain more privileges than those granted to the editing
* user.
*
* Example:
*
* // This causes user test77 to set user x23 to
* // inactive
* test77.edit(x23).to({active: false})
*/
edit (other) {
if (this.canEdit(other)) {
return {
to: (another) => {
const newUser = Object.assign(this.#clone(other), another);
return newUser.filter(this);
}
}
}
// Do not fail or throw but return undefined
}
/** Create a new user similar to us except it doesn't have `edit` rights
* by default
*/
spawn (init = {}, mask = {read: true, write: true, edit: false}) {
// const user = new User(init);
const user = this.#clone(init);
user.organisations = this.organisations.accessToOperation("edit").disableOperation("edit");
user.organisations.overlord = this.organisations;
return user;
}
/*
* Conversion and presentation methods
*/
toJSON () {
return {
id: this.id,
ip: this.ip,
host: this.host,
name: this.name,
email: this.email,
description: this.description,
colour: this.colour,
active: this.active,
organisations: this.organisations.toJSON(),
password: this.password
}
}
toString (replacer, space) {
return JSON.stringify(this.toJSON(), replacer, space);
}
}
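// Usage sketch (illustrative only, not part of the original file); the
// organisation name "acme" is hypothetical.
//
// const admin = new User({ name: "Admin", organisations: { "*": { read: true, write: true, edit: true } } });
// const crew  = new User({ name: "Crew",  organisations: { acme: { read: true, write: true } } });
// admin.canEdit(crew);                       // true: wildcard edit
// crew.canEdit(admin);                       // false: no edit rights in common
// const edited = admin.edit(crew).to({ active: false });   // filtered copy with active = false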
if (typeof module !== 'undefined' && module.exports) {
module.exports = User; // CJS export
}
// ESM export
if (typeof exports !== 'undefined' && !exports.default) {
exports.default = User; // ESM export
}

View File

@@ -0,0 +1,4 @@
module.exports = {
User: require('./User')
}

View File

@@ -0,0 +1,15 @@
{
"name": "@dougal/user",
"version": "1.0.0",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"description": "",
"dependencies": {
"@dougal/organisations": "file:../organisations"
}
}

View File

@@ -3,6 +3,7 @@ module.exports = {
'@vue/cli-plugin-babel/preset'
],
plugins: [
'@babel/plugin-proposal-logical-assignment-operators'
'@babel/plugin-proposal-logical-assignment-operators',
'@babel/plugin-transform-private-methods'
]
}

File diff suppressed because it is too large Load Diff

View File

@@ -7,6 +7,14 @@
"build": "vue-cli-service build"
},
"dependencies": {
"@deck.gl/aggregation-layers": "^9.1.13",
"@deck.gl/geo-layers": "^9.1.13",
"@deck.gl/mesh-layers": "^9.1.14",
"@dougal/binary": "file:../../../modules/@dougal/binary",
"@dougal/concurrency": "file:../../../modules/@dougal/concurrency",
"@dougal/organisations": "file:../../../modules/@dougal/organisations",
"@dougal/user": "file:../../../modules/@dougal/user",
"@loaders.gl/obj": "^4.3.4",
"@mdi/font": "^7.2.96",
"buffer": "^6.0.3",
"core-js": "^3.6.5",
@@ -17,6 +25,7 @@
"leaflet-arrowheads": "^1.2.2",
"leaflet-realtime": "^2.2.0",
"leaflet.markercluster": "^1.4.1",
"lodash.debounce": "^4.0.8",
"marked": "^9.1.4",
"path-browserify": "^1.0.1",
"plotly.js-dist": "^2.27.0",
@@ -31,6 +40,7 @@
},
"devDependencies": {
"@babel/plugin-proposal-logical-assignment-operators": "^7.14.5",
"@babel/plugin-transform-private-methods": "^7.27.1",
"@vue/cli-plugin-babel": "^5.0.8",
"@vue/cli-plugin-router": "^5.0.8",
"@vue/cli-plugin-vuex": "^5.0.8",

File diff suppressed because it is too large Load Diff

View File

@@ -9,7 +9,7 @@
:color="snackColour"
:timeout="6000"
>
{{ snackText }}
<div v-html="snackText"></div>
<template v-slot:action="{ attrs }">
<v-btn
text
@@ -52,9 +52,8 @@ export default {
}),
computed: {
snackText () { return this.$store.state.snack.snackText },
snackText () { return this.$root.markdownInline(this.$store.state.snack.snackText) },
snackColour () { return this.$store.state.snack.snackColour },
...mapGetters(["serverEvent"])
},
watch: {
@@ -77,24 +76,41 @@ export default {
this.$store.commit('setSnackText', "");
}
},
async serverEvent (event) {
if (event.channel == "project" && event.payload?.schema == "public") {
// Projects changed in some way or another
await this.refreshProjects();
} else if (event.channel == ".jwt" && event.payload?.token) {
await this.setCredentials({token: event.payload?.token});
}
}
},
methods: {
handleJWT (context, {payload}) {
this.setCredentials({token: payload.token});
},
handleProject (context, {payload}) {
if (payload?.table == "public") {
this.refreshProjects();
}
},
registerNotificationHandlers () {
this.$store.dispatch('registerHandler', {
table: '.jwt',
handler: this.handleJWT
});
this.$store.dispatch('registerHandler', {
table: 'project',
handler: this.handleProject
});
},
...mapActions(["setCredentials", "refreshProjects"])
},
async mounted () {
// Local Storage values are always strings
this.$vuetify.theme.dark = localStorage.getItem("darkTheme") == "true";
this.registerNotificationHandlers();
await this.setCredentials();
this.refreshProjects();
}

View File

@@ -5,7 +5,7 @@
max-width="600"
>
<template v-slot:activator="{ on, attrs }">
<v-btn v-if="adminaccess"
<v-btn v-if="adminaccess()"
title="Create a new project from scratch. Generally, it's preferable to clone an existing project (right-click → Clone)"
small
outlined
@@ -31,6 +31,7 @@
<script>
import { mapActions, mapGetters } from 'vuex';
import DougalProjectSettingsNameIdGeodetics from '@/components/project-settings/name-id-geodetics'
import AccessMixin from '@/mixins/access';
export default {
name: 'DougalAppBarExtensionProjectList',
@@ -39,6 +40,10 @@ export default {
DougalProjectSettingsNameIdGeodetics
},
mixins: [
AccessMixin
],
data() {
return {
dialogOpen: false,
@@ -50,10 +55,6 @@ export default {
};
},
computed: {
...mapGetters(["adminaccess"])
},
methods: {
async save (data) {
this.dialogOpen = false;

View File

@@ -1,7 +1,7 @@
<template>
<v-tabs :value="tab" show-arrows v-if="page != 'configuration'">
<v-tab v-for="tab, index in tabs" :key="index" link :to="tabLink(tab.href)" v-text="tab.text"></v-tab>
<template v-if="adminaccess">
<template v-if="adminaccess()">
<v-spacer></v-spacer>
<v-tab :to="tabLink('configuration')" class="orange--text darken-3" title="Edit project settings"><v-icon small left color="orange darken-3">mdi-cog-outline</v-icon> Settings</v-tab>
</template>
@@ -15,9 +15,15 @@
<script>
import { mapActions, mapGetters } from 'vuex';
import AccessMixin from '@/mixins/access';
export default {
name: 'DougalAppBarExtensionProject',
mixins: [
AccessMixin
],
data() {
return {
tabs: [
@@ -44,7 +50,6 @@ export default {
return this.tabs.findIndex(t => t.href == this.page);
},
...mapGetters(["adminaccess"])
},
methods: {

View File

@@ -9,8 +9,17 @@
<v-spacer></v-spacer>
<v-icon v-if="serverConnected" class="mr-6" small title="Connected to server">mdi-lan-connect</v-icon>
<v-icon v-else class="mr-6" small color="red" title="Server connection lost (we'll reconnect automatically when the server comes back)">mdi-lan-disconnect</v-icon>
<template v-if="isFrontendRemote">
<template v-if="serverConnected">
<v-icon v-if="isGatewayReliable" class="mr-6" title="Connected to server via gateway">mdi-cloud-outline</v-icon>
<v-icon v-else class="mr-6" color="orange" title="Gateway connection is unreliable. Expect outages.">mdi-cloud-off</v-icon>
</template>
<v-icon v-else class="mr-6" color="red" :title="`Server connection lost: the gateway cannot reach the remote server.\nWe will reconnect automatically when the link with the remote server is restored.`">mdi-cloud-off</v-icon>
</template>
<template v-else>
<v-icon v-if="serverConnected" class="mr-6" small title="Connected to server">mdi-lan-connect</v-icon>
<v-icon v-else class="mr-6" small color="red" :title="`Server connection lost.\nWe will reconnect automatically when the server comes back.`">mdi-lan-disconnect</v-icon>
</template>
<dougal-notifications-control class="mr-6"></dougal-notifications-control>
@@ -51,13 +60,39 @@ export default {
DougalNotificationsControl
},
data () {
return {
lastGatewayErrorTimestamp: 0,
gatewayErrorSilencePeriod: 60000,
}
},
computed: {
year () {
const date = new Date();
return date.getUTCFullYear();
},
...mapState({serverConnected: state => state.notify.serverConnected})
...mapState({
serverConnected: state => state.notify.serverConnected,
isFrontendRemote: state => state.api.serverInfo?.["remote-frontend"] ?? false,
isGatewayReliable: state => state.api.isGatewayReliable
})
},
watch: {
isGatewayReliable (val) {
if (val === false) {
const elapsed = Date.now() - this.lastGatewayErrorTimestamp;
if (elapsed > this.gatewayErrorSilencePeriod) {
this.lastGatewayErrorTimestamp = Date.now();
this.$root.showSnack("Gateway error", "warning");
}
}
}
}
};
</script>

View File

@@ -3,8 +3,10 @@
<v-card-title class="headline">
Array inline / crossline error
<v-spacer></v-spacer>
<!--
<v-switch v-model="scatterplot" label="Scatterplot"></v-switch>
<v-switch class="ml-4" v-model="histogram" label="Histogram"></v-switch>
-->
</v-card-title>
<v-container fluid fill-height>
@@ -57,8 +59,8 @@ export default {
graph: [],
busy: false,
resizeObserver: null,
scatterplot: false,
histogram: false
scatterplot: true,
histogram: true
};
},

View File

@@ -3,8 +3,10 @@
<v-card-title class="headline">
Gun depth
<v-spacer></v-spacer>
<!--
<v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
<v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
-->
</v-card-title>
<v-container fluid fill-height>
@@ -59,7 +61,7 @@ export default {
busy: false,
resizeObserver: null,
shotpoint: true,
violinplot: false
violinplot: true
};
},

View File

@@ -3,8 +3,10 @@
<v-card-title class="headline">
Gun pressures
<v-spacer></v-spacer>
<!--
<v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
<v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
-->
</v-card-title>
<v-container fluid fill-height>
@@ -59,7 +61,7 @@ export default {
busy: false,
resizeObserver: null,
shotpoint: true,
violinplot: false
violinplot: true
};
},

View File

@@ -3,8 +3,10 @@
<v-card-title class="headline">
Gun timing
<v-spacer></v-spacer>
<!--
<v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
<v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
-->
</v-card-title>
<v-container fluid fill-height>
@@ -59,7 +61,7 @@ export default {
busy: false,
resizeObserver: null,
shotpoint: true,
violinplot: false
violinplot: true
};
},

View File

@@ -127,7 +127,7 @@ export default {
},
computed: {
...mapGetters(['user', 'writeaccess', 'loading', 'serverEvent'])
...mapGetters(['user', 'loading', 'serverEvent'])
},
methods: {

View File

@@ -39,7 +39,8 @@ export default {
default:
return {
editable: false,
displaylogo: false
displaylogo: false,
responsive: true
};
}
},
@@ -48,7 +49,8 @@ export default {
const base = {
font: {
color: this.$vuetify.theme.isDark ? "#fff" : undefined
}
},
autosize: true
};
switch (this.facet) {
@@ -274,18 +276,25 @@ export default {
replot () {
if (this.plotted) {
const ref = this.$refs.graph;
Plotly.relayout(ref, {
width: ref.clientWidth,
height: ref.clientHeight
});
if (ref && ref.clientWidth > 0 && ref.clientHeight > 0) {
Plotly.relayout(ref, {
width: ref.clientWidth,
height: ref.clientHeight
});
}
}
}
},
mounted () {
this.resizeObserver = new ResizeObserver(this.replot)
this.resizeObserver.observe(this.$refs.graph);
this.$nextTick( () => {
if (this.items?.length) {
this.plot();
}
this.resizeObserver = new ResizeObserver(this.replot)
this.resizeObserver.observe(this.$refs.graph);
});
},
beforeDestroy () {

View File

@@ -36,7 +36,8 @@ export default {
config () {
return {
editable: false,
displaylogo: false
displaylogo: false,
responsive: true
};
},
@@ -53,7 +54,8 @@ export default {
title: "Time (s)"
},
plot_bgcolor:"rgba(0,0,0,0)",
paper_bgcolor:"rgba(0,0,0,0)"
paper_bgcolor:"rgba(0,0,0,0)",
autosize: true
};
},
@@ -154,10 +156,12 @@ export default {
replot () {
if (this.plotted) {
const ref = this.$refs.graph;
Plotly.relayout(ref, {
width: ref.clientWidth,
height: ref.clientHeight
});
if (ref && ref.clientWidth > 0 && ref.clientHeight > 0) {
Plotly.relayout(ref, {
width: ref.clientWidth,
height: ref.clientHeight
});
}
}
},
@@ -190,8 +194,13 @@ export default {
},
mounted () {
this.resizeObserver = new ResizeObserver(this.replot)
this.resizeObserver.observe(this.$refs.graph);
this.$nextTick( () => {
if (this.items?.length) {
this.plot();
}
this.resizeObserver = new ResizeObserver(this.replot)
this.resizeObserver.observe(this.$refs.graph);
});
},
beforeDestroy () {

View File

@@ -0,0 +1,187 @@
<template>
<v-card v-if="comparison" class="ma-1">
<v-card-title>Comparison Summary: Baseline {{ baseline.pid }} vs Monitor {{ monitor.pid }}</v-card-title>
<v-card-text>
<v-row>
<v-col cols="12" md="6">
<h3>Deviation Statistics</h3>
<v-simple-table dense>
<template v-slot:default>
<thead>
<tr>
<th>Metric</th>
<th>I (m)</th>
<th>J (m)</th>
</tr>
</thead>
<tbody>
<tr>
<td>Mean (μ)</td>
<td>{{ comparison['μ'][0].toFixed(3) }}</td>
<td>{{ comparison['μ'][1].toFixed(3) }}</td>
</tr>
<tr>
<td>Std Dev (σ)</td>
<td>{{ comparison['σ'][0].toFixed(3) }}</td>
<td>{{ comparison['σ'][1].toFixed(3) }}</td>
</tr>
<tr>
<td>RMS</td>
<td>{{ comparison.rms[0].toFixed(3) }}</td>
<td>{{ comparison.rms[1].toFixed(3) }}</td>
</tr>
</tbody>
</template>
</v-simple-table>
<h3 class="mt-4">Error distribution</h3>
<ul>
<li title="Relative to I-axis positive direction">Primary Direction: {{ (comparison.primaryDirection * 180 / Math.PI).toFixed(2) }}°</li>
<li>Anisotropy: {{ comparison.anisotropy.toFixed(2) }}</li>
<li title="Length of the semi-major axis of the error ellipse">Semi-Major Axis: {{ semiMajorAxis.toFixed(2) }} m</li>
<li title="Length of the semi-minor axis of the error ellipse">Semi-Minor Axis: {{ semiMinorAxis.toFixed(2) }} m</li>
<li title="Area of the error ellipse">Error Ellipse Area: {{ ellipseArea.toFixed(2) }} </li>
</ul>
<h3 class="mt-4">Counts</h3>
<ul>
<li title="Unique line / point pairs found in both projects">Common Points: {{ comparison.common }}</li>
<li title="Total number of points compared, including reshoots, infills, etc.">Comparison Length: {{ comparison.length }}</li>
<li title="Number of points in the baseline project">Baseline Points: {{ comparison.baselineLength }} (Unique: {{ comparison.baselineUniqueLength }})</li>
<li title="Number of points in the monitor project">Monitor Points: {{ comparison.monitorLength }} (Unique: {{ comparison.monitorUniqueLength }})</li>
</ul>
<p class="mt-3" title="Date and time when the comparison was last performed">Computation timestamp: {{ new Date(comparison.tstamp).toLocaleString() }}</p>
</v-col>
<v-col cols="12" md="6">
<h3>Error Ellipse</h3>
<svg width="300" height="300" style="border: 1px solid #ccc;">
<g :transform="`translate(150, 150) scale(${ellipseScale})`">
<line x1="0" y1="-150" x2="0" y2="150" stroke="lightgray" stroke-dasharray="5,5"/>
<line x1="-150" y1="0" x2="150" y2="0" stroke="lightgray" stroke-dasharray="5,5"/>
<ellipse
:rx="Math.sqrt(comparison.eigenvalues[0])"
:ry="Math.sqrt(comparison.eigenvalues[1])"
:transform="`rotate(${ellipseAngle})`"
fill="none"
stroke="blue"
stroke-width="2"
/>
<line
:x1="0"
:y1="0"
:x2="Math.sqrt(comparison.eigenvalues[0]) * Math.cos(ellipseRad)"
:y2="Math.sqrt(comparison.eigenvalues[0]) * Math.sin(ellipseRad)"
stroke="red"
stroke-width="2"
arrow-end="classic-wide-long"
/>
<line
:x1="0"
:y1="0"
:x2="Math.sqrt(comparison.eigenvalues[1]) * Math.cos(ellipseRad + Math.PI / 2)"
:y2="Math.sqrt(comparison.eigenvalues[1]) * Math.sin(ellipseRad + Math.PI / 2)"
stroke="green"
stroke-width="2"
arrow-end="classic-wide-long"
/>
</g>
</svg>
<p class="text-caption">Ellipse scaled for visibility (factor: {{ ellipseScale.toFixed(1) }}). Axes represent sqrt(eigenvalues).</p>
</v-col>
</v-row>
</v-card-text>
</v-card>
</template>
<script>
export default {
name: "DougalGroupComparisonSummary",
props: {
baseline: { type: Object, required: true },
monitor: { type: Object, required: true },
comparison: { type: Object, required: true }
},
data () {
return {
};
},
computed: {
ellipseAngle () {
if (!this.comparison) return 0;
const ev = this.comparison.eigenvectors[0];
return Math.atan2(ev[1], ev[0]) * 180 / Math.PI;
},
ellipseRad () {
return this.ellipseAngle * Math.PI / 180;
},
ellipseRx () {
if (!this.comparison) return 0;
return Math.sqrt(this.comparison.eigenvalues[0]) * this.ellipseScale;
},
ellipseRy () {
if (!this.comparison) return 0;
return Math.sqrt(this.comparison.eigenvalues[1]) * this.ellipseScale;
},
ellipseScale () {
if (!this.comparison) return 1;
const maxSigma = Math.max(
Math.sqrt(this.comparison.eigenvalues[0]),
Math.sqrt(this.comparison.eigenvalues[1])
);
const maxMu = Math.max(
Math.abs(this.comparison['μ'][0]),
Math.abs(this.comparison['μ'][1])
);
//const maxExtent = maxMu + 3 * maxSigma;
const maxExtent = 20;
return 100 / maxExtent; // Adjust scale to fit within ~200 pixels diameter
},
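// Assumption: `eigenvalues` come from the deviation covariance matrix (m²),
// so their square roots are the 1-sigma semi-axes of the error ellipse in metres.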
ellipseArea () {
if (!this.comparison) return 0;
const a = Math.sqrt(this.comparison.eigenvalues[0]);
const b = Math.sqrt(this.comparison.eigenvalues[1]);
return Math.PI * a * b;
},
semiMajorAxis () {
if (!this.comparison) return 0;
return Math.max(
Math.sqrt(this.comparison.eigenvalues[0]),
Math.sqrt(this.comparison.eigenvalues[1])
);
},
semiMinorAxis () {
if (!this.comparison) return 0;
return Math.min(
Math.sqrt(this.comparison.eigenvalues[0]),
Math.sqrt(this.comparison.eigenvalues[1])
);
},
meanX () {
return this.comparison ? this.comparison['μ'][0] : 0;
},
meanY () {
return this.comparison ? this.comparison['μ'][1] : 0;
},
ellipseViewBox () {
return '-150 -150 300 300';
},
}
}
</script>

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,118 @@
<template>
<v-card class="ma-1">
<v-card-title>Group Repeatability Summary</v-card-title>
<v-card-text>
<p>Error ellipse area for each baseline-monitor pair. Lower values indicate better repeatability. Colors range from green (best) to red (worst).</p>
<v-simple-table dense>
<thead>
<tr>
<th>Baseline \ Monitor</th>
<th v-for="project in projects" :key="project.pid">{{ project.pid }}</th>
</tr>
</thead>
<tbody>
<tr v-for="(baselineProject, rowIndex) in projects" :key="baselineProject.pid">
<td>{{ baselineProject.pid }}</td>
<td v-for="(monitorProject, colIndex) in projects" :key="monitorProject.pid">
<v-tooltip v-if="colIndex > rowIndex" top>
<template v-slot:activator="{ on, attrs }">
<div
:style="{ backgroundColor: getEllipseAreaColor(baselineProject.pid, monitorProject.pid), color: 'white', textAlign: 'center', padding: '4px' }"
v-bind="attrs"
v-on="on"
@click="emitInput(baselineProject, monitorProject)"
>
{{ formatEllipseArea(baselineProject.pid, monitorProject.pid) }}
</div>
</template>
<span v-if="getComp(baselineProject.pid, monitorProject.pid)">
<div>σ_i: {{ getComp(baselineProject.pid, monitorProject.pid).meta['σ'][0].toFixed(2) }} m</div>
<div>σ_j: {{ getComp(baselineProject.pid, monitorProject.pid).meta['σ'][1].toFixed(2) }} m</div>
<div>Anisotropy: {{ getComp(baselineProject.pid, monitorProject.pid).meta.anisotropy.toFixed(0) }}</div>
<div>Ellipse Area: {{ getEllipseArea(baselineProject.pid, monitorProject.pid).toFixed(2) }} m²</div>
<div>Primary Direction: {{ formatPrimaryDirection(getComp(baselineProject.pid, monitorProject.pid)) }}°</div>
</span>
</v-tooltip>
</td>
</tr>
</tbody>
</v-simple-table>
</v-card-text>
</v-card>
</template>
<script>
export default {
name: 'DougalGroupRepeatabilitySummary',
props: {
comparisons: {
type: Array,
required: true
},
projects: {
type: Array,
required: true
}
},
data () {
return {
};
},
computed: {
compMap () {
return new Map(this.comparisons.map(c => [`${c.baseline_pid}-${c.monitor_pid}`, c]));
},
minEllipseArea () {
if (!this.comparisons.length) return 0;
return Math.min(...this.comparisons.map(c => {
const a = Math.sqrt(c.meta.eigenvalues[0]);
const b = Math.sqrt(c.meta.eigenvalues[1]);
return Math.PI * a * b;
}));
},
maxEllipseArea () {
if (!this.comparisons.length) return 0;
return Math.max(...this.comparisons.map(c => {
const a = Math.sqrt(c.meta.eigenvalues[0]);
const b = Math.sqrt(c.meta.eigenvalues[1]);
return Math.PI * a * b;
}));
}
},
methods: {
getComp (basePid, monPid) {
return this.compMap.get(`${basePid}-${monPid}`);
},
getEllipseArea (basePid, monPid) {
const comp = this.getComp(basePid, monPid);
if (!comp) return null;
const a = Math.sqrt(comp.meta.eigenvalues[0]);
const b = Math.sqrt(comp.meta.eigenvalues[1]);
return Math.PI * a * b;
},
formatEllipseArea (basePid, monPid) {
const val = this.getEllipseArea(basePid, monPid);
return val !== null ? val.toFixed(1) : '';
},
getEllipseAreaColor (basePid, monPid) {
const val = this.getEllipseArea(basePid, monPid);
if (val === null) return '';
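// Map the normalised area onto a hue from 120 (green, best repeatability) down to 0 (red, worst).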
const ratio = (val - this.minEllipseArea) / (this.maxEllipseArea - this.minEllipseArea);
const hue = (1 - ratio) * 120;
return `hsl(${hue}, 70%, 70%)`;
},
formatPrimaryDirection (comp) {
if (!comp) return '';
return (comp.meta.primaryDirection * 180 / Math.PI).toFixed(1);
},
emitInput (baselineProject, monitorProject) {
if (this.getComp(baselineProject.pid, monitorProject.pid)) {
this.$emit('input', baselineProject, monitorProject);
}
}
}
}
</script>

View File

@@ -2,6 +2,7 @@
<v-dialog
v-model="dialog"
max-width="500"
scrollable
style="z-index:2020;"
>
<template v-slot:activator="{ on, attrs }">
@@ -14,15 +15,54 @@
</template>
<v-card>
<v-card-title class="headline">
Dougal user support
</v-card-title>
<v-window v-model="page">
<v-window-item value="support">
<v-card-title class="headline">
Dougal user support
</v-card-title>
<v-card-text>
<p>You can get help or report a problem by sending an email to <a :href="`mailto:${email}`">{{email}}</a>. Please include as much information as possible about your problem or question; screenshots are often a good idea, and data files may also be attached.</p>
<v-card-text>
<p>You can get help or report a problem by sending an email to <a :href="`mailto:${email}`">{{email}}</a>. Please include as much information as possible about your problem or question; screenshots are often a good idea, and data files may also be attached.</p>
<p>When you write to the above address a ticket will be automatically created in the project's issue tracking system.</p>
</v-card-text>
<p>When you write to the above address a ticket will be automatically created in the project's issue tracking system.</p>
<v-alert dense type="info" border="left" outlined>
<div class="text-body-2">
You are using Dougal version:
<ul>
<li><code>{{clientVersion}}</code> (client)</li>
<li><code>{{serverVersion}}</code> (server)</li>
</ul>
</div>
</v-alert>
</v-card-text>
</v-window-item>
<v-window-item value="changelog">
<v-card-title class="headline">
Dougal release notes
</v-card-title>
<v-card-text>
<v-carousel v-model="releaseShown"
:continuous="false"
:cycle="false"
:show-arrows="true"
:hide-delimiters="true"
>
<v-carousel-item v-for="release in releaseHistory">
<pre>{{release}}</pre>
</v-carousel-item>
</v-carousel>
</v-card-text>
</v-window-item>
<v-window-item value="serverinfo">
<dougal-server-status :status="serverStatus"></dougal-server-status>
</v-window-item>
</v-window>
<v-divider></v-divider>
@@ -33,8 +73,7 @@
text
:href="`mailto:${email}?Subject=Question`"
>
<v-icon class="d-lg-none">mdi-help-circle</v-icon>
<span class="d-none d-lg-inline">Ask a question</span>
<v-icon title="Ask a question">mdi-help-circle</v-icon>
</v-btn>
<v-btn
@@ -42,10 +81,10 @@
text
href="mailto:dougal-support@aaltronav.eu?Subject=Bug report"
>
<v-icon class="d-lg-none">mdi-bug</v-icon>
<span class="d-none d-lg-inline">Report a bug</span>
<v-icon title="Report a bug">mdi-bug</v-icon>
</v-btn>
<!---
<v-btn
color="info"
text
@@ -54,6 +93,37 @@
>
<v-icon>mdi-rss</v-icon>
</v-btn>
--->
<v-btn
color="info"
text
title="View support info"
:input-value="page == 'support'"
@click="page = 'support'"
>
<v-icon>mdi-account-question</v-icon>
</v-btn>
<v-btn v-if="versionHistory"
color="info"
text
title="View release notes"
:input-value="page == 'changelog'"
@click="page = 'changelog'"
>
<v-icon>mdi-history</v-icon>
</v-btn>
<v-btn v-if="serverStatus"
color="info"
text
title="View server status"
:input-value="page == 'serverinfo'"
@click="page = 'serverinfo'"
>
<v-icon>mdi-server-network</v-icon>
</v-btn>
<v-spacer></v-spacer>
@@ -75,15 +145,111 @@
</template>
<script>
import { mapActions, mapGetters } from 'vuex';
import DougalServerStatus from './server-status';
export default {
name: 'DougalHelpDialog',
components: {
DougalServerStatus
},
data () {
return {
dialog: false,
email: "dougal-support@aaltronav.eu",
feed: btoa(encodeURIComponent("https://gitlab.com/wgp/dougal/software.atom?feed_token=XSPpvsYEny8YmH75Nz5W"))
feed: btoa(encodeURIComponent("https://gitlab.com/wgp/dougal/software.atom?feed_token=XSPpvsYEny8YmH75Nz5W")),
serverStatus: null,
clientVersion: process.env.DOUGAL_FRONTEND_VERSION ?? "(unknown)",
serverVersion: null,
versionHistory: null,
releaseHistory: [],
releaseShown: null,
page: "support",
lastUpdate: 0,
updateInterval: 12000,
refreshTimer: null
};
},
computed: {
sinceUpdate () {
return this.lastUpdate
? (Date.now() - this.lastUpdate)
: +Infinity;
}
},
watch: {
dialog(newVal) {
if (newVal) {
this.startAutoRefresh();
} else {
this.stopAutoRefresh();
}
},
page(newVal) {
if (newVal === 'serverinfo' && this.dialog) {
this.getServerStatus(); // Immediate update when switching to serverinfo
this.startAutoRefresh();
} else {
this.stopAutoRefresh();
}
}
},
methods: {
async getServerVersion () {
if (!this.serverVersion) {
const version = await this.api(['/version', {}, null, {silent:true}]);
this.serverVersion = version?.tag ?? "(unknown)";
if (version) this.lastUpdate = Date.now();
}
if (!this.versionHistory) {
const history = await this.api(['/version/history?count=6', {}, null, {silent:true}]);
this.releaseHistory = history;
this.versionHistory = history?.[this.serverVersion.replace(/-.*$/, "")] ?? null;
}
},
async getServerStatus () {
const status = await this.api(['/diagnostics', {}, null, {silent: true}]);
if (status) {
this.serverStatus = status;
this.lastUpdate = Date.now();
}
},
startAutoRefresh() {
if (this.refreshTimer) return; // Prevent multiple timers
this.refreshTimer = setInterval(() => {
if (this.dialog && this.page === 'serverinfo') {
this.getServerStatus();
// Optionally refresh server version if needed
// this.getServerVersion();
}
}, this.updateInterval);
},
stopAutoRefresh() {
if (this.refreshTimer) {
clearInterval(this.refreshTimer);
this.refreshTimer = null;
}
},
...mapActions(["api"])
},
async mounted () {
this.getServerVersion();
this.getServerStatus();
},
beforeDestroy() {
this.stopAutoRefresh(); // Clean up timer on component destruction
}
};

View File

@@ -1,8 +1,5 @@
<template>
<div class="line-status" v-if="sequences.length == 0">
<slot name="empty"></slot>
</div>
<div class="line-status" v-else-if="sequenceHref || plannedSequenceHref || pendingReshootHref">
<div class="line-status" v-if="sequenceHref || plannedSequenceHref || pendingReshootHref">
<router-link v-for="sequence in sequences" :key="sequence.sequence" v-if="sequenceHref"
class="sequence"
:class="sequence.status"
@@ -26,7 +23,7 @@
>
</router-link>
</div>
<div class="line-status" v-else>
<div class="line-status" v-else-if="sequences.length || plannedSequences.length || Object.keys(pendingReshoots).length">
<div v-for="sequence in sequences" :key="sequence.sequence"
class="sequence"
:class="sequence.status"
@@ -47,6 +44,9 @@
>
</div>
</div>
<div class="line-status" v-else>
<slot name="empty"></slot>
</div>
</template>
<style lang="stylus" scoped>

View File

@@ -32,16 +32,61 @@
</template>
<v-list dense>
<v-list-item :href="`/settings/equipment`">
<v-list-item-title>Equipment list</v-list-item-title>
<v-list-item href="/settings/equipment">
<v-list-item-content>
<v-list-item-title>Equipment list</v-list-item-title>
<v-list-item-subtitle>Manage the list of equipment reported in logs</v-list-item-subtitle>
</v-list-item-content>
<v-list-item-action><v-icon small>mdi-view-list</v-icon></v-list-item-action>
</v-list-item>
<template v-if="false">
<v-divider></v-divider>
<v-list-item href="/settings">
<v-list-item-content>
<v-list-item-title>Local settings</v-list-item-title>
<v-list-item-subtitle>Manage this vessel's configuration</v-list-item-subtitle>
</v-list-item-content>
<v-list-item-action><v-icon small>mdi-ferry</v-icon></v-list-item-action>
</v-list-item>
</template>
</v-list>
</v-menu>
<v-breadcrumbs :items="path"></v-breadcrumbs>
<v-breadcrumbs :items="path">
<template v-slot:item="{ item }">
<v-breadcrumbs-item :href="item.href" :disabled="item.disabled" v-if="item.organisations">
<v-tooltip bottom>
<template v-slot:activator="{ on, attrs }">
<span v-bind="attrs" v-on="on">{{ item.text }}</span>
</template>
<div class="text-overline">Project permissions</div>
<v-simple-table dense>
<template v-slot:default>
<thead>
<tr>
<th>Organisation</th><th>Read</th><th>Write</th><th>Edit</th>
</tr>
</thead>
<tbody>
<tr v-for="(operations, name) in item.organisations">
<td v-if="name == '*'"><v-chip small label color="primary">All</v-chip></td>
<td v-else><v-chip small label outlined>{{ name }}</v-chip></td>
<td>{{ operations.read ? "✔" : " " }}</td>
<td>{{ operations.write ? "✔" : " " }}</td>
<td>{{ operations.edit ? "✔" : " " }}</td>
</tr>
</tbody>
</template>
</v-simple-table>
</v-tooltip>
</v-breadcrumbs-item>
<v-breadcrumbs-item :href="item.href" :disabled="item.disabled" v-else>
{{ item.text }}
</v-breadcrumbs-item>
</template>
</v-breadcrumbs>
<template v-if="$route.name != 'Login'">
<v-btn text link to="/login" v-if="!user && !loading">Log in</v-btn>
@@ -50,10 +95,37 @@
<v-menu
offset-y
>
<template v-slot:activator="{on, attrs}">
<v-avatar :color="user.colour || 'primary'" :title="`${user.name} (${user.role})`" v-bind="attrs" v-on="on">
<span class="white--text">{{user.name.slice(0, 5)}}</span>
</v-avatar>
<template v-slot:activator="{ on: menu, attrs }">
<v-tooltip bottom>
<template v-slot:activator="{ on: tooltip }">
<v-avatar :color="user.colour || 'primary'" v-bind="attrs" v-on="{...tooltip, ...menu}">
<span class="white--text">{{user.name.slice(0, 5)}}</span>
</v-avatar>
</template>
<div class="text-overline">{{ user.name }}</div>
<v-card flat class="my-1" v-if="user.description">
<v-card-text class="pb-1" v-html="$root.markdown(user.description)">
</v-card-text>
</v-card>
<v-simple-table dense>
<template v-slot:default>
<thead>
<tr>
<th>Organisation</th><th>Read</th><th>Write</th><th>Edit</th>
</tr>
</thead>
<tbody>
<tr v-for="org in user.organisations">
<td v-if="org.name == '*'"><v-chip small label color="primary">All</v-chip></td>
<td v-else><v-chip small label outlined>{{ org.name }}</v-chip></td>
<td>{{ org.operations.read ? "✔" : " " }}</td>
<td>{{ org.operations.write ? "✔" : " " }}</td>
<td>{{ org.operations.edit ? "✔" : " " }}</td>
</tr>
</tbody>
</template>
</v-simple-table>
</v-tooltip>
</template>
<v-list dense>
@@ -66,8 +138,29 @@
</v-list-item>
<v-list-item link to="/logout" v-else>
<v-list-item-icon><v-icon small>mdi-logout</v-icon></v-list-item-icon>
<v-list-item-title>Log out</v-list-item-title>
<v-list-item-content>
<v-list-item-title>Log out</v-list-item-title>
</v-list-item-content>
</v-list-item>
<v-divider></v-divider>
<template v-if="canManageUsers">
<v-list-item link to="/users">
<v-list-item-icon><v-icon small>mdi-account-multiple</v-icon></v-list-item-icon>
<v-list-item-content>
<v-list-item-title>Manage users</v-list-item-title>
<v-list-item-subtitle>Add, edit and remove users</v-list-item-subtitle>
</v-list-item-content>
</v-list-item>
</template>
<template v-else-if="user && !user.autologin">
<v-list-item link :to="`/users/${user.id}`">
<v-list-item-icon><v-icon small>mdi-account</v-icon></v-list-item-icon>
<v-list-item-content>
<v-list-item-title>User profile</v-list-item-title>
<v-list-item-subtitle>Edit your user profile</v-list-item-subtitle>
</v-list-item-content>
</v-list-item>
</template>
</v-list>
</v-menu>
@@ -102,6 +195,19 @@ export default {
.pop()?.component;
},
title () {
return this.user.name + "\n" + [...this.user.organisations].map( ({name, operations}) => {
if (name == "*") name = "All organisations";
let str = name+": ";
str += [ "read", "write", "edit" ].map( op => operations[op] ? op : null ).filter( op => op ).join(", ");
return str;
}).join("\n")
},
canManageUsers () {
return this.user.organisations.accessToOperation("edit").length;
},
...mapGetters(['user', 'loading'])
},

View File

@@ -0,0 +1,112 @@
<template>
<v-row dense no-gutters>
<v-col>
<slot name="prepend"></slot>
</v-col>
<v-col cols="6">
<v-text-field
class="mr-5"
dense
label="Name"
:value="name"
:readonly="true"
></v-text-field>
</v-col>
<v-col>
<v-checkbox
class="mr-3"
label="Read"
v-model="operations.read"
:readonly="readonly"
></v-checkbox>
</v-col>
<v-col>
<v-checkbox
class="mr-3"
label="Write"
v-model="operations.write"
:readonly="readonly"
></v-checkbox>
</v-col>
<v-col>
<v-checkbox
class="mr-3"
label="Edit"
v-model="operations.edit"
:readonly="readonly"
></v-checkbox>
</v-col>
<v-col>
<!-- Just to fill the twelve-column grid -->
<!--
NOTE: this column could also be used for
a popdown menu with additional operations
if needed.
-->
</v-col>
<v-col>
<slot name="append"></slot>
</v-col>
</v-row>
</template>
<style scoped>
</style>
<script>
import { Organisations } from '@dougal/organisations';
export default {
name: "DougalOrganisationsItem",
props: {
name: String,
value: Object,
readonly: Boolean,
},
data () {
return {
operations: {...this.value}
}
},
watch: {
value: {
handler (newValue) {
this.operations = {...this.value};
},
deep: true,
},
operations: {
handler (newValue) {
if (["read", "write", "edit"].some( k => newValue[k] != this.value[k] )) {
// Only emit if a value has actually changed
this.$emit("input", {...newValue});
}
},
deep: true,
},
},
methods: {
reset () {
}
},
mounted () {
this.reset();
}
}
</script>

View File

@@ -0,0 +1,191 @@
<template>
<v-card>
<v-card-title>Organisations</v-card-title>
<v-card-subtitle>Organisation access</v-card-subtitle>
<v-card-text>
<v-form>
<v-container>
<dougal-organisations-item v-for="organisation in localOrganisations.names()"
:key="organisation"
:name="organisation"
:value="localOrganisations.get(organisation)"
@input="setOrganisation(organisation, $event)"
>
<template v-slot:append v-if="!readonly">
<v-btn
class="ml-3"
fab
text
small
title="Remove this organisation"
>
<v-icon
color="error"
@click="removeOrganisation(organisation)"
>mdi-minus</v-icon>
</v-btn>
</template>
</dougal-organisations-item>
<v-row no-gutters class="mb-2" v-if="!readonly">
<h4>Add organisation</h4>
</v-row>
<v-row no-gutters class="mb-2" v-if="!readonly">
<v-combobox v-if="canCreateOrganisations"
label="Organisation"
:items="remainingOrganisations"
v-model="organisationName"
@input.native="organisationName = $event.srcElement.value"
@keyup.enter="addOrganisation()"
></v-combobox>
<v-select v-else
label="Organisation"
:items="remainingOrganisations"
v-model="organisationName"
></v-select>
<v-btn
class="ml-3"
fab
text
small
title="Add organisation"
:disabled="!(organisationName && organisationName.length)"
@click="addOrganisation()"
>
<v-icon
color="primary"
>mdi-plus</v-icon>
</v-btn>
</v-row>
</v-container>
</v-form>
</v-card-text>
<v-card-actions>
<slot name="actions" v-bind="{ self, organisations, readonly, validationErrors, canCreateOrganisations }">
</slot>
</v-card-actions>
</v-card>
</template>
<script>
import { Organisations } from '@dougal/organisations';
import DougalOrganisationsItem from './organisations-item';
export default {
name: "DougalOrganisations",
components: {
DougalOrganisationsItem
},
props: {
self: Object,
organisations: Object,
readonly: Boolean
},
data () {
return {
organisationName: "",
localOrganisations: this.setLocalOrganisations(this.organisations)
}
},
computed: {
availableOrganisations () {
return this.self.organisations.names();
},
// Organisations available to add.
// These are the organisations in `availableOrganisations`
// minus any that have already been added.
// The special value "*" (meaning "every organisation")
// is not included.
remainingOrganisations () {
const orgs = [];
for (const org of this.availableOrganisations) {
if (org != "*" && !this.localOrganisations.has(org)) {
orgs.push(org);
}
}
return orgs;
},
canCreateOrganisations () {
return this.self.organisations.value("*")?.edit ?? false;
},
validationErrors () {
const errors = [];
// Check that there is at least one organisation
if (!this.localOrganisations.names().length) {
errors.push("ERR_NO_ORGS");
}
// Check if at least one organisation has edit rights
return errors;
},
},
watch: {
organisations (newValue) {
this.localOrganisations = this.setLocalOrganisations(newValue);
},
},
methods: {
setLocalOrganisations (value) {
return new Organisations(value);
},
setOrganisation(name, value) {
this.localOrganisations.set(name, value);
this.$emit("update:organisations", new Organisations(this.localOrganisations));
},
addOrganisation () {
const key = this.organisationName;
if (!this.localOrganisations.has(key)) {
this.localOrganisations.set(key);
this.$emit("update:organisations", this.localOrganisations);
}
this.organisationName = "";
},
removeOrganisation (key) {
if (this.localOrganisations.has(key)) {
this.localOrganisations.remove(key);
}
this.$emit("update:organisations", this.localOrganisations);
},
reset () {
},
save () {
},
back () {
this.$emit('close');
}
},
mounted () {
this.reset();
}
}
</script>

View File

@@ -4,15 +4,15 @@
<v-card-subtitle v-text="subtitle"></v-card-subtitle>
<v-card-text>
<v-tabs v-model="tab">
<v-tab>Paths</v-tab>
<v-tab>Globs</v-tab>
<v-tab v-if="pattern">Pattern</v-tab>
<v-tab v-if="lineNameInfo">Line info</v-tab>
<v-tab tab-value="paths">Paths</v-tab>
<v-tab tab-value="globs">Globs</v-tab>
<v-tab tab-value="pattern" v-if="pattern">Pattern</v-tab>
<v-tab tab-value="lineNameInfo" v-if="lineNameInfo">Line info</v-tab>
</v-tabs>
<v-tabs-items v-model="tab">
<v-tab-item>
<v-tab-item value="paths">
<v-card flat>
<v-card-subtitle>
A list of directories which are searched for matching files.
@@ -56,7 +56,7 @@
</v-card>
</v-tab-item>
<v-tab-item>
<v-tab-item value="globs">
<v-card flat>
<v-card-subtitle>
A list of <a href="https://en.wikipedia.org/wiki/Glob_(programming)" target="_blank">glob patterns</a> expanding to match the files of interest. Note that Linux is case-sensitive.
@@ -93,7 +93,7 @@
</v-card>
</v-tab-item>
<v-tab-item v-if="pattern">
<v-tab-item value="pattern" v-if="pattern">
<v-card flat>
<v-card-subtitle>
Regular expression that describes the file format definition. Used to capture information such as line and sequence number, etc.
@@ -153,7 +153,7 @@
</v-card>
</v-tab-item>
<v-tab-item v-if="lineNameInfo">
<v-tab-item value="lineNameInfo">
<v-card flat>
<v-card-subtitle>
Line information that will be extracted from file names
@@ -165,14 +165,14 @@
label="Example file name"
hint="Enter the name of a representative file to make it easier to visualise your configuration"
persistent-hint
v-model="lineNameInfo.example"
v-model="lineNameInfo_.example"
></v-text-field>
<dougal-fixed-string-decoder
:multiline="true"
:text="lineNameInfo.example"
:fixed.sync="lineNameInfo.fixed"
:fields.sync="lineNameInfo.fields"
:text="lineNameInfo_.example"
:fixed.sync="lineNameInfo_.fixed"
:fields.sync="lineNameInfo_.fields"
></dougal-fixed-string-decoder>
</v-form>
@@ -195,6 +195,23 @@
@click="reset"
>Reset</v-btn>
-->
<v-btn
v-if="tab=='lineNameInfo'"
:disabled="!validLineNameInfo"
@click="copyLineNameInfo"
title="Copy this definition into the clipboard. It can then be pasted into other sections or configurations."
>
<v-icon left>mdi-content-copy</v-icon>
Copy
</v-btn>
<v-btn
v-if="tab=='lineNameInfo'"
@click="pasteLineNameInfo"
title="Paste a line info definition copied from elsewhere"
>
<v-icon left>mdi-content-paste</v-icon>
Paste
</v-btn>
<v-spacer></v-spacer>
<v-btn
color="secondary"
@@ -253,6 +270,9 @@ export default {
},
computed: {
validLineNameInfo () {
return typeof this.lineNameInfo == 'object';
},
},
watch: {
@@ -285,6 +305,28 @@ export default {
methods: {
async copyLineNameInfo () {
await navigator.clipboard.writeText(JSON.stringify(this.lineNameInfo, null, 4));
this.showSnack(["Line name information copied to clipboard", "primary"]);
},
async pasteLineNameInfo () {
const text = await navigator.clipboard.readText();
try {
const data = JSON.parse(text);
if (["fixed", "fields", "example"].every( key => key in data )) {
this.$emit("update:lineNameInfo", data);
this.showSnack(["Line name information pasted from clipboard", "primary"]);
} else {
this.showSnack(["Clipboard contents are not valid line name information", "error"]);
}
} catch (err) {
if (err instanceof SyntaxError) {
this.showSnack(["Clipboard contents are not valid line name information", "error"]);
}
}
},
reset () {
this.globs_ = this.globs;
this.paths_ = this.paths;
@@ -302,6 +344,8 @@ export default {
this.$emit('close');
},
...mapActions(["showSnack"])
},
mounted () {

View File

@@ -0,0 +1,81 @@
<template>
<v-card flat>
<v-card-text>
<dougal-organisations
:self="user"
:organisations.sync="organisations_"
>
<template v-slot:actions>
<v-spacer></v-spacer>
<v-btn
color="secondary"
@click="back"
>Back</v-btn>
</template>
</dougal-organisations>
</v-card-text>
<v-card-actions>
</v-card-actions>
</v-card>
</template>
<script>
import { mapActions, mapGetters } from 'vuex'
import DougalOrganisations from '../organisations'
export default {
name: "DougalProjectSettingsOrganisations",
components: {
DougalOrganisations
},
props: {
organisations: Object,
value: Object
},
data () {
return {
}
},
computed: {
organisations_: {
get () {
return this.organisations;
},
set (v) {
this.$emit("input", {
...this.value,
organisations: v.toJSON()
});
}
},
...mapGetters(['user', 'loading', 'serverEvent'])
},
methods: {
reset () {
},
save () {
},
back () {
this.$emit('close');
}
},
mounted () {
this.reset();
}
}
</script>

View File

@@ -23,7 +23,34 @@
label="File format"
:items="preplotFileTypes"
v-model="fileType"
></v-select>
:append-outer-icon="fileClass == 'saillines' && fileType == 'x-sl+csv' ? 'mdi-help-circle-outline' : ''"
>
<template v-slot:append-outer="" v-if="fileClass == 'saillines' && fileType == 'x-sl+csv'">
<v-menu :close-on-content-click="false" v-model="tooltip">
<template v-slot:activator="{ on, attrs }">
<v-btn icon v-bind="attrs" title="Information on sailline CSV files" @click="tooltip = !tooltip"><v-icon>mdi-help-circle-outline</v-icon></v-btn>
</template>
<v-card>
<v-card-title>Saillines CSV format</v-card-title>
<v-card-text>
<p>
The input CSV should have the following comma-separated fields:
<dl>
<dt><code>sail_line</code></dt> <dd>The vessel line number</dd>
<dt><code>incr</code></dt> <dd><em>1</em> if this line is to be shot in the incrementing shot points direction, <em>0</em> or blank otherwise</dd>
<dt><code>ntba</code></dt> <dd><em>1</em> if this line is not to be acquired</dd>
<dt><code>remarks</code></dt> <dd>Any comments pertinent to the line. Supports <a target="_blank" href="https://commonmark.org/help/">Markdown</a>.</dd>
<dt><code>meta.colour</code></dt> <dd>An <a target="_blank" href="https://developer.mozilla.org/en-US/docs/Web/CSS/color_value">HTML colour</a>. Changes the background colour of the line in Dougal's Lines tab.</dd>
<dt><code>source_line</code></dt> <dd>The source line number. This column should be repeated once per gun array.</dd>
</dl>
</p>
<p>See an <a target="_blank" href="https://gitlab.com/-/snippets/4873650">example file</a> (<a title="Direct download" href="https://gitlab.com/-/snippets/4873650/raw/main/preplots-saillines-example.csv?inline=false"><v-icon dense small>mdi-paperclip</v-icon></a>)</p>
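<p>A minimal illustrative layout (sample values only; field order assumed from the list above):</p>
<pre>sail_line,incr,ntba,remarks,meta.colour,source_line
1001,1,,Illustrative row only,#ffcc00,3001
1002,0,1,Not to be acquired,,3002</pre>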
</v-card-text>
</v-card>
</v-menu>
</template>
</v-select>
<v-text-field v-if="value.class == 'S'"
class="mb-3"
@@ -218,6 +245,7 @@ export default {
{ text: "16 kiB", value: 1024*16 },
{ text: "32 kiB", value: 1024*32 },
],
tooltip: false,
};
},
@@ -491,17 +519,18 @@ export default {
methods: {
async getHead () {
console.log("getHead", this.value?.path);
if (this.value?.path) {
const url = `/files/${this.value.path}`;
const init = {
text: true,
headers: {
"Range": `bytes=0-${this.sampleSize}`
}
};
const head = await this.api([url, init]);
return head?.substring(0, head.lastIndexOf("\n")) || "";
const opts = {format: "text"};
const head = await this.api([url, init, null, opts]);
return typeof head === "string"
? head?.substring(0, head.lastIndexOf("\n")) || ""
: this.head ?? "";
}
return "";
},

View File

@@ -0,0 +1,213 @@
<template>
<v-card max-width="800" max-height="600" class="mx-auto" style="overflow-y: auto;">
<v-card-title class="headline">
Server status {{ status.hostname }}
</v-card-title>
<v-card-text>
<v-expansion-panels accordion>
<!-- System Info -->
<v-expansion-panel>
<v-expansion-panel-header>System Info</v-expansion-panel-header>
<v-expansion-panel-content>
<v-row>
<v-col cols="6">
<strong>Uptime:</strong> {{ formatUptime(status.uptime) }}
</v-col>
<v-col cols="6">
<strong>Load:</strong> {{ status.loadavg[0].toFixed(2) }} / {{ status.loadavg[1].toFixed(2) }} / {{ status.loadavg[2].toFixed(2) }}
<v-progress-linear
:value="loadAvgPercent"
:color="getLoadAvgColor(status.loadavg[0])"
height="6"
rounded
></v-progress-linear>
<div class="text-caption">
1-min Load: {{ status.loadavg[0].toFixed(2) }} ({{ loadAvgPercent.toFixed(1) }}% of max)
</div>
</v-col>
</v-row>
</v-expansion-panel-content>
</v-expansion-panel>
<!-- Memory -->
<v-expansion-panel>
<v-expansion-panel-header>Memory</v-expansion-panel-header>
<v-expansion-panel-content>
<v-progress-linear
:value="memoryUsedPercent"
:color="getProgressColor(memoryUsedPercent)"
height="10"
rounded
></v-progress-linear>
<div class="text-caption mt-2">
Used: {{ formatBytes(status.memory.total - status.memory.free) }} / Total: {{ formatBytes(status.memory.total) }} ({{ memoryUsedPercent.toFixed(1) }}%)
</div>
</v-expansion-panel-content>
</v-expansion-panel>
<!-- CPUs -->
<v-expansion-panel>
<v-expansion-panel-header>CPUs ({{ status.cpus.length }} cores)</v-expansion-panel-header>
<v-expansion-panel-content>
<v-row dense>
<v-col v-for="(cpu, index) in status.cpus" :key="index" cols="12" sm="6">
<v-card outlined class="pa-2">
<div class="text-caption">Core {{ index + 1 }}: {{ cpu.model }} @ {{ cpu.speed }} MHz</div>
<v-progress-linear
:value="cpuUsagePercent(cpu)"
:color="getProgressColor(cpuUsagePercent(cpu))"
height="8"
rounded
></v-progress-linear>
<div class="text-caption">
Usage: {{ cpuUsagePercent(cpu).toFixed(1) }}% (Idle: {{ cpuIdlePercent(cpu).toFixed(1) }}%)
</div>
</v-card>
</v-col>
</v-row>
</v-expansion-panel-content>
</v-expansion-panel>
<!-- Network Interfaces -->
<v-expansion-panel>
<v-expansion-panel-header>Network Interfaces</v-expansion-panel-header>
<v-expansion-panel-content>
<v-list dense>
<v-list-item v-for="(iface, name) in status.networkInterfaces" :key="name">
<v-list-item-content>
<v-list-item-title>{{ name }}</v-list-item-title>
<v-list-item-subtitle v-for="(addr, idx) in iface" :key="idx">
{{ addr.family }}: {{ addr.address }} (Netmask: {{ addr.netmask }})
</v-list-item-subtitle>
</v-list-item-content>
</v-list-item>
</v-list>
</v-expansion-panel-content>
</v-expansion-panel>
<!-- Storage -->
<v-expansion-panel>
<v-expansion-panel-header>Storage</v-expansion-panel-header>
<v-expansion-panel-content>
<!-- Root -->
<div class="mb-4">
<strong>Root (/):</strong>
<v-progress-linear
:value="status.storage.root.usedPercent"
:color="getProgressColor(status.storage.root.usedPercent)"
height="10"
rounded
></v-progress-linear>
<div class="text-caption">
Used: {{ formatBytes(status.storage.root.used) }} / Total: {{ formatBytes(status.storage.root.total) }} ({{ status.storage.root.usedPercent.toFixed(1) }}%)
</div>
</div>
<!-- Data subfolders -->
<div>
<strong>Data:</strong>
<v-expansion-panels flat>
<v-expansion-panel v-for="(folder, name) in status.storage.data" :key="name">
<v-expansion-panel-header disable-icon-rotate>{{ name }}</v-expansion-panel-header>
<v-expansion-panel-content>
<v-progress-linear
:value="folder.usedPercent"
:color="getProgressColor(folder.usedPercent)"
height="10"
rounded
></v-progress-linear>
<div class="text-caption">
Used: {{ formatBytes(folder.used) }} / Total: {{ formatBytes(folder.total) }} ({{ folder.usedPercent.toFixed(1) }}%)
</div>
</v-expansion-panel-content>
</v-expansion-panel>
</v-expansion-panels>
</div>
</v-expansion-panel-content>
</v-expansion-panel>
<!-- Database -->
<v-expansion-panel>
<v-expansion-panel-header>Database</v-expansion-panel-header>
<v-expansion-panel-content>
<div class="mb-2">
<strong>Total Size:</strong> {{ formatBytes(status.database.size) }}
</div>
<v-list dense>
<v-list-item v-for="(project, name) in status.database.projects" :key="name">
<v-list-item-content>
<v-list-item-title>{{ name }}</v-list-item-title>
<v-progress-linear
:value="project.percent"
:color="getProgressColor(project.percent)"
height="8"
rounded
></v-progress-linear>
<v-list-item-subtitle>
Size: {{ formatBytes(project.size) }} ({{ project.percent.toFixed(2) }}%)
</v-list-item-subtitle>
</v-list-item-content>
</v-list-item>
</v-list>
</v-expansion-panel-content>
</v-expansion-panel>
</v-expansion-panels>
</v-card-text>
</v-card>
</template>
<script>
export default {
name: "DougalServerStatus",
props: {
status: {
type: Object,
required: true
}
},
computed: {
memoryUsedPercent() {
return ((this.status.memory.total - this.status.memory.free) / this.status.memory.total) * 100;
},
loadAvgPercent() {
const maxLoad = this.status.cpus.length * 4; // Assume 4x cores as max for scaling
return Math.min((this.status.loadavg[0] / maxLoad) * 100, 100); // Cap at 100%
}
},
methods: {
getProgressColor(value) {
if (value >= 80) return 'error'; // Red for 80–100%
if (value >= 60) return 'warning'; // Yellow for 60–80%
return 'success'; // Green for 0–60%
},
getLoadAvgColor(load) {
const coreCount = this.status.cpus.length;
if (load >= coreCount * 2) return 'error'; // Red for load ≥ 2x cores
if (load >= coreCount) return 'warning'; // Yellow for load ≥ 1x cores but < 2x
return 'success'; // Green for load < 1x cores
},
formatBytes(bytes) {
if (bytes === 0) return '0 Bytes';
const k = 1024;
const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
},
formatUptime(seconds) {
const days = Math.floor(seconds / 86400);
seconds %= 86400;
const hours = Math.floor(seconds / 3600);
seconds %= 3600;
const minutes = Math.floor(seconds / 60);
return `${days}d ${hours}h ${minutes}m`;
},
cpuUsagePercent(cpu) {
const total = cpu.times.user + cpu.times.nice + cpu.times.sys + cpu.times.idle + cpu.times.irq;
return ((total - cpu.times.idle) / total) * 100;
},
cpuIdlePercent(cpu) {
const total = cpu.times.user + cpu.times.nice + cpu.times.sys + cpu.times.idle + cpu.times.irq;
return (cpu.times.idle / total) * 100;
}
}
};
</script>

View File

@@ -0,0 +1,256 @@
<template>
<v-card>
<v-card-title>
User {{ name }} <v-chip class="mx-3" small>{{id}}</v-chip>
<v-chip v-if="self.id == value.id"
small
color="primary"
>It's me!</v-chip>
</v-card-title>
<v-card-subtitle>User settings</v-card-subtitle>
<v-card-text>
<v-form>
<!--
<v-text-field
label="User ID"
hint="Unique user ID (read-only)"
persistent-hint
readonly
disabled
v-model="id"
>
</v-text-field>
-->
<v-switch
dense
label="Active"
:title="(self.id == value.id) ? 'You cannot make yourself inactive' : active ? 'Make this user inactive' : 'Make this user active'"
:disabled="self.id == value.id"
v-model="active"
></v-switch>
<label class="mr-3 pt-5">Colour
<v-menu v-model="colourMenu"
:close-on-content-click="false"
offset-y
>
<template v-slot:activator="{ on, attrs }">
<v-btn
:title="colour"
dense
small
icon
v-on="on"
><v-icon :color="colour">mdi-palette</v-icon>
</v-btn>
</template>
<v-color-picker
dot-size="25"
mode="hexa"
swatches-max-height="200"
v-model="colour"
></v-color-picker>
</v-menu>
</label>
<v-text-field
v-if="showIp || ip"
label="IP address"
hint="IP address or subnet specification for auto-login"
v-model="ip"
>
</v-text-field>
<v-text-field
v-if="showHost || host"
label="Host name"
hint="Hostname (for auto-login)"
v-model="host"
>
</v-text-field>
<v-text-field
label="Name"
hint="User name"
v-model="name"
>
</v-text-field>
<v-text-field
v-if="showPasswordField"
:type="visiblePassword ? 'text' : 'password'"
:append-icon="visiblePassword ? 'mdi-eye' : 'mdi-eye-off'"
@click:append="visiblePassword = !visiblePassword"
label="Password"
hint="User password"
v-model="password"
>
</v-text-field>
<v-text-field
label="Email"
hint="Email address"
v-model="email"
>
</v-text-field>
<v-textarea
class="mb-5"
label="Remarks"
hint="User description (visible to the user)"
auto-grow
v-model="description"
></v-textarea>
<dougal-organisations
:self="self"
:organisations.sync="organisations"
></dougal-organisations>
</v-form>
</v-card-text>
<v-card-actions>
<slot name="actions" v-bind="{ isValid, hasErrors, errors, dirty }"></slot>
</v-card-actions>
</v-card>
</template>
<script>
import { mapActions, mapGetters } from 'vuex';
import { User } from '@/lib/user';
import DougalOrganisations from './organisations'
export default {
name: "DougalUserSettings",
components: {
DougalOrganisations
},
props: {
value: Object,
self: Object, // User calling the dialogue
// The next three props determine whether the
// ip, host, and password fields are shown even
// when null / empty. If non-null, those fields
// are always shown
showIp: { type: Boolean, default: false },
showHost: { type: Boolean, default: false },
showPassword: { type: Boolean, default: false },
},
data () {
return {
colourMenu: null,
visiblePassword: false
}
},
computed: {
id () { return this.value.id },
ip: {
get () { return this.value.ip },
set (v) { this.input("ip", v) }
},
host: {
get () { return this.value.host },
set (v) { this.input("host", v) }
},
name: {
get () { return this.value.name },
set (v) { this.input("name", v) }
},
password: {
get () { return this.value.password },
set (v) { this.input("password", v) }
},
active: {
get () { return this.value.active },
set (v) { this.input("active", v) }
},
email: {
get () { return this.value.email },
set (v) { this.input("email", v) }
},
colour: {
get () { return this.value.colour },
set (v) { this.input("colour", v) }
},
description: {
get () { return this.value.description },
set (v) { this.input("description", v) }
},
organisations: {
get () { return this.value.organisations },
set (v) { this.input("organisations", v) }
},
errors () {
return this.value.errors;
},
hasErrors () {
return !this.isValid;
},
isValid () {
return this.value.isValid;
},
dirty () {
return this.value?.dirty ?? false;
},
showPasswordField () {
return this.password || (this.showPassword &&
!(this.showIp || this.ip || this.showHost || this.host));
},
...mapGetters(['user', 'loading', 'serverEvent'])
},
watch: {
errors () {
this.$emit("update:errors", this.errors);
}
},
methods: {
input (k, v) {
const user = new User(this.value);
user[k] = v;
this.$emit("input", user);
},
reset () {
},
save () {
},
back () {
this.$emit('close');
}
},
mounted () {
this.reset();
}
}
</script>

View File

@@ -0,0 +1 @@
../../../../server/lib/binary

View File

@@ -0,0 +1,150 @@
// src/lib/deck.gl/DougalBinaryLoader.js
import { LoaderObject } from '@loaders.gl/core';
import { DougalBinaryBundle } from '@dougal/binary';
async function cachedFetch(url, init, opts = {}) {
let res; // The response
let cache; // Potentially, a Cache API cache name
let isCached;
if (opts?.cache === true) {
opts.cache = { name: "dougal" };
} else if (typeof opts?.cache === "string") {
opts.cache = { name: opts.cache };
} else if (opts?.cache) {
if (!(opts.cache instanceof Object)) {
opts.cache = { name: "dougal" }
} else if (!(opts.cache.name)) {
opts.cache.name = "dougal";
}
}
if (opts?.cache && window.caches) {
cache = await caches.open(opts.cache.name);
res = await cache.match(url);
isCached = !!res;
}
if (!res) {
res = await fetch(url, init);
}
if (cache && !isCached && res.ok) { // Only cache successful responses
cache.put(url, res.clone());
}
return res;
}
const DougalBinaryLoader = {
name: 'DougalBinaryBundle Loader',
extensions: ['dbb'],
mimeTypes: ['application/vnd.aaltronav.dougal+octet-stream'],
parse: async (input, options) => {
let arrayBuffer;
if (typeof input === 'string') {
// Input is URL, fetch with caching
const response = await cachedFetch(input, options?.fetch, options);
if (!response.ok) {
throw new Error(`Failed to fetch: ${response.statusText}`);
}
arrayBuffer = await response.arrayBuffer();
} else if (input instanceof ArrayBuffer) {
arrayBuffer = input;
} else {
throw new Error('Invalid input: Expected URL string or ArrayBuffer');
}
const bundle = DougalBinaryBundle.clone(arrayBuffer);
// Calculate total points
const totalCount = bundle.chunks().reduce((acc, chunk) => acc + chunk.jCount, 0);
// Prepare positions (Float32Array: [lon1, lat1, lon2, lat2, ...])
const positions = new Float32Array(totalCount * 2);
// Extract udv (assume constant across chunks)
const udv = bundle.chunks()[0].udv;
// Prepare values as an array of TypedArrays
const ΔelemCount = bundle.chunks()[0].ΔelemCount;
const elemCount = bundle.chunks()[0].elemCount;
const values = new Array(ΔelemCount + elemCount + 2);
// Initialize values arrays with correct types
if (udv == 0) {
for (let k = 0; k < values.length; k++) {
values[k] = new (k === 0 ? Uint16Array : k === 1 ? Uint32Array : Uint8Array)(totalCount);
}
} else if (udv == 1) {
for (let k = 0; k < values.length; k++) {
values[k] = new (k === 0 ? Uint16Array : k === 1 ? Uint32Array : k === 2 ? Uint8Array : Uint16Array)(totalCount);
}
} else if (udv == 2) {
for (let k = 0; k < values.length; k++) {
values[k] = new (k === 0 ? Uint16Array : k === 1 ? Uint32Array : k === 2 ? BigUint64Array : Float32Array)(totalCount);
}
} else if (udv == 4) {
for (let k = 0; k < values.length; k++) {
values[k] = new (k === 0 ? Uint16Array : k === 1 ? Uint32Array : k === 2 ? Uint16Array : Float32Array)(totalCount);
}
} else {
throw new Error(`Invalid udv: Expected 0, 1, 2, or 4; found ${udv}`);
}
let offset = 0;
for (const chunk of bundle.chunks()) {
const λarray = chunk.elem(0);
const φarray = chunk.elem(1);
for (let i = 0; i < λarray.length; i++) {
positions[offset * 2 + i * 2] = λarray[i];
positions[offset * 2 + i * 2 + 1] = φarray[i];
}
values[0].set(new Uint16Array(chunk.jCount).fill(chunk.i), offset);
values[1].set(Uint32Array.from({ length: chunk.jCount }, (_, i) => chunk.j0 + i * chunk.Δj), offset);
for (let j = 0; j < ΔelemCount; j++) {
values[2 + j].set(chunk.Δelem(j), offset);
}
for (let j = 2; j < elemCount; j++) {
values[2 + ΔelemCount + j - 2].set(chunk.elem(j), offset);
}
offset += chunk.jCount;
}
console.log(`Parsed ${totalCount} points, ${values.length} value arrays, udv = ${udv}`);
const attributes = {
getPosition: {
value: positions,
type: 'float32',
size: 2
},
udv
};
values.forEach((valArray, k) => {
let value = valArray;
if (valArray instanceof BigUint64Array) {
value = Float64Array.from(valArray, v => Number(v));
}
attributes[`value${k}`] = {
value,
type: value instanceof Float64Array ? 'float64' :
value instanceof Uint16Array ? 'uint16' :
value instanceof Uint32Array ? 'uint32' : 'float32',
size: 1
};
});
return {
length: totalCount,
attributes
};
},
options: {} // Optional: Add custom options if needed
};
export default DougalBinaryLoader;

View File

@@ -0,0 +1,144 @@
// Ref.: https://deck.gl/docs/developer-guide/custom-layers/composite-layers
import { CompositeLayer } from '@deck.gl/core';
import { GeoJsonLayer, ColumnLayer } from '@deck.gl/layers';
class DougalEventsLayer extends CompositeLayer {
static layerName = "DougalEventsLayer";
static defaultProps = {
columnsZoom: 11, // Threshold zoom level for switching layers
jitter: 0, // Add a small amount of jitter so that columns do not overlap.
// GeoJsonLayer props
getLineColor: [127, 65, 90],
getFillColor: [127, 65, 90],
getPointRadius: 2,
radiusUnits: "pixels",
pointRadiusMinPixels: 2,
lineWidthMinPixels: 2,
// ColumnLayer props
getPosition: { type: 'accessor', value: d => d.geometry.coordinates },
getElevation: { type: 'accessor', value: d => Math.min(Math.max(d.properties.remarks?.length || 10, 10), 200) },
diskResolution: 20,
radius: 5,
radiusUnits: "pixels",
radiusScale: 1,
elevationScale: 1,
filled: true,
stroked: false,
extruded: true,
wireframe: false,
material: true,
getFillColor: [255, 0, 0, 200],
getLineColor: [255, 0, 0, 200],
getLineWidth: 2,
pickable: true
}
constructor(props) {
super(props);
this.uid = "el-" + Math.random().toString().slice(2);
// Initialize state with current zoom
this.state = {
zoom: this.context?.viewport?.zoom || 0
};
}
shouldUpdateState({ changeFlags }) {
// Always update if viewport changed (including zoom)
if (changeFlags.viewportChanged) {
return true;
}
return super.shouldUpdateState({ changeFlags });
}
updateState({ props, oldProps, context, changeFlags }) {
// Check if zoom has changed
const newZoom = context.viewport?.zoom || 0;
if (newZoom !== this.state.zoom) {
this.setState({ zoom: newZoom });
this.setNeedsRedraw(); // Trigger re-render of sublayers
console.log(`Zoom changed to ${newZoom}, triggering redraw`);
}
}
getPickingInfo({ info, mode, sourceLayer }) {
if (info.index >= 0) {
info.object = {
...info.object // Merge default picking info (GeoJSON feature or ColumnLayer object)
};
if (sourceLayer) {
info.object.type = sourceLayer.constructor.layerName;
}
//console.log(`Picked ${info.object.type}, index ${info.index}`);
}
return info;
}
renderLayers() {
const { zoom } = this.state;
const sublayers = [];
if (zoom >= this.props.columnsZoom) {
// Render ColumnLayer at high zoom
const data = Array.isArray(this.props.data) ? this.props.data : this.props.data.features || [];
const positionFn = this.props.jitter
? (d, info) => {
let pos;
if (typeof this.props.getPosition == 'function') {
pos = this.props.getPosition(d, info);
} else {
pos = this.props.getPosition;
}
return pos.map( i => i + (Math.random() - 0.5) * this.props.jitter )
}
: this.props.getPosition;
sublayers.push(
new ColumnLayer(this.getSubLayerProps({
id: `${this.uid}-column`,
data,
visible: this.props.visible,
getPosition: positionFn,
getElevation: this.props.getElevation,
diskResolution: this.props.diskResolution,
radius: this.props.radius,
radiusUnits: this.props.radiusUnits,
radiusScale: this.props.radiusScale,
elevationScale: this.props.elevationScale,
filled: this.props.filled,
stroked: this.props.stroked,
extruded: this.props.extruded,
wireframe: this.props.wireframe,
material: this.props.material,
getFillColor: this.props.getFillColor,
getLineColor: this.props.getLineColor,
getLineWidth: this.props.getLineWidth,
pickable: this.props.pickable
}))
);
} else {
// Render GeoJsonLayer at low zoom
sublayers.push(
new GeoJsonLayer(this.getSubLayerProps({
id: `${this.uid}-geojson`,
data: this.props.data,
visible: this.props.visible,
getLineColor: this.props.getLineColor,
getFillColor: this.props.getFillColor,
getPointRadius: this.props.getPointRadius,
radiusUnits: this.props.radiusUnits,
pointRadiusMinPixels: this.props.pointRadiusMinPixels,
lineWidthMinPixels: this.props.lineWidthMinPixels,
pickable: this.props.pickable
}))
);
}
console.log(`Rendering ${sublayers.length} sublayer(s) at zoom ${zoom}`);
return sublayers;
}
}
export default DougalEventsLayer;

View File

@@ -0,0 +1,108 @@
// Ref.: https://deck.gl/docs/developer-guide/custom-layers/layer-lifecycle
import { ScatterplotLayer } from '@deck.gl/layers';
class DougalSequenceLayer extends ScatterplotLayer {
static layerName = "DougalSequenceLayer";
static defaultProps = {
...ScatterplotLayer.defaultProps,
valueIndex: 0,
radiusUnits: "pixels",
radiusScale: 1,
lineWidthUnits: "pixels",
lineWidthScale: 1,
stroked: false,
filled: true,
radiusMinPixels: 1,
radiusMaxPixels: 50,
lineWidthMinPixels: 1,
lineWidthMaxPixels: 50,
getPosition: { type: 'accessor', value: d => d.positions },
getRadius: 5,
getFillColor: [255, 0, 0, 200],
getLineColor: [255, 0, 0, 200],
getLineWidth: 2,
pickable: true
}
constructor(props) {
super(props);
}
initializeState(context) {
super.initializeState(context);
}
getPickingInfo({ info, mode }) {
const index = info.index;
if (index >= 0) {
const d = this.props.data.attributes;
if (d) {
if (d.udv == 0) {
info.object = {
udv: d.udv,
i: d.value0.value[index],
j: d.value1.value[index],
ntba: d.value2.value[index] & 0x01,
sailline_ntba: d.value2.value[index] & 0x02
};
} else if (d.udv == 1) {
info.object = {
udv: d.udv,
i: d.value0.value[index],
j: d.value1.value[index],
sailline: d.value3.value[index],
ntba: d.value2.value[index] & 0x01 ? true : false,
sailline_ntba: d.value2.value[index] & 0x02 ? true : false
};
} else if (d.udv == 2) {
info.object = {
udv: d.udv,
i: d.value0.value[index],
j: d.value1.value[index],
ts: Number(d.value2.value[index]),
εi: d.value3.value[index] / 100,
εj: d.value4.value[index] / 100,
delta: d.value5.value[index] / 10,
delta_σ: d.value6.value[index] / 10,
delta_R: d.value7.value[index] / 10,
press: d.value8.value[index],
press_σ: d.value9.value[index],
press_R: d.value10.value[index],
depth: d.value11.value[index] / 10,
depth_σ: d.value12.value[index] / 10,
depth_R: d.value13.value[index] / 10,
fill: d.value14.value[index],
fill_σ: d.value15.value[index],
fill_R: d.value16.value[index],
delay: d.value17.value[index] / 10,
delay_σ: d.value18.value[index] / 10,
delay_R: d.value19.value[index] / 10,
nofire: d.value20.value[index] >> 4,
autofire: d.value20.value[index] & 0xf
};
} else if (d.udv == 3) {
info.object = {
udv: d.udv,
i: d.value0.value[index],
j: d.value1.value[index],
ts: Number(d.value2.value[index]),
εi: d.value3.value[index] / 100,
εj: d.value4.value[index] / 100,
co_i: d.value5.value[index] / 100,
co_j: d.value6.value[index] / 100,
}
} else {
console.warn(`Unknown udv value ${d.udv}. No picking info`);
info.object = {};
}
console.log(`Picked sequence ${info.object.i}, point ${info.object.j}, udv ${info.object.udv}`);
} else {
console.log(`No data found index = ${index}`);
}
}
return info;
}
}
export default DougalSequenceLayer;

View File

@@ -0,0 +1,8 @@
import DougalSequenceLayer from './DougalSequenceLayer'
import DougalEventsLayer from './DougalEventsLayer'
export {
DougalSequenceLayer,
DougalEventsLayer
};

View File

@@ -0,0 +1,47 @@
function duration_to_ms(v) {
if (v instanceof Object) {
return (
(v.days || 0) * 86400000 +
(v.hours || 0) * 3600000 +
(v.minutes || 0) * 60000 +
(v.seconds || 0) * 1000 +
(v.milliseconds || 0)
);
} else {
// Non-object input is assumed to already be a number of milliseconds
return Number(v) || 0;
}
}
function ms_to_duration(v) {
const days = Math.floor(v / 86400000);
v %= 86400000;
const hours = Math.floor(v / 3600000);
v %= 3600000;
const minutes = Math.floor(v / 60000);
v %= 60000;
const seconds = Math.floor(v / 1000);
const milliseconds = v % 1000;
return { days, hours, minutes, seconds, milliseconds };
}
function normalise_duration (v) {
return ms_to_duration(duration_to_ms(v));
}
function add_durations(a, b) {
return ms_to_duration(duration_to_ms(a) + duration_to_ms(b));
}
export {
duration_to_ms,
ms_to_duration,
normalise_duration,
add_durations
}

View File

@@ -0,0 +1,97 @@
import { User as BaseUser } from '@dougal/user';
class User extends BaseUser {
api // Instance of Vuex api method
dirty // Whether the values have changed since last saved
constructor (data, client) {
super (data);
if (client) {
this.api = client;
} else if (data instanceof User) {
this.api = data.api;
}
this.dirty = false;
this.on("changed", () => this.dirty = true);
}
static async fromAPI (api, id) {
if (id) {
const url = `/user/${id}`;
const res = await api([url]);
return new User(res, api);
} else {
const url = `/user`;
const res = await api([url]);
return res?.map( row => new User(row, api) );
}
}
/** Save this user to the server
*
* If this is a new user, the `api` parameter must be
* supplied and this will result in a `POST` request.
* For an existing user coming from the database,
* `this.api` will be used for a `PUT` request.
*/
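// Sketch of the two code paths documented above (identifiers are illustrative assumptions):
//   const draft = new User({ name: "Jane" });
//   const created = await draft.save(api); // no this.api yet, so POST /user
//   await created.save();                  // created.api is set, so PUT /user/:id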
async save (api) {
if (this.api) {
const url = `/user/${this.id}`;
const init = {
headers: {
"Content-Type": "application/json"
},
method: "PUT",
body: this.toJSON()
};
const res = await this.api([url, init]);
if (res) {
this.dirty = false;
return new User(res, this.api);
} else {
// Something has gone wrong
console.log("Something has gone wrong (PUT)");
}
} else if (api) {
const url = `/user`;
const init = {
headers: {
"Content-Type": "application/json"
},
method: "POST",
body: this.toJSON()
}
const res = await api([url, init]);
if (res) {
return new User(res, api);
} else {
// Something has gone wrong
console.log("Something has gone wrong (POST)");
}
} else {
throw new Error("Don't know how to save this user");
}
}
/** Delete this user from the server
*/
async remove () {
const url = `/user/${this.id}`;
const init = {
headers: {
"Content-Type": "application/json"
},
method: "PUT",
body: this.toJSON()
};
const res = await this.api([url, init]);
console.log("remove RES", res);
}
}
export default User;

View File

@@ -0,0 +1,5 @@
import User from './User'
export {
User
}

View File

@@ -4,7 +4,7 @@ import router from './router'
import store from './store'
import vuetify from './plugins/vuetify'
import vueDebounce from 'vue-debounce'
import { mapMutations } from 'vuex';
import { mapMutations, mapActions } from 'vuex';
import { markdown, markdownInline } from './lib/markdown';
import { geometryAsString } from './lib/utils';
import { mapGetters } from 'vuex';
@@ -46,20 +46,33 @@ new Vue({
methods: {
async sleep (ms = 0) {
return await new Promise( (resolve) => {
setTimeout( resolve, ms );
});
},
markdown (value) {
return typeof value == "string"
? marked(value)
: value;
return markdown(value);
},
markdownInline (value) {
return markdownInline(value);
},
showSnack(text, colour = "primary") {
console.log("showSnack", text, colour);
this.snackColour = colour;
this.snackText = text;
this.snack = true;
this.$store.dispatch("showSnack", [text, colour]);
},
sendJwt () {
if (this.jwt) {
this.ws.send(JSON.stringify({ jwt: this.jwt }));
}
},
initWs () {
if (this.ws) {
console.log("WebSocket initWs already called");
return;
@@ -69,11 +82,12 @@ new Vue({
this.ws.addEventListener("message", (ev) => {
const msg = JSON.parse(ev.data);
this.setServerEvent(msg);
this.processServerEvent(msg);
});
this.ws.addEventListener("open", (ev) => {
console.log("WebSocket connection open", ev);
this.sendJwt()
this.setServerConnectionState(true);
});
@@ -99,14 +113,13 @@ new Vue({
}
this.wsCredentialsCheckTimer = setInterval( () => {
this.ws.send(JSON.stringify({
jwt: this.jwt
}));
this.sendJwt();
}, this.wsCredentialsCheckInterval);
},
...mapMutations(['setServerEvent', 'setServerConnectionState'])
...mapMutations(['setServerConnectionState']),
...mapActions(['processServerEvent'])
},

View File

@@ -0,0 +1,35 @@
import { mapGetters } from 'vuex';
import { Organisations } from '@dougal/organisations';
export default {
name: "AccessMixin",
computed: {
...mapGetters(['user', 'projectConfiguration'])
},
methods: {
access (operation, organisations) {
if (this.user) {
if (!organisations) organisations = this.projectConfiguration?.organisations;
if (!(organisations instanceof Organisations)) {
organisations = new Organisations(organisations);
}
return this.user.canDo(operation, organisations);
}
},
readaccess (item) {
return this.access('read', item);
},
writeaccess (item) {
return this.access('write', item);
},
adminaccess (item) {
return this.access('edit', item);
}
}
}

View File

@@ -17,8 +17,12 @@ import QC from '../views/QC.vue'
import Graphs from '../views/Graphs.vue'
import Map from '../views/Map.vue'
import ProjectSettings from '../views/ProjectSettings.vue'
import Users from '../views/Users.vue'
import DougalAppBarExtensionProject from '../components/app-bar-extension-project'
import DougalAppBarExtensionProjectList from '../components/app-bar-extension-project-list'
import GroupList from '../views/GroupList.vue'
import Group from '../views/Group.vue'
Vue.use(VueRouter)
@@ -49,6 +53,19 @@ Vue.use(VueRouter)
name: "equipment",
component: () => import(/* webpackChunkName: "about" */ '../views/Equipment.vue')
},
{
pathToRegexpOptions: { strict: true },
path: "/users",
redirect: "/users/"
},
{
pathToRegexpOptions: { strict: true },
name: "Users",
path: "/users/",
component: Users,
meta: {
}
},
{
pathToRegexpOptions: { strict: true },
path: "/login",
@@ -103,7 +120,9 @@ Vue.use(VueRouter)
{ text: "Projects", href: "/projects" },
{
text: (ctx) => ctx.$store.state.project.projectName || "…",
href: (ctx) => `/projects/${ctx.$store.state.project.projectId || ctx.$route.params.project || ""}/`
href: (ctx) => `/projects/${ctx.$store.state.project.projectId || ctx.$route.params.project || ""}/`,
title: (ctx) => Object.entries(ctx.$store.getters.projectConfiguration?.organisations ?? {}).map( ([org, ops]) => `* ${org}: ${Object.entries(ops).filter( ([k, v]) => v ).map( ([k, v]) => k ).join(", ")}`).join("\n"),
organisations: (ctx) => ctx.$store.getters.projectConfiguration?.organisations ?? {}
}
],
appBarExtension: {
@@ -139,6 +158,7 @@ Vue.use(VueRouter)
component: SequenceList
},
{
name: "shotlog",
path: "sequences/:sequence",
component: SequenceSummary
},
@@ -180,7 +200,57 @@ Vue.use(VueRouter)
component: ProjectSettings
}
]
}
},
{
pathToRegexpOptions: { strict: true },
path: "/groups",
redirect: "/groups/"
},
{
pathToRegexpOptions: { strict: true },
path: "/groups/",
component: GroupList,
meta: {
breadcrumbs: [
{ text: "Comparisons", href: "/groups", disabled: true }
],
appBarExtension: {
// component: DougalAppBarExtensionProjectList
}
}
},
{
pathToRegexpOptions: { strict: true },
path: "/groups/:group",
redirect: "/groups/:group/"
},
{
pathToRegexpOptions: { strict: true },
path: "/groups/:group/",
name: "Group",
component: Group,
meta: {
breadcrumbs: [
{ text: "Comparisons", href: "/groups" },
{ text: (ctx) => ctx.$route.params.group }
/*
{
text: (ctx) => ctx.$store.state.project.projectName || "…",
href: (ctx) => `/projects/${ctx.$store.state.project.projectId || ctx.$route.params.project || ""}/`,
title: (ctx) => Object.entries(ctx.$store.getters.projectConfiguration?.organisations ?? {}).map( ([org, ops]) => `* ${org}: ${Object.entries(ops).filter( ([k, v]) => v ).map( ([k, v]) => k ).join(", ")}`).join("\n"),
organisations: (ctx) => ctx.$store.getters.projectConfiguration?.organisations ?? {}
}
*/
],
/*
appBarExtension: {
component: DougalAppBarExtensionGroup
}
*/
},
children: [
]
},
]
const router = new VueRouter({

View File

@@ -1,5 +1,27 @@
const ConcurrencyLimiter = require('@dougal/concurrency');
/** Make an API request
*
* @a resource {String} is the target URL
* @a init {Object} are the Fetch options
* @a cb {Function} is a callback function: (res, err) => {}
* @a opts {Object} are other optional parameters:
* opts.silent {Boolean} controls whether snack messages are shown on failure
* opts.cache {Object} controls whether Cache API is used
* opts.cache.name {String} is the name of the cache to use. Defaults to "dougal"
*
* If Cache API is used, this function looks for a matching request in the cache
* first, and returns it if found. If not found, it makes the request over the API
* and then stores it in the cache.
*
* `opts.cache` may also be `true` (defaults to using the "dougal" cache),
* a cache name (equivalent to {name: "…"}) or even an empty object (equivalent
* to `true`).
*/
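// Illustrative dispatch from a component (the path and option values are assumptions, not taken from the codebase):
//   const text = await this.$store.dispatch('api', [
//     '/files/readme.txt',                           // resource
//     {},                                            // init (Fetch options)
//     null,                                          // cb
//     { cache: true, format: "text", silent: true }  // opts
//   ]);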
async function api ({state, getters, commit, dispatch}, [resource, init = {}, cb, opts = {}]) {
const limiter = api.limiter || (api.limiter = new ConcurrencyLimiter(state.maxConcurrent));
async function api ({state, getters, commit, dispatch}, [resource, init = {}, cb]) {
try {
commit("queueRequest");
if (init && init.hasOwnProperty("body")) {
@@ -15,22 +37,89 @@ async function api ({state, getters, commit, dispatch}, [resource, init = {}, cb
}
// We also send Authorization: Bearer …
if (getters.jwt) {
init.credentials = "include";
init.headers["Authorization"] = "Bearer "+getters.jwt;
}
if (typeof init.body != "string") {
init.body = JSON.stringify(init.body);
}
const url = /^https?:\/\//i.test(resource) ? resource : (state.apiUrl + resource);
const res = await fetch(url, init);
if (typeof cb === 'function') {
await cb(null, res);
let res; // The response
let cache; // Potentially, a Cache API cache name
let isCached;
if (opts?.cache === true) {
opts.cache = { name: "dougal" };
} else if (typeof opts?.cache === "string") {
opts.cache = { name: opts.cache };
} else if (opts?.cache) {
if (!(opts.cache instanceof Object)) {
opts.cache = { name: "dougal" }
} else if (!(opts.cache.name)) {
opts.cache.name = "dougal";
}
}
if (opts?.cache && window.caches) {
cache = await caches.open(opts.cache.name);
res = await cache.match(url);
isCached = !!res;
}
if (!res) {
res = await limiter.enqueue(async () => await fetch(url, init));
}
if (cache && !isCached && res.ok) { // Only cache successful responses
cache.put(url, res.clone());
}
if (typeof cb === 'function') {
await cb(null, res.clone());
}
if (res.headers.has("x-dougal-server")) {
const header = res.headers.get("x-dougal-server")
const entries = header
.split(";")
.map(part => part.trim())
.filter(part => part.length > 0)
.map(part => {
const idx = part.indexOf('=');
if (idx === -1) {
return [part, true];
}
const key = part.slice(0, idx).trim();
const value = part.slice(idx + 1).trim();
return [key, value];
});
state.serverInfo = entries.length ? Object.fromEntries(entries) : {};
if (state.serverInfo["remote-frontend"]) {
state.isGatewayReliable = ![ 502, 503, 504 ].includes(res.status);
} else {
state.isGatewayReliable = null;
}
}
if (res.ok) {
await dispatch('setCredentials');
if (!isCached) {
if (res.headers.has("x-jwt")) {
await dispatch('setCredentials', { response: res });
}
}
try {
return init.text ? (await res.text()) : (await res.json());
if (!res.bodyUsed) { // It may have been consumed by a callback
const validFormats = [ "arrayBuffer", "blob", "formData", "json", "text" ];
if (opts.format && validFormats.includes(opts.format)) {
return await res[opts.format]();
} else {
return await res.json();
}
}
} catch (err) {
if (err instanceof SyntaxError) {
if (Number(res.headers.get("Content-Length")) === 0) {
@@ -49,7 +138,9 @@ async function api ({state, getters, commit, dispatch}, [resource, init = {}, cb
message = body.message;
}
}
await dispatch('showSnack', [message, "warning"]);
if (!opts?.silent) {
await dispatch('showSnack', [message, "warning"]);
}
}
} catch (err) {
if (err && err.name == "AbortError") return;

View File

@@ -1,6 +1,9 @@
const state = () => ({
apiUrl: "/api",
requestsCount: 0
requestsCount: 0,
maxConcurrent: 15,
serverInfo: {}, // Contents of the last received X-Dougal-Server HTTP header
isGatewayReliable: null, // Set to false once we see HTTP 502–504 responses from a remote front-end gateway
});
export default state;

View File

@@ -17,6 +17,7 @@ async function refreshEvents ({commit, dispatch, state, rootState}, [modifiedAft
? `/project/${pid}/event/changes/${(new Date(modifiedAfter)).toISOString()}?unique=t`
: `/project/${pid}/event`;
const init = {
cache: "reload",
signal: state.loading.signal
};
const res = await dispatch('api', [url, init]);
@@ -35,7 +36,7 @@ async function refreshEvents ({commit, dispatch, state, rootState}, [modifiedAft
/** Return a subset of events from state.events
*/
async function getEvents ({commit, dispatch, state}, [projectId, {sequence, date0, date1, sortBy, sortDesc, itemsPerPage, page, text, label}]) {
async function getEvents ({commit, dispatch, state}, [projectId, {sequence, date0, date1, sortBy, sortDesc, itemsPerPage, page, text, label, excludeLabels}]) {
let filteredEvents = [...state.events];
if (sortBy) {
@@ -113,6 +114,10 @@ async function getEvents ({commit, dispatch, state}, [projectId, {sequence, date
filteredEvents = filteredEvents.filter( event => event.labels?.includes(label) );
}
if (excludeLabels) {
filteredEvents = filteredEvents.filter( event => !excludeLabels?.some( label => event.labels?.includes(label) ) );
}
const count = filteredEvents.length;
if (itemsPerPage && itemsPerPage > 0) {

View File

@@ -11,6 +11,7 @@ async function refreshLabels ({commit, dispatch, state, rootState}) {
const pid = rootState.project.projectId;
const url = `/project/${pid}/label`;
const init = {
cache: "reload",
signal: state.loading.signal
};
const res = await dispatch('api', [url, init]);

View File

@@ -11,6 +11,7 @@ async function refreshLines ({commit, dispatch, state, rootState}) {
const pid = rootState.project.projectId;
const url = `/project/${pid}/line`;
const init = {
cache: "reload",
signal: state.loading.signal
};
const res = await dispatch('api', [url, init]);

View File

@@ -0,0 +1,83 @@
import debounce from 'lodash/debounce';
function registerHandler({ commit }, { table, handler }) {
commit('REGISTER_HANDLER', { table, handler });
}
function unregisterHandler({ commit }, { table, handler }) {
commit('UNREGISTER_HANDLER', { table, handler });
}
function processServerEvent({ commit, dispatch, state, rootState }, message) {
//console.log("processServerEvent", message);
// Error handling for invalid messages
if (!message) {
console.error("processServerEvent called without arguments");
return;
}
if (!message.channel) {
console.error("processServerEvent message missing channel");
return;
}
if (!message.payload) {
console.error("processServerEvent message missing payload");
return;
}
if (message.payload.operation == "INSERT") {
if (message.payload.new == null) {
console.error("Expected payload.new to be non-null");
return;
}
} else if (message.payload.operation == "UPDATE") {
if (message.payload.old == null || message.payload.new == null) {
console.error("Expected payload.old and paylaod.new to be non-null");
return;
}
} else if (message.payload.operation == "DELETE") {
if (message.payload.old == null) {
console.error("Expected payload.old to be non-null");
return;
}
} else {
console.warn(`Unrecognised operation: ${message.payload.operation}`);
}
const table = message.channel; // or message.payload?.table;
//console.log("table=", table);
if (!table || !state.handlers[table] || state.handlers[table].length === 0) {
return;
}
// Create a debounced runner per table if not exists
if (!state.debouncedRunners) {
state.debouncedRunners = {}; // Plain object: reactivity is not needed here (otherwise use Vue.set)
}
if (!state.debouncedRunners[table]) {
const config = {
wait: 300, // min silence in ms
maxWait: 1000, // max wait before force run, adjustable
trailing: true,
leading: false
};
state.debouncedRunners[table] = debounce((lastMessage) => {
const context = { commit, dispatch, state: rootState, rootState }; // Approximate action context
state.handlers[table].forEach(handler => {
try {
//console.log("Trying handler:", handler);
handler(context, lastMessage);
} catch (e) {
console.error(`Error in handler for table ${table}:`, e);
}
});
}, config.wait, { maxWait: config.maxWait });
}
// Call the debounced function with the current message
// Debounce will use the last call's argument if multiple
state.debouncedRunners[table](message);
}
export default { registerHandler, unregisterHandler, processServerEvent };
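A minimal usage sketch of the new handler registry, mirroring how the Group view further down subscribes to the 'comparisons' channel (the component and handler names are illustrative):

// In a component: subscribe on mount, unsubscribe before destroy.
export default {
  methods: {
    handleComparisons (context, message) {
      // `message` is the last server event seen for this table within the debounce window
      this.refreshComparisons();
    }
  },
  mounted () {
    this.$store.dispatch('registerHandler', { table: 'comparisons', handler: this.handleComparisons });
  },
  beforeDestroy () {
    this.$store.dispatch('unregisterHandler', { table: 'comparisons', handler: this.handleComparisons });
  }
};

// A message that reaches the handler looks roughly like:
// { channel: 'comparisons', payload: { operation: 'UPDATE', old: {...}, new: {...} } }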

View File

@@ -11,4 +11,29 @@ function setServerConnectionState (state, isConnected) {
state.serverConnected = !!isConnected;
}
export default { setServerEvent, clearServerEvent, setServerConnectionState };
function REGISTER_HANDLER(state, { table, handler }) {
if (!state.handlers[table]) {
state.handlers[table] = [];
}
if (!state.handlers[table].includes(handler)) {
state.handlers[table].push(handler);
}
}
function UNREGISTER_HANDLER(state, { table, handler }) {
if (state.handlers[table]) {
const handlerIndex = state.handlers[table].findIndex(el => el === handler);
if (handlerIndex != -1) {
state.handlers[table].splice(handlerIndex, 1);
}
}
}
export default {
setServerEvent,
clearServerEvent,
setServerConnectionState,
REGISTER_HANDLER,
UNREGISTER_HANDLER
};

View File

@@ -1,6 +1,7 @@
const state = () => ({
serverEvent: null,
serverConnected: false
serverConnected: false,
handlers: {}, // table: array of functions (each fn is called with ({ commit, dispatch, state, rootState }, message))
});
export default state;

View File

@@ -11,6 +11,7 @@ async function refreshPlan ({commit, dispatch, state, rootState}) {
const pid = rootState.project.projectId;
const url = `/project/${pid}/plan`;
const init = {
cache: "reload",
signal: state.loading.signal
};
const res = await dispatch('api', [url, init]);

View File

@@ -1,18 +1,47 @@
function transform (item) {
item.ts0 = new Date(item.ts0);
item.ts1 = new Date(item.ts1);
return item;
const newItem = {...item}
newItem.ts0 = new Date(newItem.ts0);
newItem.ts1 = new Date(newItem.ts1);
return newItem;
}
// ATTENTION: This relies on the new planner endpoint
// as per issue #281.
function setRemarks (state, remarks) {
state.remarks = remarks;
}
function setSequence (state, sequence) {
state.sequences.push(Object.freeze(transform(sequence)));
}
function deleteSequence (state, sequence) {
const seq = transform(sequence)
const idx = state.sequences?.findIndex( s => Object.keys(seq).every( k => JSON.stringify(s[k]) == JSON.stringify(seq[k]) ));
if (idx != -1) {
state.sequences.splice(idx, 1)
}
}
function replaceSequence (state, [oldSequence, newSequence]) {
console.log("replaceSequence", oldSequence, newSequence);
const seq = transform(oldSequence)
const idx = state.sequences?.findIndex( s => Object.keys(seq).every( k => JSON.stringify(s[k]) == JSON.stringify(seq[k]) ));
console.log("idx", idx);
if (idx != -1) {
state.sequences.splice(idx, 1, transform(newSequence))
console.log("spliced in");
}
}
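// Matching note (sketch of the mechanics above): transform() turns ts0/ts1 into
// Date objects on both sides, and JSON.stringify(new Date(...)) yields the same
// ISO string as the stored value, so the key-by-key comparison still locates the
// existing sequence when an incoming payload carries string timestamps.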
function setPlan (state, plan) {
// We don't need or want the planned sequences array to be reactive
state.sequences = Object.freeze(plan.sequences.map(transform));
state.remarks = plan.remarks;
state.sequences = [];
plan.sequences.forEach( sequence => setSequence(state, sequence) );
setRemarks(state, plan.remarks);
}
function setPlanLoading (state, abortController = new AbortController()) {
@@ -51,6 +80,10 @@ function abortPlanLoading (state) {
}
export default {
setRemarks,
setSequence,
deleteSequence,
replaceSequence,
setPlan,
setPlanLoading,
clearPlanLoading,

View File

@@ -1,5 +1,5 @@
const state = () => ({
sequences: Object.freeze([]),
sequences: [],
remarks: null,
loading: null,
timestamp: null,

View File

@@ -1,7 +1,13 @@
async function getProject ({commit, dispatch}, projectId) {
if (projectId == null) {
console.log(`Skipping call to getProject(${projectId})`);
return;
}
const init = {
cache: "reload",
headers: {
"If-None-Match": "" // Ensure we get a fresh response
}
};

View File

@@ -1,25 +1,55 @@
import * as d3a from 'd3-array';
import { duration_to_ms, ms_to_duration, normalise_duration, add_durations } from '@/lib/durations';
/** Fetch projects from server
*/
async function refreshProjects ({commit, dispatch, state, rootState}) {
async function getSummary (project) {
const url = `/project/${project.pid}/summary`;
const init = {};
const summary = await dispatch('api', [url, init, null, {silent:true}]);
if (summary) {
return {...project, ...summary};
} else {
return project;
}
}
if (state.loading) {
commit('abortProjectsLoading');
}
commit('setProjectsLoading');
const tstamp = new Date();
const pid = rootState.project.projectId;
const url = `/project`;
const init = {
cache: "reload",
signal: state.loading.signal
};
const res = await dispatch('api', [url, init]);
const res = await dispatch('api', [url, init, null, {silent:true}]);
if (res) {
commit('setProjects', res);
commit('setProjectsTimestamp');
let projects;
if (res.some( project => project.pid == null )) {
console.warn("At least one project found with no PID!");
projects = res.filter( project => project.pid != null );
} else {
projects = res;
}
commit('setProjects', projects); // First without summaries
commit('setProjectsTimestamp', tstamp);
projects = await Promise.all(projects.map( getSummary ));
commit('setProjects', projects); // Then with summaries
}
commit('clearProjectsLoading');
dispatch('prepareGroups');
}
/** Return a subset of projects from state.projects
@@ -117,4 +147,83 @@ async function getProjects ({commit, dispatch, state}, [{pid, name, schema, grou
return {projects: filteredProjects, count};
}
export default { refreshProjects, getProjects };
async function prepareGroups ({commit, dispatch, state, rootState}) {
const groups = {};
for (const project of state.projects) {
if (!project.prod_distance) {
// This project has no production data (either not started yet
// or production data has not been imported) so we skip it.
continue;
}
if (!project.prod_duration.days) {
project.prod_duration = normalise_duration(project.prod_duration);
}
for (const name of project.groups) {
if (!(name in groups)) {
groups[name] = {
group: name,
num_projects: 0,
lines: 0,
points: 0,
sequences: 0,
// Shots:
prime: 0,
other: 0,
ntba: 0,
prod_duration: {
days: 0,
hours: 0,
minutes: 0,
seconds: 0,
milliseconds: 0
},
prod_distance: 0,
shooting_rate: [],
projects: []
};
}
const group = groups[name];
group.num_projects++;
group.lines = Math.max(group.lines, project.lines); // In case preplots changed
group.points = Math.max(group.points, project.total); // Idem
group.sequences += project.seq_final;
group.prime += project.prime;
group.other += project.other;
//group.ntba += project.ntba;
group.prod_duration = add_durations(group.prod_duration, project.prod_duration);
group.prod_distance += project.prod_distance;
group.shooting_rate.push(project.shooting_rate);
group.projects.push(project);
}
}
const grouplist = [];
for (const group of Object.values(groups)) {
group.shooting_rate_mean = d3a.mean(group.shooting_rate);
group.shooting_rate_sd = d3a.deviation(group.shooting_rate);
delete group.shooting_rate;
grouplist.push(group);
}
commit('setGroups', grouplist);
}
async function getGroups({commit, dispatch, state, rootState}) {
if (!state.groups.length) {
await dispatch('refreshProjects');
}
return state.groups;
}
export default { refreshProjects, getProjects, prepareGroups, getGroups };
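The group totals rely on the interval-style duration helpers imported above; a minimal sketch of the arithmetic, assuming '@/lib/durations' behaves like the commented-out copies kept in the GroupList component further down:

import { add_durations, normalise_duration } from '@/lib/durations';

// Two campaigns of 20 h and 30 h roll over into whole days:
add_durations({ hours: 20 }, { hours: 30 });
// -> { days: 2, hours: 2, minutes: 0, seconds: 0, milliseconds: 0 }

// A duration reported only in hours gets normalised before aggregation:
normalise_duration({ hours: 30 });
// -> { days: 1, hours: 6, minutes: 0, seconds: 0, milliseconds: 0 }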

View File

@@ -3,7 +3,7 @@ function projects (state) {
return state.projects;
}
function projectGroups (state) {
function projectGroupNames (state) {
return [...new Set(state.projects.map(i => i.groups).flat())].sort();
}
@@ -15,4 +15,8 @@ function projectsLoading (state) {
return !!state.loading;
}
export default { projects, projectGroups, projectCount, projectsLoading };
function groups (state) {
return state.groups;
}
export default { projects, projectGroupNames, projectCount, projectsLoading, groups };

View File

@@ -39,10 +39,15 @@ function abortProjectsLoading (state) {
state.loading = null;
}
function setGroups (state, groups) {
state.groups = Object.freeze(groups);
}
export default {
setProjects,
setProjectsLoading,
clearProjectsLoading,
setProjectsTimestamp,
setProjectsETag
setProjectsETag,
setGroups
};

View File

@@ -1,5 +1,6 @@
const state = () => ({
projects: Object.freeze([]),
groups: Object.freeze([]),
loading: null,
timestamp: null,
etag: null,

View File

@@ -11,6 +11,7 @@ async function refreshSequences ({commit, dispatch, state, rootState}) {
const pid = rootState.project.projectId;
const url = `/project/${pid}/sequence?files=true`;
const init = {
cache: "reload",
signal: state.loading.signal
};
const res = await dispatch('api', [url, init]);

View File

@@ -1,6 +1,7 @@
import jwt_decode from 'jwt-decode';
import { User } from '@/lib/user';
async function login ({commit, dispatch}, loginRequest) {
async function login ({ commit, dispatch }, loginRequest) {
const url = "/login";
const init = {
method: "POST",
@@ -8,93 +9,86 @@ async function login ({commit, dispatch}, loginRequest) {
"Content-Type": "application/json"
},
body: loginRequest
};
const callback = async (err, res) => {
if (!err && res) {
const { token } = (await res.json());
await dispatch('setCredentials', {token});
}
}
const res = await dispatch('api', [url, init]);
if (res && res.ok) {
await dispatch('setCredentials', {force: true});
await dispatch('loadUserPreferences');
}
await dispatch('api', [url, init, callback]);
await dispatch('loadUserPreferences');
}
async function logout ({commit, dispatch}) {
commit('setCookie', null);
async function logout ({ commit, dispatch }) {
commit('setToken', null);
commit('setUser', null);
// Should delete JWT cookie
await dispatch('api', ["/logout"]);
// Clear preferences
commit('setPreferences', {});
}
function browserCookie (state) {
return document.cookie.split(/; */).find(i => /^JWT=.+/.test(i));
}
function setCredentials({ state, commit, getters, dispatch, rootState }, { force, token, response } = {}) {
try {
let tokenValue = token;
function cookieChanged (cookie) {
return browserCookie != cookie;
}
if (!tokenValue && response?.headers?.get('x-jwt')) {
tokenValue = response.headers.get('x-jwt');
}
function setCredentials ({state, commit, getters, dispatch}, {force, token} = {}) {
if (token || force || cookieChanged(state.cookie)) {
try {
const cookie = browserCookie();
const decoded = (token ?? cookie) ? jwt_decode(token ?? cookie.split("=")[1]) : null;
commit('setCookie', (cookie ?? (token && ("JWT="+token))) || undefined);
commit('setUser', decoded);
} catch (err) {
if (err.name == "InvalidTokenError") {
console.warn("Failed to decode", browserCookie());
} else {
console.error("setCredentials", err);
}
if (!tokenValue) {
console.log('No JWT found in token or response');
return;
}
if (force || tokenValue !== getters.jwt) {
const decoded = jwt_decode(tokenValue);
commit('setToken', tokenValue);
commit('setUser', decoded ? new User(decoded, rootState.api.api) : null);
commit('setCookie', {name: "JWT", value: tokenValue, expires: (decoded.exp??0)*1000});
console.log('Credentials refreshed at', new Date().toISOString());
} else {
console.log('JWT unchanged, skipping update');
}
} catch (err) {
console.error('setCredentials error:', err.message, 'token:', token, 'response:', response?.headers?.get('x-jwt'));
if (err.name === 'InvalidTokenError') {
commit('setToken', null);
commit('setUser', null);
commit('clearCookie', "JWT")
}
}
dispatch('loadUserPreferences');
}
/**
* Save user preferences to localStorage and store.
*
* User preferences are identified by a key that gets
* prefixed with the user id. The value can be anything
* that JSON.stringify can serialise.
*/
function saveUserPreference ({state, commit}, [key, value]) {
const k = `${state.user?.name}.${state.user?.role}.${key}`;
function saveUserPreference({ state, commit }, [key, value]) {
const k = `${state.user?.id}.${key}`;
if (value !== undefined) {
localStorage.setItem(k, JSON.stringify(value));
const preferences = state.preferences;
preferences[key] = value;
const preferences = { ...state.preferences, [key]: value };
commit('setPreferences', preferences);
} else {
localStorage.removeItem(k);
const preferences = state.preferences;
const preferences = { ...state.preferences };
delete preferences[key];
commit('setPreferences', preferences);
}
}
async function loadUserPreferences ({state, commit}) {
// Get all keys which are of interest to us
const prefix = `${state.user?.name}.${state.user?.role}`;
const keys = Object.keys(localStorage).filter( k => k.startsWith(prefix) );
// Build the preferences object
async function loadUserPreferences({ state, commit }) {
const prefix = `${state.user?.id}`;
const keys = Object.keys(localStorage).filter(k => k.startsWith(prefix));
const preferences = {};
keys.map(str => {
keys.forEach(str => {
const value = JSON.parse(localStorage.getItem(str));
const key = str.split(".").slice(2).join(".");
preferences[key] = value;
});
// Commit it
commit('setPreferences', preferences);
}
export default {
login,
logout,

View File

@@ -4,21 +4,11 @@ function user (state) {
}
function jwt (state) {
if (state.cookie?.startsWith("JWT=")) {
return state.cookie.substring(4);
}
}
function writeaccess (state) {
return state.user && ["user", "admin"].includes(state.user.role);
}
function adminaccess (state) {
return state.user && state.user.role == "admin";
return state.token;
}
function preferences (state) {
return state.preferences;
}
export default { user, jwt, writeaccess, adminaccess, preferences };
export default { user, jwt, preferences };

View File

@@ -1,6 +1,11 @@
function setCookie (state, cookie) {
state.cookie = cookie;
function setToken (state, token) {
state.token = token;
if (token) {
localStorage?.setItem("jwt", token);
} else {
localStorage?.removeItem("jwt");
}
}
function setUser (state, user) {
@@ -11,4 +16,18 @@ function setPreferences (state, preferences) {
state.preferences = preferences;
}
export default { setCookie, setUser, setPreferences };
function setCookie (state, opts = {}) {
const name = opts.name ?? "JWT";
const value = opts.value ?? "";
const expires = opts.expires ? (new Date(opts.expires)) : (new Date(0));
const path = opts.path ?? "/";
const sameSite = opts.sameSite ?? "Lax";
document.cookie = `${name}=${value};path=${path};SameSite=${sameSite};expires=${expires.toUTCString()}`;
}
function clearCookie (state, name) {
setCookie(state, {name});
}
export default { setToken, setUser, setPreferences, setCookie, clearCookie };
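For reference, a sketch of what the new cookie mutations produce (token and expiry values are illustrative):

// setCookie(state, { name: "JWT", value: token, expires: decoded.exp * 1000 }) writes roughly:
//   document.cookie = "JWT=<token>;path=/;SameSite=Lax;expires=<expiry as UTC string>"
// clearCookie(state, "JWT") re-issues the cookie with an empty value and
//   expires=Thu, 01 Jan 1970 00:00:00 GMT, which makes the browser drop it.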

View File

@@ -1,5 +1,5 @@
const state = () => ({
cookie: null,
token: localStorage?.getItem("jwt") ?? null,
user: null,
preferences: {}
});

View File

@@ -8,7 +8,7 @@
@input="closeDialog"
>
<template v-slot:activator="{ on, attrs }">
<v-btn v-if="writeaccess"
<v-btn v-if="writeaccess()"
small
color="primary"
v-bind="attrs"
@@ -182,7 +182,7 @@
</v-container>
</v-card-text>
<v-card-actions>
<v-btn v-if="writeaccess"
<v-btn v-if="writeaccess()"
small
text
color="primary"
@@ -205,7 +205,7 @@
</v-btn>
</v-btn-toggle>
<v-spacer></v-spacer>
<v-btn v-if="writeaccess"
<v-btn v-if="writeaccess()"
small
dark
color="red"
@@ -247,7 +247,7 @@
</v-card-text>
<v-card-actions>
<v-spacer></v-spacer>
<v-btn v-if="writeaccess"
<v-btn v-if="writeaccess()"
small
dark
color="red"
@@ -303,10 +303,15 @@
<script>
import { mapActions, mapGetters } from 'vuex';
import AccessMixin from '@/mixins/access';
export default {
name: "Equipment",
mixins: [
AccessMixin
],
data () {
return {
latest: [],
@@ -395,7 +400,7 @@ export default {
return null;
},
...mapGetters(['user', 'writeaccess', 'loading', 'serverEvent'])
...mapGetters(['user', 'loading', 'serverEvent'])
},

View File

@@ -91,7 +91,7 @@ export default {
},
async refresh () {
const text = await this.api([`/rss/?remote=${atob(this.$route.params.source)}`, {text:true}]);
const text = await this.api([`/rss/?remote=${atob(this.$route.params.source)}`, {format:"text"}]);
try {
this.feed = this.parse(text);
} catch (err) {

View File

@@ -240,7 +240,7 @@ export default {
return this.sequences[0]?.sequence;
},
...mapGetters(['user', 'preferences', 'writeaccess', 'loading', 'serverEvent'])
...mapGetters(['user', 'preferences', 'loading', 'serverEvent'])
},
methods: {

View File

@@ -0,0 +1,339 @@
<template>
<dougal-group-map v-if="mapView"
:baseline="baseline"
:monitor="monitor"
:monitors="monitors"
@input="mapView=$event"
></dougal-group-map>
<v-container fluid fill-height class="ma-0 pa-0" v-else>
<v-overlay :value="loading && !comparisons.length" absolute>
<v-progress-circular
indeterminate
size="64"
></v-progress-circular>
</v-overlay>
<v-overlay :value="!loading && !groupFound" absolute opacity="0.8">
<v-row justify="center">
<v-alert
type="error"
>
Group not found
</v-alert>
</v-row>
<v-row justify="center">
<v-btn color="primary" @click="refreshProjects">Retry</v-btn>
</v-row>
</v-overlay>
<v-row no-gutters align="stretch" class="fill-height">
<v-col cols="12" v-if="groupFound">
<v-data-table class="ma-1"
:headers="projectHeaders"
:items="projects"
dense
>
<template v-slot:item.baseline="{item, value, index}">
<v-simple-checkbox v-if="index+1 < projects.length"
color="primary"
:value="baseline === item"
@input="setBaseline(item)"
></v-simple-checkbox>
</template>
<template v-slot:item.monitor="{item, value, index}">
<v-simple-checkbox v-if="index > 0 && !(index <= baselineIndex)"
color="primary"
:value="monitor === item"
@input="setMonitor(item)"
></v-simple-checkbox>
</template>
<template v-slot:item.pid="{item, value}">
<v-chip
label
small
outlined
:href="`/projects/${item.pid}`"
:color="!item.archived ? 'primary' : ''"
>{{ value }}</v-chip>
</template>
<template v-slot:item.fsp="{item, value}">
<span title="First production shot">{{value.tstamp.substr(0, 10)}}</span>
</template>
<template v-slot:item.lsp="{item, value}">
<span title="Last production shot">{{value.tstamp.substr(0, 10)}}</span>
</template>
<template v-slot:item.prod_duration="{item, value}">
<span v-if="value.days > 2" :title="`${value.days} d ${value.hours} h ${value.minutes} m ${(value.seconds + value.milliseconds/1000).toFixed(3)} s`">
{{ value.days }} d
</span>
<span v-else>
{{ value.days }} d {{ value.hours }} h {{ value.minutes }} m {{ (value.seconds + value.milliseconds/1000).toFixed(1) }} s
</span>
</template>
<template v-slot:item.prod_distance="{item, value}">
{{ (value/1000).toFixed(1) }} km
</template>
<template v-slot:footer.prepend>
<v-btn v-if="baseline && !mapView"
text
color="primary"
title="Switch to map view"
@click="mapView = true"
>View map</v-btn>
<v-btn v-if="comparison"
text
color="primary"
title="Back to summary"
@click="clearComparison"
>Back</v-btn>
</template>
</v-data-table>
<!-- BEGIN TEST -->
<dougal-group-comparison-summary v-if="comparison"
:baseline="baseline"
:monitor="monitor"
:comparison="comparison"
></dougal-group-comparison-summary>
<dougal-group-repeatability-summary v-else-if="comparisons.length"
:comparisons="comparisons"
:projects="projects"
@input="setComparison"
></dougal-group-repeatability-summary>
<!-- END TEST -->
</v-col>
<v-col cols="12" v-else>
<v-card>
<v-card-text>
Group does not exist.
</v-card-text>
</v-card>
</v-col>
</v-row>
</v-container>
</template>
<script>
import { mapActions, mapGetters } from 'vuex'
import AccessMixin from '@/mixins/access';
import DougalGroupRepeatabilitySummary from '@/components/groups/group-repeatability-summary.vue';
import DougalGroupComparisonSummary from '@/components/groups/group-comparison-summary';
import DougalGroupMap from '@/components/groups/group-map';
export default {
name: 'Group',
mixins: [
AccessMixin
],
components: {
DougalGroupRepeatabilitySummary,
DougalGroupComparisonSummary,
DougalGroupMap
},
data () {
return {
projectHeaders: [
{
value: "baseline",
text: "Baseline"
},
{
value: "monitor",
text: "Monitor"
},
{
value: "pid",
text: "ID"
},
{
value: "name",
text: "Name"
},
{
value: "fsp",
text: "Start"
},
{
value: "lsp",
text: "Finish"
},
{
value: "lines",
text: "Preplot lines"
},
{
value: "seq_final",
text: "Num. of sequences"
},
{
value: "prod_duration",
text: "Duration"
},
{
value: "prod_distance",
text: "Distance"
},
],
mapView: false,
baseline: null,
monitor: null,
comparisons: []
}
},
computed: {
groupName () {
return this.$route.params.group;
},
group () {
return this.groups.find( i => i.group === this.groupName );
},
groupFound () {
return !!(this.loading || this.group);
},
projects () {
return this.group?.projects.toSorted((a, b) => a.pid.localeCompare(b.pid));
},
baselineIndex () {
return this.projects.indexOf(this.baseline);
},
monitors () {
if (this.baseline && this.comparisons) {
return this.comparisons
.filter( i => i.baseline_pid == this.baseline.pid )
.map( i => this.projects.find( p => p.pid == i.monitor_pid ));
} else {
return null;
}
},
comparison () {
return this.comparisons.find( row =>
row.baseline_pid == this.baseline?.pid && row.monitor_pid == this.monitor?.pid
)?.meta;
},
...mapGetters(["loading", "groups"])
},
methods: {
setBaseline (project) {
if (project === this.baseline) {
this.baseline = null;
} else {
this.baseline = project;
if (this.monitor) {
if (this.projects.indexOf(this.monitor) <= this.projects.indexOf(this.baseline)) {
this.monitor = null;
}
}
}
},
setMonitor (project) {
if (project === this.monitor) {
this.monitor = null;
} else {
this.monitor = project;
}
},
clearComparison () {
this.baseline = null;
this.monitor = null;
},
setComparison (baseline, monitor) {
this.clearComparison();
this.setBaseline(baseline);
this.setMonitor(monitor);
},
async getComparisons () {
const url = `/comparison/group/${this.$route.params.group}`;
this.comparisons = await this.api([url]);
},
// TODO Should this go in a Vuex action rather?
async refreshComparisons () {
await this.getGroups();
if (this.groupFound) {
await this.getComparisons();
}
},
/*
async getComparison () {
if (this.baseline && this.monitor) {
const url = `/comparison/group/${this.$route.params.group}/baseline/${this.baseline.pid}/monitor/${this.monitor.pid}`;
const comparison = await this.api([url]);
if (comparison) {
this.comparison = comparison;
}
}
},
*/
handleComparisons (context, {payload}) {
this.refreshComparisons();
},
registerNotificationHandlers (action = "registerHandler") {
this.$store.dispatch(action, {
table: 'comparisons',
handler: this.handleComparisons
});
},
unregisterNotificationHandlers () {
return this.registerNotificationHandlers("unregisterHandler");
},
...mapActions(["api", "getGroups", "refreshProjects"])
},
async mounted () {
this.registerNotificationHandlers();
this.refreshComparisons()
},
beforeDestroy () {
this.unregisterNotificationHandlers();
}
}
</script>

View File

@@ -0,0 +1,396 @@
<template>
<v-container fluid>
<v-data-table
:headers="headers"
:items="displayItems"
item-key="group"
:options.sync="options"
:expanded.sync="expanded"
show-expand
:loading="loading"
>
<template v-slot:item.group="{item, value}">
<v-chip
label
small
:href="`./${value}`"
>{{ value }}</v-chip>
</template>
<template v-slot:item.shots_total="{item, value}">
<div>{{ item.prime + item.other }}</div>
<v-progress-linear
background-color="secondary"
color="primary"
:value="item.prime/(item.prime+item.other)*100"
></v-progress-linear>
</template>
<template v-slot:item.prime="{item, value}">
{{ value }}
({{ (value / (item.prime + item.other) * 100).toFixed(1) }}%)
</template>
<template v-slot:item.other="{item, value}">
{{ value }}
({{ (value / (item.prime + item.other) * 100).toFixed(1) }}%)
</template>
<template v-slot:item.prod_duration="{item, value}">
<span v-if="value.days > 2" :title="`${value.days} d ${value.hours} h ${value.minutes} m ${(value.seconds + value.milliseconds/1000).toFixed(3)} s`">
{{ value.days }} d
</span>
<span v-else>
{{ value.days }} d {{ value.hours }} h {{ value.minutes }} m {{ (value.seconds + value.milliseconds/1000).toFixed(1) }} s
</span>
</template>
<template v-slot:item.prod_distance="{item, value}">
{{ (value/1000).toFixed(1) }} km
</template>
<template v-slot:item.shooting_rate_mean="{item, value}">
{{ (value).toFixed(2) }} s ±{{ (item.shooting_rate_sd).toFixed(3) }} s
</template>
<template v-slot:item.shots_per_point="{item, value}">
<div>
{{ ((item.prime + item.other)/item.points).toFixed(1) }}
({{ ((((item.prime + item.other)/item.points) / item.num_projects)*100).toFixed(2) }}%)
</div>
<v-progress-linear
:value="((((item.prime + item.other)/item.points) / item.num_projects)*100)"
></v-progress-linear>
</template>
<template v-slot:expanded-item="{ headers, item }">
<td :colspan="headers.length">
<v-data-table class="ma-1"
:headers="projectHeaders"
:items="item.projects"
dense
hide-default-footer
>
<template v-slot:item.pid="{item, value}">
<a :href="`/projects/${value}`" title="Go to project">{{ value }}</a>
</template>
<template v-slot:item.fsp="{item, value}">
<span title="First production shot">{{value.tstamp.substr(0, 10)}}</span>
</template>
<template v-slot:item.lsp="{item, value}">
<span title="Last production shot">{{value.tstamp.substr(0, 10)}}</span>
</template>
<template v-slot:item.prod_duration="{item, value}">
<span v-if="value.days > 2" :title="`${value.days} d ${value.hours} h ${value.minutes} m ${(value.seconds + value.milliseconds/1000).toFixed(3)} s`">
{{ value.days }} d
</span>
<span v-else>
{{ value.days }} d {{ value.hours }} h {{ value.minutes }} m {{ (value.seconds + value.milliseconds/1000).toFixed(1) }} s
</span>
</template>
<template v-slot:item.prod_distance="{item, value}">
{{ (value/1000).toFixed(1) }} km
</template>
</v-data-table>
</td>
</template>
</v-data-table>
</v-container>
</template>
<style>
td p:last-of-type {
margin-bottom: 0;
}
</style>
<script>
import { mapActions, mapGetters } from 'vuex';
import AccessMixin from '@/mixins/access';
// FIXME send to lib/utils or so
/*
function duration_to_ms(v) {
if (v instanceof Object) {
return (
(v.days || 0) * 86400000 +
(v.hours || 0) * 3600000 +
(v.minutes || 0) * 60000 +
(v.seconds || 0) * 1000 +
(v.milliseconds || 0)
);
} else {
return {
days: 0,
hours: 0,
minutes: 0,
seconds: 0,
milliseconds: 0
}
}
}
function ms_to_duration(v) {
const days = Math.floor(v / 86400000);
v %= 86400000;
const hours = Math.floor(v / 3600000);
v %= 3600000;
const minutes = Math.floor(v / 60000);
v %= 60000;
const seconds = Math.floor(v / 1000);
const milliseconds = v % 1000;
return { days, hours, minutes, seconds, milliseconds };
}
function normalise_duration (v) {
return ms_to_duration(duration_to_ms(v));
}
function add_durations(a, b) {
return ms_to_duration(duration_to_ms(a) + duration_to_ms(b));
}
*/
export default {
name: "GroupList",
components: {
},
mixins: [
AccessMixin
],
data () {
return {
headers: [
{
value: "group",
text: "Group name"
},
{
value: "num_projects",
text: "Number of campaigns"
},
{
value: "lines",
text: "Preplot lines"
},
{
value: "points",
text: "Preplot points"
},
{
value: "sequences",
text: "Total sequences"
},
{
value: "shots_total",
text: "Total shots"
},
{
value: "prime",
text: "Total prime"
},
{
value: "other",
text: "Total reshoot + infill"
},
/*
{
value: "ntba",
text: "Total NTBA"
},
*/
{
value: "prod_duration",
text: "Total duration"
},
{
value: "prod_distance",
text: "Total distance"
},
{
value: "shooting_rate_mean",
text: "Shooting rate (mean)"
},
{
value: "shots_per_point",
text: "Shots per point"
},
],
items: [],
expanded: [],
options: { sortBy: ["group"], sortDesc: [false] },
projectHeaders: [
{
value: "pid",
text: "ID"
},
{
value: "name",
text: "Name"
},
{
value: "fsp",
text: "Start"
},
{
value: "lsp",
text: "Finish"
},
{
value: "lines",
text: "Preplot lines"
},
{
value: "seq_final",
text: "Num. of sequences"
},
{
value: "prod_duration",
text: "Duration"
},
{
value: "prod_distance",
text: "Distance"
},
],
// Context menu stuff
contextMenuShow: false,
contextMenuX: 0,
contextMenuY: 0,
contextMenuItem: null,
/*
// FIXME Eventually need to move this into Vuex
groups: []
*/
}
},
computed: {
displayItems () {
return this.items.filter(i => i.prod_distance);
},
...mapGetters(['loading', 'groups'])
},
methods: {
/*
async prepareGroups () {
//const groups = await this.api(["/prospects"]);
//console.log("groups", groups);
const groups = {};
for (const project of this.projects) {
if (!project.prod_distance) {
// This project has no production data (either not started yet
// or production data has not been imported) so we skip it.
continue;
}
if (!project.prod_duration.days) {
project.prod_duration = normalise_duration(project.prod_duration);
}
for (const name of project.groups) {
if (!(name in groups)) {
groups[name] = {
group: name,
num_projects: 0,
lines: 0,
points: 0,
sequences: 0,
// Shots:
prime: 0,
other: 0,
ntba: 0,
prod_duration: {
days: 0,
hours: 0,
minutes: 0,
seconds: 0,
milliseconds: 0
},
prod_distance: 0,
shooting_rate: [],
projects: []
};
}
const group = groups[name];
group.num_projects++;
group.lines = Math.max(group.lines, project.lines); // In case preplots changed
group.points = Math.max(group.points, project.total); // Idem
group.sequences += project.seq_final;
group.prime += project.prime;
group.other += project.other;
//group.ntba += project.ntba;
group.prod_duration = add_durations(group.prod_duration, project.prod_duration);
group.prod_distance += project.prod_distance;
group.shooting_rate.push(project.shooting_rate);
group.projects.push(project);
}
}
this.groups = [];
for (const group of Object.values(groups)) {
group.shooting_rate_mean = d3a.mean(group.shooting_rate);
group.shooting_rate_sd = d3a.deviation(group.shooting_rate);
delete group.shooting_rate;
this.groups.push(group);
}
},
*/
async list () {
this.items = [...this.groups];
},
async load () {
await this.refreshProjects();
//await this.prepareGroups();
await this.list();
},
registerNotificationHandlers () {
this.$store.dispatch('registerHandler', {
table: 'project',
handler: (context, message) => {
if (message.payload?.table == "public") {
this.load();
}
}
});
},
...mapActions(["api", "showSnack", "refreshProjects"])
},
mounted () {
this.registerNotificationHandlers();
this.load();
}
}
</script>

View File

@@ -17,7 +17,7 @@
</v-card-title>
<v-card-text>
<v-menu v-if="writeaccess"
<v-menu v-if="writeaccess()"
v-model="contextMenuShow"
:position-x="contextMenuX"
:position-y="contextMenuY"
@@ -164,7 +164,7 @@
</v-text-field>
<div v-else>
<span v-html="$options.filters.markdownInline(item.remarks)"></span>
<v-btn v-if="writeaccess && edit === null"
<v-btn v-if="writeaccess() && edit === null"
icon
small
title="Edit"
@@ -196,6 +196,7 @@
<script>
import { mapActions, mapGetters } from 'vuex';
import DougalLineStatus from '@/components/line-status';
import AccessMixin from '@/mixins/access';
export default {
name: "LineList",
@@ -204,6 +205,10 @@ export default {
DougalLineStatus
},
mixins: [
AccessMixin
],
data () {
return {
headers: [
@@ -281,7 +286,7 @@ export default {
},
computed: {
...mapGetters(['user', 'writeaccess', 'linesLoading', 'lines', 'sequences', 'plannedSequences'])
...mapGetters(['user', 'linesLoading', 'lines', 'sequences', 'plannedSequences'])
},
watch: {

View File

@@ -5,6 +5,22 @@
<v-card-title>
<v-toolbar flat>
<v-toolbar-title>
<template v-if="$route.params.sequence">
<v-btn icon small
:disabled="sequenceIndex >= (sequences.length - 1)"
:to="{name: 'logBySequence', params: { sequence: (sequences[sequences.length-1]||{}).sequence }}"
title="Go to the first sequence"
>
<v-icon dense>mdi-chevron-double-left</v-icon>
</v-btn>
<v-btn icon small
:disabled="sequenceIndex >= (sequences.length - 1)"
:to="{name: 'logBySequence', params: { sequence: (sequences[sequenceIndex+1]||{}).sequence }}"
title="Go to the previous sequence"
>
<v-icon dense>mdi-chevron-left</v-icon>
</v-btn>
</template>
<span class="d-none d-lg-inline">
{{
$route.params.sequence
@@ -31,20 +47,40 @@
: ""
}}
</span>
<template v-if="$route.params.sequence">
<v-btn icon small
:disabled="sequenceIndex==0"
:to="{name: 'logBySequence', params: { sequence: (sequences[sequenceIndex-1]||{}).sequence }}"
title="Go to the next sequence"
>
<v-icon dense>mdi-chevron-right</v-icon>
</v-btn>
<v-btn icon small class="mr-1"
:disabled="sequenceIndex==0"
:to="{name: 'logBySequence', params: { sequence: (sequences[0]||{}).sequence }}"
title="Go to the last sequence"
>
<v-icon dense>mdi-chevron-double-right</v-icon>
</v-btn>
</template>
<a v-if="$route.params.sequence"
class="mr-3"
:href="`/projects/${$route.params.project}/sequences/${$route.params.sequence}`"
title="View the shotlog for this sequence"
>
<v-icon
right
color="teal"
>mdi-format-list-numbered</v-icon>
</a>
</v-toolbar-title>
<a v-if="$route.params.sequence"
class="mr-3"
:href="`/projects/${$route.params.project}/sequences/${$route.params.sequence}`"
title="View the shotlog for this sequence"
>
<v-icon
right
color="teal"
>mdi-format-list-numbered</v-icon>
</a>
<dougal-event-edit v-if="writeaccess"
<dougal-event-edit v-if="$parent.writeaccess()"
v-model="eventDialog"
v-bind="editedEvent"
:available-labels="userLabels"
@@ -54,7 +90,7 @@
>
</dougal-event-edit>
<dougal-event-edit-labels v-if="writeaccess"
<dougal-event-edit-labels v-if="$parent.writeaccess()"
v-model="eventLabelsDialog"
:labels="userLabels"
:selected="contextMenuItem ? contextMenuItem.labels||[] : []"
@@ -171,7 +207,7 @@
<v-card-text>
<!-- BEGIN Context menu for log entries -->
<v-menu v-if="writeaccess"
<v-menu v-if="$parent.writeaccess()"
v-model="contextMenuShow"
:position-x="contextMenuX"
:position-y="contextMenuY"
@@ -325,7 +361,13 @@
@click="labelSearch=label"
>{{label}}</v-chip>
</span>
<dougal-event-edit-history v-if="entry.has_edits && writeaccess"
<v-icon v-if="entry.meta.auto || entry.meta.author"
x-small
left
color="primary"
:title="entry.meta.author?`Automatic event by ${entry.meta.author}`:'Automatic event'"
>mdi-robot</v-icon>
<dougal-event-edit-history v-if="entry.has_edits && $parent.writeaccess()"
:id="entry.id"
:disabled="eventsLoading"
:labels="labels"
@@ -488,17 +530,6 @@ export default {
rows () {
const rows = {};
this.items
.filter(i => {
return !this.$route.params.sequence || (this.$route.params.sequence == i.sequence)
})
.filter(i => {
for (const label of this.filterableLabels) {
if (!this.shownLabels.includes(label) && i.labels.includes(label)) {
return false;
}
}
return true;
})
.forEach(i => {
const key = (i.sequence && i.point) ? (i.sequence+"@"+i.point) : i.tstamp;
if (!rows[key]) {
@@ -529,6 +560,10 @@ export default {
.sort( (a, b) => b[1]-a[1] );
},
filteredLabels () {
return this.filterableLabels.filter( label => !this.shownLabels.includes(label) );
},
presetRemarks () {
return this.projectConfiguration?.events?.presetRemarks ?? [];
},
@@ -541,7 +576,17 @@ export default {
}
},
...mapGetters(['user', 'writeaccess', 'eventsLoading', 'online', 'sequence', 'line', 'point', 'position', 'timestamp', 'lineName', 'events', 'labels', 'userLabels', 'projectConfiguration']),
sequenceIndex () {
if ("sequence" in this.$route.params) {
const index = this.sequences.findIndex( i => i.sequence == this.$route.params.sequence );
if (index != -1) {
return index;
}
}
// return undefined
},
...mapGetters(['user', 'eventsLoading', 'online', 'sequence', 'sequences', 'line', 'point', 'position', 'timestamp', 'lineName', 'events', 'labels', 'userLabels', 'projectConfiguration']),
...mapState({projectSchema: state => state.project.projectSchema})
},
@@ -549,6 +594,7 @@ export default {
watch: {
options: {
async handler () {
this.savePrefs();
await this.fetchEvents();
},
deep: true
@@ -567,12 +613,19 @@ export default {
},
filter (newVal, oldVal) {
this.savePrefs();
if (newVal?.toLowerCase() != oldVal?.toLowerCase()) {
this.fetchEvents();
}
},
labelSearch () {
this.savePrefs();
this.fetchEvents();
},
filteredLabels () {
this.savePrefs()
this.fetchEvents();
},
@@ -581,7 +634,7 @@ export default {
},
user (newVal, oldVal) {
this.itemsPerPage = Number(localStorage.getItem(`dougal/prefs/${this.user?.name}/${this.$route.params.project}/${this.$options.name}/items-per-page`)) || 25;
this.loadPrefs();
}
},
@@ -632,8 +685,10 @@ export default {
async fetchEvents (opts = {}) {
const options = {
sequence: this.$route.params.sequence,
text: this.filter,
label: this.labelSearch,
excludeLabels: this.filteredLabels,
...this.options
};
const res = await this.getEvents([this.$route.params.project, options]);
@@ -737,6 +792,13 @@ export default {
if (event.id) {
const id = event.id;
delete event.id;
// If this is an edit, ensure that it is *either*
// a timestamp event or a sequence + point one.
if (event.sequence && event.point && event.tstamp) {
delete event.tstamp;
}
this.putEvent(id, event, callback); // No await
} else {
this.postEvent(event, callback); // No await
@@ -829,7 +891,7 @@ export default {
viewOnMap(item) {
if (item?.meta && item.meta?.geometry?.type == "Point") {
const [ lon, lat ] = item.meta.geometry.coordinates;
return `map#15/${lon.toFixed(6)}/${lat.toFixed(6)}`;
return `map#z15x${lon.toFixed(6)}y${lat.toFixed(6)}::${lon.toFixed(6)},${lat.toFixed(6)}`;
} else if (item?.items) {
return this.viewOnMap(item.items[0]);
}
@@ -864,10 +926,36 @@ export default {
*/
},
getPrefsKey () {
return `dougal/prefs/${this.user?.name}/${this.$route.params.project}/Log/v1`;
},
savePrefs () {
const prefs = {
shownLabels: this.shownLabels,
labelSearch: this.labelSearch,
filter: this.filter,
options: this.options
};
localStorage.setItem(this.getPrefsKey(), JSON.stringify(prefs));
},
loadPrefs () {
const stored = localStorage.getItem(this.getPrefsKey());
if (stored) {
const prefs = JSON.parse(stored);
if (prefs.shownLabels !== undefined) this.shownLabels = prefs.shownLabels;
if (prefs.labelSearch !== undefined) this.labelSearch = prefs.labelSearch;
if (prefs.filter !== undefined) this.filter = prefs.filter;
if (prefs.options !== undefined) this.options = prefs.options;
}
},
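// Illustrative shape of what ends up in localStorage under
// "dougal/prefs/<user>/<project>/Log/v1" (the field values here are examples only):
//   {"shownLabels":["..."],"labelSearch":null,"filter":"","options":{"itemsPerPage":25,...}}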
...mapActions(["api", "showSnack", "refreshEvents", "getEvents"])
},
async mounted () {
this.loadPrefs();
this.fetchEvents();
window.addEventListener('keyup', this.handleKeyboardEvent);

View File

@@ -81,6 +81,13 @@ export default {
await this.logout();
await this.login(this.credentials);
if (this.user) {
console.log("Login successful");
// Should trigger auto-refresh over ws as well as authenticating the
// user over ws.
this.$root.sendJwt();
}
if (this.user && !this.user.autologin) {
this.$router.replace("/");
} else {

File diff suppressed because it is too large.

View File

@@ -0,0 +1,761 @@
<script>
// Important info about performance:
// https://deck.gl/docs/developer-guide/performance#supply-attributes-directly
import { Deck, WebMercatorViewport, FlyToInterpolator, CompositeLayer } from '@deck.gl/core';
import { GeoJsonLayer, LineLayer, PathLayer, BitmapLayer, ScatterplotLayer, ColumnLayer, IconLayer } from '@deck.gl/layers';
import {HeatmapLayer} from '@deck.gl/aggregation-layers';
import { TileLayer, MVTLayer, TripsLayer } from '@deck.gl/geo-layers';
import { SimpleMeshLayer } from '@deck.gl/mesh-layers';
import { OBJLoader } from '@loaders.gl/obj';
//import { json } from 'd3-fetch';
import * as d3a from 'd3-array';
import { DougalBinaryBundle, DougalBinaryChunkSequential, DougalBinaryChunkInterleaved } from '@dougal/binary';
import { DougalShotLayer } from '@/lib/deck.gl';
import { DougalSequenceLayer, DougalEventsLayer } from '@/lib/deck.gl';
import DougalBinaryLoader from '@/lib/deck.gl/DougalBinaryLoader';
import { colors } from 'vuetify/lib'
function hexToArray (hex, defaultValue = [ 0xc0, 0xc0, 0xc0, 0xff ]) {
if (typeof hex != "string" || hex.length < 6) {
return defaultValue;
}
if (hex[0] == "#") {
hex = hex.slice(1); // remove the '#' character
}
return [
parseInt(hex.slice(0, 2), 16),
parseInt(hex.slice(2, 4), 16),
parseInt(hex.slice(4, 6), 16),
hex.length > 6 ? parseInt(hex.slice(6, 8), 16) : 255
];
}
function namedColourToArray (name) {
const parts = name.split(/\s+/).map( (s, i) =>
i
? s.replace("-", "")
: s.replace(/-([a-z])/g, (match, group1) => group1.toUpperCase())
);
if (parts.length == 1) parts[1] = "base";
const hex = parts.reduce((acc, key) => acc[key], colors);
return hexToArray(hex);
}
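// Rough behaviour of the helpers above (assuming the stock Vuetify colour palette):
//   hexToArray("#ff9800")   -> [255, 152, 0, 255]
//   hexToArray("#ff980080") -> [255, 152, 0, 128]
//   namedColourToArray("deep-purple lighten-2") looks up colors.deepPurple.lighten2
//   namedColourToArray("teal")                  looks up colors.teal.base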
export default {
name: "MapLayersMixin",
data () {
return {
COLOUR_SCALE_1: [
// negative
[65, 182, 196],
[127, 205, 187],
[199, 233, 180],
[237, 248, 177],
// positive
[255, 255, 204],
[255, 237, 160],
[254, 217, 118],
[254, 178, 76],
[253, 141, 60],
[252, 78, 42],
[227, 26, 28],
[189, 0, 38],
[128, 0, 38]
]
};
},
methods: {
normalisedColourScale(v, scale = this.COLOUR_SCALE_1, min = 0, max = 1) {
const range = max-min;
const i = Math.min(scale.length - 1, Math.max(Math.round((v - min) / range * scale.length), 0));
//console.log(`v=${v}, scale.length=${scale.length}, min=${min}, max=${max}, i=${i}, → ${scale[i]}`);
return scale[i];
},
makeDataFromBinary ( {positions, values, udv} ) {
const totalCount = positions.length / 2;
const attributes = {
getPosition: {
value: positions,
type: 'float32',
size: 2
},
udv
};
values.forEach((valArray, k) => {
let value = valArray;
if (valArray instanceof BigUint64Array) {
value = Float64Array.from(valArray, v => Number(v));
}
attributes[`value${k}`] = {
value,
type: value instanceof Float64Array ? 'float64' :
value instanceof Uint16Array ? 'uint16' :
value instanceof Uint32Array ? 'uint32' : 'float32',
size: 1
};
});
return {
length: totalCount,
attributes
};
},
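// Shape of the object returned above (sketch, for two points and one value array):
//   {
//     length: 2,
//     attributes: {
//       getPosition: { value: Float32Array [x0, y0, x1, y1], type: 'float32', size: 2 },
//       value0:      { value: Float32Array [v0, v1],         type: 'float32', size: 1 },
//       udv
//     }
//   }
// which matches deck.gl's "supply attributes directly" fast path referenced at the top.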
loadOptions (options = {}) {
return {
loadOptions: {
fetch: {
method: 'GET',
headers: {
'Authorization': `Bearer ${this.$store.getters.jwt}`,
}
},
...options
},
};
},
osmLayer (options = {}) {
return new TileLayer({
id: "osm",
// https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames#Tile_servers
data: 'https://c.tile.openstreetmap.org/{z}/{x}/{y}.png',
minZoom: 0,
maxZoom: 19,
tileSize: 256,
renderSubLayers: props => {
const {
bbox: {west, south, east, north}
} = props.tile;
return new BitmapLayer(props, {
data: null,
image: props.data,
bounds: [west, south, east, north]
});
},
...options
})
},
// OSM tiles layer. Handy to make water transparent
// but not super reliable yet
osmVectorLayer (options = {}) {
return new MVTLayer({
id: 'osm',
data: 'https://vector.openstreetmap.org/shortbread_v1/{z}/{x}/{y}.mvt',
minZoom: 0,
maxZoom: 14,
getFillColor: feature => {
const layer = feature.properties.layerName;
//console.log("layer =", layer, feature.properties.kind);
switch (layer) {
case "ocean":
return [0, 0, 0, 0];
case "land":
return [ 0x54, 0x6E, 0x7A, 255 ];
default:
return [ 240, 240, 240, 255 ];
}
},
getLineColor: feature => {
if (feature.properties.layer === 'water') {
return [0, 0, 0, 0]; // No outline for water
}
return [192, 192, 192, 255]; // Default line color for roads, etc.
},
getLineWidth: feature => {
if (feature.properties.highway) {
return feature.properties.highway === 'motorway' ? 6 : 3; // Example road widths
}
return 1;
},
stroked: true,
filled: true,
pickable: true
});
},
openSeaMapLayer (options = {}) {
return new TileLayer({
id: "sea",
data: 'https://tiles.openseamap.org/seamark/{z}/{x}/{y}.png',
minZoom: 0,
maxZoom: 19,
tileSize: 256,
renderSubLayers: props => {
const {
bbox: {west, south, east, north}
} = props.tile;
return new BitmapLayer(props, {
data: null,
image: props.data,
bounds: [west, south, east, north]
});
},
...options
})
},
// Norwegian nautical charts
// As of 2025, not available for some weird reason
nauLayer (options = {}) {
return new TileLayer({
id: "nau",
// https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames#Tile_servers
data: 'https://opencache.statkart.no/gatekeeper/gk/gk.open_gmaps?layers=sjokartraster&zoom={z}&x={x}&y={y}',
minZoom: 0,
maxZoom: 19,
tileSize: 256,
renderSubLayers: props => {
const {
bbox: {west, south, east, north}
} = props.tile;
return new BitmapLayer(props, {
data: null,
image: props.data,
bounds: [west, south, east, north]
});
},
...options
})
},
vesselTrackPointsLayer (options = {}) {
if (!this.vesselPosition) return;
return new SimpleMeshLayer({
id: 'navp',
data: [ this.vesselPosition ],
//getColor: [ 255, 48, 0 ],
getColor: [ 174, 1, 174 ],
getOrientation: d => [0, (270 - (d.heading ?? d.cmg ?? d.bearing ?? d.lineBearing ?? 0)) % 360 , 0],
getPosition: d => [ d.x, d.y ],
mesh: `/assets/boat0.obj`,
sizeScale: 0.1,
loaders: [OBJLoader],
pickable: true,
...options
});
},
vesselTrackLinesLayer (options = {}) {
const cfg = this.vesselTrackPeriodSettings[this.vesselTrackPeriod];
let ts1 = new Date(this.vesselTrackLastRefresh*1000);
let ts0 = new Date(ts1.valueOf() - cfg.offset);
let di = cfg.decimation;
let l = 10000;
const breakLimit = (di ? di*20 : 5 * 60) * 1000;
let trailLength = (ts1 - ts0) / 1000;
return new TripsLayer({
id: 'navl',
data: `/api/vessel/track/?di=${di}&l=${l}&project=&ts0=${ts0.toISOString()}&ts1=${ts1.toISOString()}`,
...this.loadOptions({
fetch: {
method: 'GET',
headers: {
Authorization: `Bearer ${this.$store.getters.jwt}`,
}
}
}),
dataTransform: (data) => {
if (data.length >= l) {
console.warn(`Vessel track data may be truncated! Limit: ${l}`);
}
const paths = [];
let prevTstamp;
paths.push({path: [], timestamps: [], num: 0, ts0: +Infinity, ts1: -Infinity});
for (const el of data) {
const tstamp = new Date(el.tstamp).valueOf();
const curPath = () => paths[paths.length-1];
if (prevTstamp && Math.abs(tstamp - prevTstamp) > breakLimit) {
// Start a new path
console.log(`Breaking path on interval ${Math.abs(tstamp - prevTstamp)} > ${breakLimit}`);
paths.push({path: [], timestamps: [], num: paths.length, ts0: +Infinity, ts1: -Infinity});
}
if (tstamp < curPath().ts0) {
curPath().ts0 = tstamp;
}
if (tstamp > curPath().ts1) {
curPath().ts1 = tstamp;
}
curPath().path.push([el.x, el.y]);
curPath().timestamps.push(tstamp/1000);
prevTstamp = tstamp;
}
paths.forEach (path => {
path.nums = paths.length;
path.ts0 = new Date(path.ts0);
path.ts1 = new Date(path.ts1);
});
return paths;
},
getPath: d => d.path,
getTimestamps: d => d.timestamps,
currentTime: ts1.valueOf() / 1000,
trailLength,
widthUnits: "meters",
widthMinPixels: 4,
getWidth: 10,
getColor: [ 174, 1, 126, 200 ],
stroked: true,
pickable: true,
...options
});
},
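// dataTransform sketch: consecutive samples further apart than breakLimit are split
// into separate trips, so gaps in the vessel track are not drawn as straight lines;
// each path also records its own ts0/ts1 time envelope alongside the per-point timestamps.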
eventsLogLayer (options = {}) {
const labelColour = (d, i, t, c = [127, 65, 90]) => {
const label = d?.properties?.labels?.[0];
const colour = this.labels[label]?.view?.colour ?? "#c0c0c0";
if (colour) {
if (colour[0] == "#") {
c = hexToArray(colour);
} else {
c = namedColourToArray(colour);
}
} else {
//return [127, 65, 90];
}
if (t != null) {
c[3] = t;
}
return c;
};
return new DougalEventsLayer({
id: 'log',
data: `/api/project/${this.$route.params.project}/event?mime=application/geo%2Bjson`,
...this.loadOptions(),
lineWidthMinPixels: 2,
getPosition: d => d.geometry.coordinates,
jitter: 0.00015,
getElevation: d => Math.min(Math.max(d.properties.remarks?.length || 10, 10), 200),
getFillColor: (d, i) => labelColour(d, i, 200),
getLineColor: (d, i) => labelColour(d, i, 200),
radius: 0.001,
radiusScale: 1,
// This just won't work with radiusUnits = "pixels".
// See: https://grok.com/share/c2hhcmQtMw%3D%3D_16578be4-20fd-4000-a765-f082503d0495
radiusUnits: "pixels",
radiusMinPixels: 1.5,
radiusMaxPixels: 2.5,
pickable: true,
...options
})
},
preplotSaillinesLinesLayer (options = {}) {
return new GeoJsonLayer({
id: 'psll',
data: `/api/project/${this.$route.params.project}/gis/preplot/line?class=V&v=${this.lineTStamp?.valueOf()}`,
...this.loadOptions(),
lineWidthMinPixels: 1,
getLineColor: (d) => d.properties.ntba ? [240, 248, 255, 200] : [85, 170, 255, 200],
getLineWidth: 1,
getPointRadius: 2,
radiusUnits: "pixels",
pointRadiusMinPixels: 2,
pickable: true,
...options
})
},
preplotLinesLayer (options = {}) {
return new GeoJsonLayer({
id: 'ppll',
data: `/api/project/${this.$route.params.project}/gis/preplot/line?v=${this.lineTStamp?.valueOf()}`,
...this.loadOptions(),
lineWidthMinPixels: 1,
getLineColor: (d) => d.properties.ntba ? [240, 248, 255, 200] : [85, 170, 255, 200],
getLineWidth: 1,
getPointRadius: 2,
radiusUnits: "pixels",
pointRadiusMinPixels: 2,
pickable: true,
...options
})
},
plannedLinesLinesLayer (options = {}) {
return new PathLayer({
id: 'planl',
data: [...this.plannedSequences], // Create new array to trigger Deck.gl update
dataTransform: (sequences) => {
// Raise the data 10 m above ground so that it's visible over heatmaps, etc.
return sequences.map( seq => ({
...seq,
geometry: {
...seq.geometry,
coordinates: seq.geometry.coordinates.map( pos => [...pos, 10] )
}
}))
},
getPath: d => d.geometry.coordinates,
//getSourcePosition: d => d.geometry.coordinates[0],
//getTargetPosition: d => d.geometry.coordinates[1],
widthUnits: "meters",
widthMinPixels: 4,
getWidth: 25,
//getLineWidth: 10,
getColor: (d) => {
const k = (d?.azimuth??0)/360*255;
return [ k, 128, k, 200 ];
},
stroked: true,
pickable: true,
...options
});
},
rawSequencesLinesLayer (options = {}) {
return new GeoJsonLayer({
id: 'seqrl',
data: `/api/project/${this.$route.params.project}/gis/raw/line?v=${this.sequenceTStamp?.valueOf()}`,
...this.loadOptions(),
lineWidthMinPixels: 1,
getLineColor: (d) => d.properties.ntbp ? [0xe6, 0x51, 0x00, 200] : [0xff, 0x98, 0x00, 200],
getLineWidth: 1,
getPointRadius: 2,
radiusUnits: "pixels",
pointRadiusMinPixels: 2,
pickable: true,
...options
})
},
finalSequencesLinesLayer (options = {}) {
return new GeoJsonLayer({
id: 'seqfl',
data: `/api/project/${this.$route.params.project}/gis/final/line?v=${this.sequenceTStamp?.valueOf()}`,
...this.loadOptions(),
lineWidthMinPixels: 1,
getLineColor: (d) => d.properties.pending ? [0xa7, 0xff, 0xab, 200] : [0x00, 0x96, 0x88, 200],
getLineWidth: 1,
getPointRadius: 2,
radiusUnits: "pixels",
pointRadiusMinPixels: 2,
pickable: true,
...options
})
},
preplotSaillinesPointLayer (options = {}) {
return new DougalSequenceLayer({
id: 'pslp',
data: `/api/project/${this.$route.params.project}/line/sail?v=${this.lineTStamp?.valueOf()}`, // API endpoint returning binary data
loaders: [DougalBinaryLoader],
...this.loadOptions({
fetch: {
method: 'GET',
headers: {
Authorization: `Bearer ${this.$store.getters.jwt}`,
Accept: 'application/vnd.aaltronav.dougal+octet-stream'
}
}
}),
getRadius: 2,
getFillColor: (d, {data, index}) => data.attributes.value2.value[index] ? [240, 248, 255, 200] : [85, 170, 255, 200],
//getFillColor: [0, 120, 220, 200],
pickable: true,
...options
});
},
preplotPointsLayer (options = {}) {
return new DougalSequenceLayer({
id: 'pplp',
data: `/api/project/${this.$route.params.project}/line/source?v=${this.lineTStamp?.valueOf()}`, // API endpoint returning binary data
loaders: [DougalBinaryLoader],
...this.loadOptions({
fetch: {
method: 'GET',
headers: {
Authorization: `Bearer ${this.$store.getters.jwt}`,
Accept: 'application/vnd.aaltronav.dougal+octet-stream'
}
}
}),
getRadius: 2,
getFillColor: (d, {data, index}) => data.attributes.value2.value[index] ? [240, 248, 255, 200] : [85, 170, 255, 200],
//getFillColor: [0, 120, 220, 200],
pickable: true,
...options
});
},
plannedLinesPointsLayer (options = {}) {
},
rawSequencesPointsLayer (options = {}) {
return new DougalSequenceLayer({
id: 'seqrp',
data: this.makeDataFromBinary(this.sequenceBinaryData),
getRadius: 2,
getFillColor: [0, 120, 220, 200],
pickable: true,
...options
});
},
finalSequencesPointsLayer (options = {}) {
return new DougalSequenceLayer({
id: 'seqfp',
data: this.makeDataFromBinary(this.sequenceBinaryDataFinal),
getRadius: 2,
getFillColor: [220, 120, 0, 200],
pickable: true,
...options
});
},
heatmapLayer(options = {}) {
const { positions, values } = this.heatmapValue?.startsWith("co_")
? this.sequenceBinaryDataFinal
: this.sequenceBinaryData;
if (!positions?.length || !values?.length) {
console.warn('No valid data for heatmapLayer');
return new HeatmapLayer({
id: 'seqrh',
data: [],
...options
});
}
let weights, offset = 0, scaler = 1;
let colorDomain = null;
let aggregation = "MEAN";
let transform = (v) => v;
switch (this.heatmapValue) {
case "total_error":
weights = Float32Array.from(values[3], (ei, i) => {
const ej = values[4][i];
return Math.sqrt(ei * ei + ej * ej) / 100; // Euclidean distance in meters
});
colorDomain = [2, 20]; // scale: 1 (already divided by 100 above)
break;
case "delta_i":
weights = values[3];
scaler = 0.1;
colorDomain = [100, 1200]; // scale: 100 (1 to 12 m)
break;
case "delta_j":
weights = values[4];
scaler = 0.1;
colorDomain = [10, 80]; // scale: 100 (0.1 to 0.8 m)
break;
case "co_total_error":
weights = Float32Array.from(values[3], (ei, i) => {
const ej = values[4][i];
return Math.sqrt(ei * ei + ej * ej) / 100; // Euclidean distance in meters
});
colorDomain = [10, 150]; // Scale: 100 (0.1 to 1 m)
break;
case "co_delta_i":
weights = values[5];
scaler = 0.1;
colorDomain = [10, 150];
break;
case "co_delta_j":
weights = values[6];
scaler = 0.1;
colorDomain = [0.2, 2];
break;
case "delta_μ":
weights = values[5];
scaler = 0.1;
break;
case "delta_σ":
weights = values[6];
scaler = 0.1;
colorDomain = [ 0.1, 1.5 ];
break;
case "delta_R":
weights = values[7];
scaler = 0.1;
colorDomain = [ 0.5, 1.0 ];
break;
case "press_μ":
weights = values[8];
offset = -2000;
colorDomain = [ 5, 50 ];
break;
case "press_σ":
weights = values[9];
colorDomain = [ 1, 19 ];
break;
case "press_R":
weights = values[10];
colorDomain = [ 3, 50 ];
break;
case "depth_μ":
weights = values[11];
offset = -6;
scaler = 0.1;
colorDomain = [ 0.1, 1 ];
break;
case "depth_σ":
weights = values[12];
scaler = 0.1;
break;
case "depth_R":
weights = values[13];
scaler = 0.1;
break;
case "fill_μ":
weights = values[14];
colorDomain = [ 300, 1000 ];
break;
case "fill_σ":
weights = values[15];
offset = -250;
colorDomain = [ 0, 250 ];
break;
case "fill_R":
weights = values[16];
offset = -500;
colorDomain = [ 0, 500 ];
break;
case "delay_μ":
weights = values[17];
offset = -150;
colorDomain = [ 1.5, 25 ];
//transform = (v) => {console.log("τ(v)", v); return v;};
break;
case "delay_σ":
weights = values[18];
break;
case "delay_R":
weights = values[19];
break;
case "no_fire":
weights = values[20];
transform = v => v >> 4;
aggregation = "SUM";
colorDomain = [ 0.1, 1.5 ];
break;
case "autofire":
weights = values[20];
transform = v => v & 0xf;
aggregation = "SUM";
colorDomain = [ 0.5, 1.5 ];
break;
case "misfire":
weights = values[20];
aggregation = "SUM";
colorDomain = [ 0.5, 1.5 ];
break;
}
const stats = {
min: d3a.min(weights),
mode: d3a.mode(weights),
mean: d3a.mean(weights),
max: d3a.max(weights),
sd: d3a.deviation(weights),
};
const sr0 = [ stats.mean - 2.1*stats.sd, stats.mean + 2.1*stats.sd ];
const sr1 = [ stats.mode - 2.1*stats.sd, stats.mode + 2.1*stats.sd ];
/*
console.log('Positions sample:', positions.slice(0, 10));
console.log('Weights sample:', weights.slice(0, 10));
console.log("Mode:", this.heatmapValue);
console.log('Weight stats:', stats);
console.log("Suggested ranges");
console.log(sr0);
console.log(sr1);
console.log("Actual ranges");
console.log(colorDomain);
*/
return new HeatmapLayer({
id: 'seqrh',
data: {
length: weights.length,
positions,
weights
/*
attributes: {
getPosition: { value: positions, type: 'float32', size: 2 },
getWeight: { value: weights, type: 'float32', size: 1 }
}
*/
},
getPosition: (d, {index, data}) => [ data.positions[index*2], data.positions[index*2+1] ],
getWeight: (d, {index, data}) => transform(Math.abs(data.weights[index] * scaler + offset)),
colorDomain,
radiusPixels: 25,
aggregation,
pickable: false,
...options
});
},
},
}
</script>

Some files were not shown because too many files have changed in this diff.