mirror of
https://gitlab.com/wgp/dougal/software.git
synced 2025-12-06 09:57:09 +00:00
Compare commits
733 Commits
v3
...
a8ff7f3b52
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a8ff7f3b52 | ||
|
|
15b62ff581 | ||
|
|
ade86be556 | ||
|
|
53594416a7 | ||
|
|
ff4b4a9c90 | ||
|
|
5842940d3b | ||
|
|
df6f1b2d32 | ||
|
|
c39afc1f3c | ||
|
|
a68000eac6 | ||
|
|
87aa78af00 | ||
|
|
3b9061aeae | ||
|
|
57dae4c755 | ||
|
|
b1344bebd8 | ||
|
|
3e91ccba8d | ||
|
|
fa0be9c0b7 | ||
|
|
dcbf5496f6 | ||
|
|
8007f46e37 | ||
|
|
4a7683cfd0 | ||
|
|
565a9d7e01 | ||
|
|
b07244c823 | ||
|
|
c909edc41f | ||
|
|
41ef511123 | ||
|
|
4196e9760b | ||
|
|
6b6f5ab511 | ||
|
|
7d8c78648d | ||
|
|
faf7e9c98f | ||
|
|
abf2709705 | ||
|
|
f5dfafd85a | ||
|
|
cf8b0937d9 | ||
|
|
d737f5d676 | ||
|
|
5fe19da586 | ||
|
|
0af0cf4b42 | ||
|
|
ccb8205d26 | ||
|
|
9b3fffdcfc | ||
|
|
dea1e9ee0d | ||
|
|
d45ec767ec | ||
|
|
67520ffc48 | ||
|
|
22a296ba26 | ||
|
|
f89435d80f | ||
|
|
a3f1dd490c | ||
|
|
2fcfcb4f84 | ||
|
|
b60db7e7ef | ||
|
|
4bb087fff7 | ||
|
|
15af5effc3 | ||
|
|
b5c6d04e62 | ||
|
|
571c5a8bca | ||
|
|
c45982829c | ||
|
|
f3958b37b7 | ||
|
|
58374adc68 | ||
|
|
32aea8a5ed | ||
|
|
023b65285f | ||
|
|
a320962669 | ||
|
|
0c0067b8d9 | ||
|
|
ef8466992c | ||
|
|
8e4e70cbdc | ||
|
|
4dadffbbe7 | ||
|
|
24dcebd0d9 | ||
|
|
12a762f44f | ||
|
|
ebf13abc28 | ||
|
|
b3552db02f | ||
|
|
cd882c0611 | ||
|
|
6fc9c020a4 | ||
|
|
75284322f1 | ||
|
|
e849c47f01 | ||
|
|
387d20a4f0 | ||
|
|
2fab06d340 | ||
|
|
7d2fb5558a | ||
|
|
764e2cfb23 | ||
|
|
bf1af1f76c | ||
|
|
09e4cd2467 | ||
|
|
2009d73a2b | ||
|
|
083ee812de | ||
|
|
84510e8dc9 | ||
|
|
7205ec42a8 | ||
|
|
73d85ef81f | ||
|
|
6c4dc35461 | ||
|
|
a5ebff077d | ||
|
|
2a894692ce | ||
|
|
25690eeb52 | ||
|
|
3f9776b61d | ||
|
|
8c81daefc0 | ||
|
|
c173610e87 | ||
|
|
301e5c0731 | ||
|
|
48d9f45fe0 | ||
|
|
cd23a78592 | ||
|
|
e368183bf0 | ||
|
|
02477b071b | ||
|
|
6651868ea7 | ||
|
|
c0b52a8245 | ||
|
|
90ce6f063e | ||
|
|
b2fa0c3d40 | ||
|
|
83ecaad4fa | ||
|
|
1c5fd2e34d | ||
|
|
aabcc74891 | ||
|
|
2a7b51b995 | ||
|
|
5d19ca7ca7 | ||
|
|
910195fc0f | ||
|
|
6e5570aa7c | ||
|
|
595c20f504 | ||
|
|
40d0038d80 | ||
|
|
acdf118a67 | ||
|
|
b9e0975d3d | ||
|
|
39d9c9d748 | ||
|
|
b8b25dcd62 | ||
|
|
db97382758 | ||
|
|
ae8e5d4ef6 | ||
|
|
2c1a24e4a5 | ||
|
|
0b83187372 | ||
|
|
3dd51c82ea | ||
|
|
17e6564e70 | ||
|
|
3a769e7fd0 | ||
|
|
7dde0a15c6 | ||
|
|
2872af8d60 | ||
|
|
4e581d5664 | ||
|
|
a188e9a099 | ||
|
|
cd6ad92d5c | ||
|
|
08dfe7ef0a | ||
|
|
6a5238496e | ||
|
|
bc237cb685 | ||
|
|
4957142fb1 | ||
|
|
5a19c81ed1 | ||
|
|
b583dc6c02 | ||
|
|
134e3bce4e | ||
|
|
f5ad9d7182 | ||
|
|
07874ffe0b | ||
|
|
695add5da6 | ||
|
|
6a94287cba | ||
|
|
c2ec2970f0 | ||
|
|
95d6d0054b | ||
|
|
5070be5ff3 | ||
|
|
d5e77bc946 | ||
|
|
f6faad17db | ||
|
|
94cdf83b13 | ||
|
|
6a788ae28b | ||
|
|
544117eec3 | ||
|
|
e5679ec14b | ||
|
|
a1c174994c | ||
|
|
2db8cc3116 | ||
|
|
99b1a841c5 | ||
|
|
6629e25644 | ||
|
|
7f5f64acb1 | ||
|
|
8f87df1e2f | ||
|
|
8399782409 | ||
|
|
9c86018653 | ||
|
|
a15c97078b | ||
|
|
d769ec48dd | ||
|
|
fe421f545c | ||
|
|
caa8fec8cc | ||
|
|
49fc260ace | ||
|
|
b7038f542c | ||
|
|
40ad0e7650 | ||
|
|
9006deb8be | ||
|
|
6e19b8e18f | ||
|
|
3d474ad8f8 | ||
|
|
821af18f29 | ||
|
|
9cf15ce9dd | ||
|
|
78838cbc41 | ||
|
|
8855da743b | ||
|
|
c67a60a7e6 | ||
|
|
81e06930f0 | ||
|
|
0263eab6d1 | ||
|
|
931219850e | ||
|
|
12369d5419 | ||
|
|
447003c3b5 | ||
|
|
be7157b62c | ||
|
|
8ef56f9946 | ||
|
|
f2df16fe55 | ||
|
|
96db6b1376 | ||
|
|
36d86c176a | ||
|
|
9c38af4bc0 | ||
|
|
be5c6f1fa3 | ||
|
|
17b9d60715 | ||
|
|
e2dd563054 | ||
|
|
67dcc2922b | ||
|
|
11e84f47eb | ||
|
|
1066a03b25 | ||
|
|
08440e3e21 | ||
|
|
d46eb3b455 | ||
|
|
864b430320 | ||
|
|
61cbefd0e9 | ||
|
|
29c484affa | ||
|
|
0806b80445 | ||
|
|
b5a3a22892 | ||
|
|
c13aa23e2f | ||
|
|
3366377ab0 | ||
|
|
59a90e352c | ||
|
|
0f207f8c2d | ||
|
|
c97eaa64f5 | ||
|
|
5b82f8540d | ||
|
|
d977d9c40b | ||
|
|
d16fb41f24 | ||
|
|
c376896ea6 | ||
|
|
2bcdee03d5 | ||
|
|
44113c89c0 | ||
|
|
17c6d9d1e5 | ||
|
|
06cc16721f | ||
|
|
af7485370c | ||
|
|
ad013ea642 | ||
|
|
48d5986415 | ||
|
|
471f4e8e64 | ||
|
|
4be99370e6 | ||
|
|
e464f5f887 | ||
|
|
cc8d790ad8 | ||
|
|
32c6e2c79f | ||
|
|
ba7221ae10 | ||
|
|
1cb9d4b1e2 | ||
|
|
2a0025cdbf | ||
|
|
f768f31b62 | ||
|
|
9f91b1317f | ||
|
|
3b69a15703 | ||
|
|
cd3bd8ab79 | ||
|
|
df193a99cd | ||
|
|
580e94a591 | ||
|
|
3413641c10 | ||
|
|
f092aff015 | ||
|
|
94c6406ea2 | ||
|
|
244d84a3bd | ||
|
|
89c565a0f5 | ||
|
|
31ac8d3c01 | ||
|
|
3bb78040b0 | ||
|
|
1433bda14e | ||
|
|
c0ae033de8 | ||
|
|
05eed7ef26 | ||
|
|
5d2ca513a6 | ||
|
|
b9c8069828 | ||
|
|
b80b8ffb52 | ||
|
|
c2eb82ffe7 | ||
|
|
e517e2f771 | ||
|
|
0afd54447f | ||
|
|
e6004dd62f | ||
|
|
f623954399 | ||
|
|
f8d882da5d | ||
|
|
808c9987af | ||
|
|
4db6d8dd7a | ||
|
|
9a47977f5f | ||
|
|
a58cce8565 | ||
|
|
5487a3a49b | ||
|
|
731778206c | ||
|
|
08e65b512d | ||
|
|
9b05388113 | ||
|
|
1b44389a1a | ||
|
|
0b3711b759 | ||
|
|
5a523d4941 | ||
|
|
122951e3a2 | ||
|
|
90216c12e4 | ||
|
|
9c26909a59 | ||
|
|
0427a3c18c | ||
|
|
c32e6f2b38 | ||
|
|
546d199c52 | ||
|
|
6562de97b9 | ||
|
|
c666a6368e | ||
|
|
d5af6df052 | ||
|
|
0c5ea7f30a | ||
|
|
302642f88d | ||
|
|
48e1369088 | ||
|
|
daa700e7dc | ||
|
|
8db2c8ce25 | ||
|
|
890e48e078 | ||
|
|
11829555cf | ||
|
|
07d8e97f74 | ||
|
|
fc379aba14 | ||
|
|
8cbacb9aa7 | ||
|
|
acb59035e4 | ||
|
|
b7d0ee7da7 | ||
|
|
3a0f720f2f | ||
|
|
6cf6fe29f4 | ||
|
|
6f0f2dadcc | ||
|
|
64fba1adc3 | ||
|
|
3ea82cb660 | ||
|
|
84c1385f88 | ||
|
|
b1b7332216 | ||
|
|
8e7451e17a | ||
|
|
bdeb2b8742 | ||
|
|
ccfabf84f7 | ||
|
|
5d4e219403 | ||
|
|
3b7e4c9f0b | ||
|
|
683f5680b1 | ||
|
|
ce901a03a1 | ||
|
|
f8e5b74c1a | ||
|
|
ec41d26a7a | ||
|
|
386fd59900 | ||
|
|
e47020a21e | ||
|
|
b8f58ac67c | ||
|
|
b3e27ed1b9 | ||
|
|
f5441d186f | ||
|
|
d58bc4d62e | ||
|
|
01d1691def | ||
|
|
bc444fc066 | ||
|
|
989ec84852 | ||
|
|
065f6617af | ||
|
|
825530c1fe | ||
|
|
1ef8eb871f | ||
|
|
2e9c603ab8 | ||
|
|
7f067ff760 | ||
|
|
487c297747 | ||
|
|
cfa771a830 | ||
|
|
3905e6f5d8 | ||
|
|
2657c42dcc | ||
|
|
63e6af545a | ||
|
|
d6fb7404b1 | ||
|
|
8188766a81 | ||
|
|
b7ae657137 | ||
|
|
1295ec2ee3 | ||
|
|
7c6d3fe5ee | ||
|
|
15570e0f3d | ||
|
|
d551e67042 | ||
|
|
6b216f7406 | ||
|
|
a7e02c526b | ||
|
|
55855d66e9 | ||
|
|
ae79d90fef | ||
|
|
c8b2047483 | ||
|
|
d21cde20fc | ||
|
|
10580ea3ec | ||
|
|
25f83d1eb3 | ||
|
|
dc294b5b50 | ||
|
|
b035d3481c | ||
|
|
ca4a14ffd9 | ||
|
|
d77f7f66db | ||
|
|
6b6f545b9f | ||
|
|
bdf62e2d8b | ||
|
|
1895168889 | ||
|
|
8c875ea2f9 | ||
|
|
addbe2d572 | ||
|
|
85f092b9e1 | ||
|
|
eb99d74e4a | ||
|
|
e65afdcaa1 | ||
|
|
0b7e9e1d01 | ||
|
|
9ad17de4cb | ||
|
|
071fd7438b | ||
|
|
9cc21ba06a | ||
|
|
712b20c596 | ||
|
|
8bbe3aee70 | ||
|
|
dc22bb95fd | ||
|
|
0ef2e60d15 | ||
|
|
289d50d9c1 | ||
|
|
3189a06d75 | ||
|
|
9ef551db76 | ||
|
|
e6669026fa | ||
|
|
12082b91a3 | ||
|
|
7db9155899 | ||
|
|
f8692afad3 | ||
|
|
028cab5188 | ||
|
|
fc73fbfb9f | ||
|
|
96a8d3689a | ||
|
|
7a7106e735 | ||
|
|
d5a10ca273 | ||
|
|
e398f2d3cd | ||
|
|
d154e75797 | ||
|
|
af0df23cc4 | ||
|
|
ec26285e53 | ||
|
|
83b3ec5103 | ||
|
|
86aaade428 | ||
|
|
fbb4e1efaf | ||
|
|
73fb7a5053 | ||
|
|
bc5dfe9c2a | ||
|
|
524420d945 | ||
|
|
e48c734ea9 | ||
|
|
5aaad01238 | ||
|
|
90782c1b09 | ||
|
|
4368cb8571 | ||
|
|
40bc1f9293 | ||
|
|
8c6eefed97 | ||
|
|
59971a43fe | ||
|
|
a2a5a783a3 | ||
|
|
d3bdeff8df | ||
|
|
4a2bed257d | ||
|
|
995e0b9f81 | ||
|
|
3488c8bf4d | ||
|
|
7e1023f6e8 | ||
|
|
41e058ac64 | ||
|
|
2086133109 | ||
|
|
bb70cf1a3d | ||
|
|
be0d7b269f | ||
|
|
934b921f69 | ||
|
|
c20b3b64c7 | ||
|
|
8ec918bc7c | ||
|
|
6fa0f8e659 | ||
|
|
a9f93cfd17 | ||
|
|
9785f4541b | ||
|
|
62ab06b4a7 | ||
|
|
c7270febfc | ||
|
|
2dffd93cfe | ||
|
|
867a534910 | ||
|
|
60aaaf9e04 | ||
|
|
b64a99ab19 | ||
|
|
69fce0e0dc | ||
|
|
8dd971ffec | ||
|
|
fd84eb1ebb | ||
|
|
53b4213a05 | ||
|
|
3fbc266809 | ||
|
|
66a758d91f | ||
|
|
6cebf376d0 | ||
|
|
02adbdf530 | ||
|
|
2357381ee6 | ||
|
|
5245e6a135 | ||
|
|
d93b8f8a9c | ||
|
|
8b47fc4753 | ||
|
|
a0b3568a10 | ||
|
|
8895a948cf | ||
|
|
afe04f5693 | ||
|
|
c3a56bf7e2 | ||
|
|
18fcf42bc3 | ||
|
|
ad48ac9998 | ||
|
|
7ab6be5c67 | ||
|
|
2f56d377c5 | ||
|
|
d1c041995d | ||
|
|
399e86be87 | ||
|
|
13f68d7314 | ||
|
|
80de0c1bb0 | ||
|
|
26a487aa47 | ||
|
|
53e7a06a18 | ||
|
|
efe64f0a8c | ||
|
|
313e9687bd | ||
|
|
09fb653812 | ||
|
|
0137bd84d5 | ||
|
|
f82f2c78c7 | ||
|
|
9f1fc3d19c | ||
|
|
873d7cfea7 | ||
|
|
2fa9d99eeb | ||
|
|
12b28cbb8d | ||
|
|
436a9b8289 | ||
|
|
b3dbc0f417 | ||
|
|
6d417a9272 | ||
|
|
b74419f770 | ||
|
|
cae57e2a64 | ||
|
|
cd739e603f | ||
|
|
beeba966dd | ||
|
|
544c4ead76 | ||
|
|
4486fc4afc | ||
|
|
a55d2cc6fc | ||
|
|
402a3f9cce | ||
|
|
1801fdb052 | ||
|
|
be904d8a00 | ||
|
|
2131cdf0c1 | ||
|
|
15242de2d9 | ||
|
|
b4aed52976 | ||
|
|
1b85b5cd4b | ||
|
|
f157f49312 | ||
|
|
3d42ce6fbc | ||
|
|
4595dddc24 | ||
|
|
642f5a7585 | ||
|
|
e7c29ba14c | ||
|
|
d919fb12db | ||
|
|
c21f9c239e | ||
|
|
2fb1c5fdcc | ||
|
|
c6b99563d9 | ||
|
|
76a90df768 | ||
|
|
ea8ea12429 | ||
|
|
7bd2319cd7 | ||
|
|
a9270157ea | ||
|
|
d2f94dbb88 | ||
|
|
1056122fff | ||
|
|
9bd0aca18f | ||
|
|
60932300c1 | ||
|
|
12307b7ae6 | ||
|
|
ceeaa4a8f3 | ||
|
|
3da54f9334 | ||
|
|
4c612ffe0a | ||
|
|
7076b51a25 | ||
|
|
fe5ca06060 | ||
|
|
71467dddf9 | ||
|
|
246f01efbe | ||
|
|
68bf853594 | ||
|
|
4a18cb8a81 | ||
|
|
c615727acf | ||
|
|
2e21526fca | ||
|
|
3709070985 | ||
|
|
2ac963aa4f | ||
|
|
db7b385d66 | ||
|
|
d91a1b1302 | ||
|
|
fa031d5fc9 | ||
|
|
620d5ccf47 | ||
|
|
f0fa2b75d5 | ||
|
|
46bb207dfb | ||
|
|
f7a386d179 | ||
|
|
e4607a095b | ||
|
|
4b0d42390f | ||
|
|
114e41557f | ||
|
|
e605320503 | ||
|
|
6606c7a6c1 | ||
|
|
e3bf671a49 | ||
|
|
3e08dfd45b | ||
|
|
f968cf3b3c | ||
|
|
b148ed2368 | ||
|
|
cb35e340e1 | ||
|
|
6c00f16b7e | ||
|
|
ca8dd68d10 | ||
|
|
656f776262 | ||
|
|
e1b40547f1 | ||
|
|
98021441bc | ||
|
|
4a8d3a99c1 | ||
|
|
7dee457fa1 | ||
|
|
bccac446e5 | ||
|
|
535b3bcc12 | ||
|
|
11e84a7e72 | ||
|
|
5ef55a9d8e | ||
|
|
f53e15df93 | ||
|
|
cf887b7852 | ||
|
|
a917976a3a | ||
|
|
c201229891 | ||
|
|
7ac997cd7d | ||
|
|
08e6c4a2de | ||
|
|
2c21f8f7ef | ||
|
|
a76aefe418 | ||
|
|
8d825fc53b | ||
|
|
b039a5f1fd | ||
|
|
5c1218e95e | ||
|
|
1bb5e2a41d | ||
|
|
1576b121e6 | ||
|
|
a06cdde449 | ||
|
|
121131e910 | ||
|
|
9136e9655d | ||
|
|
c646944886 | ||
|
|
0e664fc095 | ||
|
|
1498891004 | ||
|
|
89cb237f8d | ||
|
|
3386c57670 | ||
|
|
7285de5ec4 | ||
|
|
a95059f5e5 | ||
|
|
1ac81c34ce | ||
|
|
22387ba215 | ||
|
|
b77d41e952 | ||
|
|
aeecb7db7d | ||
|
|
ac9a683135 | ||
|
|
17a58f1396 | ||
|
|
b2a97a1987 | ||
|
|
f684e3e8d6 | ||
|
|
219425245f | ||
|
|
31419e860e | ||
|
|
65481d3086 | ||
|
|
d64a1fcee7 | ||
|
|
2365789d48 | ||
|
|
4c2a2617a1 | ||
|
|
5021888d03 | ||
|
|
bf633f7fdf | ||
|
|
847f49ad7c | ||
|
|
171feb9dd2 | ||
|
|
503a0de12f | ||
|
|
cf89a43f64 | ||
|
|
680e376ed1 | ||
|
|
a26974670a | ||
|
|
16a6cb59dc | ||
|
|
829e206831 | ||
|
|
83244fcd1a | ||
|
|
d9a6c77d0c | ||
|
|
b5aafe42ad | ||
|
|
025f3f774d | ||
|
|
f26e746c2b | ||
|
|
39eaf17121 | ||
|
|
1bb06938b1 | ||
|
|
851369a0b4 | ||
|
|
5065d62443 | ||
|
|
2d1e1e9532 | ||
|
|
051049581a | ||
|
|
da5ae18b0b | ||
|
|
ac9353c101 | ||
|
|
c4c5c44bf1 | ||
|
|
d3659ebf02 | ||
|
|
6b5070e634 | ||
|
|
09ff96ceee | ||
|
|
f231acf109 | ||
|
|
e576e1662c | ||
|
|
6a21ddd1cd | ||
|
|
c1e35b2459 | ||
|
|
eee2a96029 | ||
|
|
6f5e5a4d20 | ||
|
|
9e73cb7e00 | ||
|
|
d7ab4eec7c | ||
|
|
cdd96a4bc7 | ||
|
|
39a21766b6 | ||
|
|
0e33c18b5c | ||
|
|
7f411ac7dd | ||
|
|
ed1da11c9d | ||
|
|
66ec28dd83 | ||
|
|
b928d96774 | ||
|
|
73335f9c1e | ||
|
|
7b6b81dbc5 | ||
|
|
2e11c574c2 | ||
|
|
d07565807c | ||
|
|
6eccbf215a | ||
|
|
8abc05f04e | ||
|
|
8f587467f9 | ||
|
|
3d7a91c7ff | ||
|
|
3fd408074c | ||
|
|
f71cbd8f51 | ||
|
|
915df8ac16 | ||
|
|
d5ecb08a2d | ||
|
|
9388cd4861 | ||
|
|
180590b411 | ||
|
|
4ec37539bf | ||
|
|
8755fe01b6 | ||
|
|
0bfe54e0c2 | ||
|
|
29bc689b84 | ||
|
|
65682febc7 | ||
|
|
d408665d62 | ||
|
|
64fceb0a01 | ||
|
|
ab58e578c9 | ||
|
|
0e58b8fa5b | ||
|
|
99ac082f00 | ||
|
|
4d3fddc051 | ||
|
|
42456439a9 | ||
|
|
ee0c0e7308 | ||
|
|
998c272bf8 | ||
|
|
daddd1f0e8 | ||
|
|
17f20535cb | ||
|
|
0829ea3ea1 | ||
|
|
2069d9c3d7 | ||
|
|
8a2d526c50 | ||
|
|
8ad96d6f73 | ||
|
|
947faf8c05 | ||
|
|
a948556455 | ||
|
|
835384b730 | ||
|
|
c5b93794f4 | ||
|
|
056cd32f0e | ||
|
|
49bb413110 | ||
|
|
ceccc42050 | ||
|
|
aa3379e1c6 | ||
|
|
4063af0e25 | ||
|
|
d53e6060a4 | ||
|
|
85d8fc8cc0 | ||
|
|
0fe40b1839 | ||
|
|
21de4b757f | ||
|
|
96cdbb2cff | ||
|
|
d531643b58 | ||
|
|
a1779ef488 | ||
|
|
5239dece1e | ||
|
|
a7d7837816 | ||
|
|
ebcfc7df47 | ||
|
|
dc4b9002fe | ||
|
|
33618b6b82 | ||
|
|
597d407acc | ||
|
|
6162a5bdee | ||
|
|
696bbf7a17 | ||
|
|
821fcf0922 | ||
|
|
b1712d838f | ||
|
|
895b865505 | ||
|
|
5a2af5c49e | ||
|
|
24658f4017 | ||
|
|
6707cda75e | ||
|
|
1302a31b3d | ||
|
|
871a1e8f3a | ||
|
|
04e1144bab | ||
|
|
6312d94f3e | ||
|
|
ed91026319 | ||
|
|
441a4e296d | ||
|
|
c33c3f61df | ||
|
|
2cc293b724 | ||
|
|
ee129b2faa | ||
|
|
98d9b3b093 | ||
|
|
57b9b420f8 | ||
|
|
9e73f2603a | ||
|
|
707889be42 | ||
|
|
f9a70e0145 | ||
|
|
b71489cee1 | ||
|
|
0a9bde5f10 | ||
|
|
36d5862375 | ||
|
|
398c702004 | ||
|
|
b2d1798338 | ||
|
|
4f165b0c83 | ||
|
|
2c86944a51 | ||
|
|
5fc51de7d8 | ||
|
|
158e0fb788 | ||
|
|
941d15c1bc | ||
|
|
cd00f8b995 | ||
|
|
44515f8e78 | ||
|
|
54fbc76da5 | ||
|
|
c1b5196134 | ||
|
|
fb3d3be546 | ||
|
|
8e11e242ed | ||
|
|
8a815ce3ef | ||
|
|
91076a50ad | ||
|
|
e624dcdde0 | ||
|
|
a25676122c | ||
|
|
e4dfbe2c9a | ||
|
|
78fb34d049 | ||
|
|
38c4125f4f | ||
|
|
04d6cbafe3 | ||
|
|
e6319172d8 | ||
|
|
5230ff63e3 | ||
|
|
2b364bbff7 | ||
|
|
c4b330b2bb | ||
|
|
308eda6342 | ||
|
|
e8b1cb27f1 | ||
|
|
ed14fd0ced | ||
|
|
fb10e56487 | ||
|
|
56ed0cbc79 | ||
|
|
227e588782 | ||
|
|
53f2108e37 | ||
|
|
ccf4bbf547 | ||
|
|
c99a625b60 | ||
|
|
25ab623328 | ||
|
|
455888bdac | ||
|
|
b650ece0ee | ||
|
|
2cb96c0252 | ||
|
|
70cf59bb4c | ||
|
|
ec03627119 | ||
|
|
675c19f060 | ||
|
|
6721b1b96b | ||
|
|
b4f23822c4 | ||
|
|
3dd1aaeddb | ||
|
|
1e593e6d75 | ||
|
|
ddbcb90c1f | ||
|
|
229fdf20ef | ||
|
|
72e67d0e5d | ||
|
|
b26fefbc37 | ||
|
|
04e0482f60 | ||
|
|
62f90846a8 | ||
|
|
1f9c0e56fe | ||
|
|
fe9d3563a0 | ||
|
|
38a07dffc6 | ||
|
|
1a6500308f | ||
|
|
6033b45ed3 | ||
|
|
33edef6647 | ||
|
|
8f8e8b7492 | ||
|
|
ab5e3198aa | ||
|
|
60ed850d2d | ||
|
|
63b9cc5b16 | ||
|
|
f2edd2bec5 | ||
|
|
44ad59130f | ||
|
|
ecbb1e04ee | ||
|
|
7cb2c3ef49 | ||
|
|
ff4f6bfd78 | ||
|
|
fbe0cb5efa | ||
|
|
aa7cbed611 | ||
|
|
89061f6411 | ||
|
|
838883d8a3 | ||
|
|
cd196f1acd | ||
|
|
a2b894fceb | ||
|
|
c3b3a4c70f | ||
|
|
8118641231 | ||
|
|
6d8a199a3c | ||
|
|
5a44e20a5b | ||
|
|
374739c133 |
2
.gitignore
vendored
2
.gitignore
vendored
@@ -11,3 +11,5 @@ lib/www/client/dist/
|
||||
etc/surveys/*.yaml
|
||||
!etc/surveys/_*.yaml
|
||||
etc/ssl/*
|
||||
etc/config.yaml
|
||||
var/*
|
||||
|
||||
27
bin/check_mounts_present.py
Executable file
27
bin/check_mounts_present.py
Executable file
@@ -0,0 +1,27 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
"""
|
||||
Check if any of the directories provided in the imports.mounts configuration
|
||||
section are empty.
|
||||
|
||||
Returns 0 if all arguments are non-empty, 1 otherwise. It stops at the first
|
||||
empty directory.
|
||||
"""
|
||||
|
||||
import os
|
||||
import configuration
|
||||
|
||||
cfg = configuration.read()
|
||||
|
||||
if cfg and "imports" in cfg and "mounts" in cfg["imports"]:
|
||||
|
||||
mounts = cfg["imports"]["mounts"]
|
||||
for item in mounts:
|
||||
with os.scandir(item) as contents:
|
||||
if not any(contents):
|
||||
exit(1)
|
||||
|
||||
else:
|
||||
print("No mounts in configuration")
|
||||
|
||||
exit(0)
|
||||
@@ -1,4 +1,5 @@
|
||||
import os
|
||||
import pathlib
|
||||
from glob import glob
|
||||
from yaml import full_load as _load
|
||||
|
||||
@@ -11,6 +12,18 @@ surveys should be under $HOME/etc/surveys/*.yaml. In both cases,
|
||||
$HOME is the home directory of the user running this script.
|
||||
"""
|
||||
|
||||
def is_relative_to(it, other):
|
||||
"""
|
||||
is_relative_to() is not present version Python 3.9, so we
|
||||
need this kludge to get Dougal to run on OpenSUSE 15.4
|
||||
"""
|
||||
|
||||
if "is_relative_to" in dir(it):
|
||||
return it.is_relative_to(other)
|
||||
|
||||
return str(it.absolute()).startswith(str(other.absolute()))
|
||||
|
||||
|
||||
prefix = os.environ.get("DOUGAL_ROOT", os.environ.get("HOME", ".")+"/software")
|
||||
|
||||
DOUGAL_ROOT = os.environ.get("DOUGAL_ROOT", os.environ.get("HOME", ".")+"/software")
|
||||
@@ -54,6 +67,10 @@ def files (globspec = None, include_archived = False):
|
||||
quickly and temporarily “disabling” a survey configuration by renaming
|
||||
the relevant file.
|
||||
"""
|
||||
|
||||
print("This method is obsolete")
|
||||
return
|
||||
|
||||
tuples = []
|
||||
|
||||
if globspec is None:
|
||||
@@ -87,3 +104,73 @@ def rxflags (flagstr):
|
||||
for flag in flagstr:
|
||||
flags |= cases.get(flag, 0)
|
||||
return flags
|
||||
|
||||
def translate_path (file):
|
||||
"""
|
||||
Translate a path from a Dougal import directory to an actual
|
||||
physical path on disk.
|
||||
|
||||
Any user files accessible by Dougal must be under a path prefixed
|
||||
by `(config.yaml).imports.paths`. The value of `imports.paths` may
|
||||
be either a string, in which case this represents the prefix under
|
||||
which all Dougal data resides, or a dictionary where the keys are
|
||||
logical paths and their values the corresponding physical path.
|
||||
"""
|
||||
cfg = read()
|
||||
root = pathlib.Path(DOUGAL_ROOT)
|
||||
filepath = pathlib.Path(file).resolve()
|
||||
import_paths = cfg["imports"]["paths"]
|
||||
|
||||
if filepath.is_absolute():
|
||||
if type(import_paths) == str:
|
||||
# Substitute the root for the real physical path
|
||||
# NOTE: `root` deals with import_paths not being absolute
|
||||
prefix = root.joinpath(pathlib.Path(import_paths)).resolve()
|
||||
return str(pathlib.Path(prefix).joinpath(*filepath.parts[2:]))
|
||||
else:
|
||||
# Look for a match on the second path element
|
||||
if filepath.parts[1] in import_paths:
|
||||
# NOTE: `root` deals with import_paths[…] not being absolute
|
||||
prefix = root.joinpath(import_paths[filepath.parts[1]])
|
||||
return str(pathlib.Path(prefix).joinpath(*filepath.parts[2:]))
|
||||
else:
|
||||
# This path is invalid
|
||||
raise TypeError("invalid path or file: {0!r}".format(filepath))
|
||||
else:
|
||||
# A relative filepath is always resolved relative to the logical root
|
||||
root = pathlib.Path("/")
|
||||
return translate_path(root.joinpath(filepath))
|
||||
|
||||
def untranslate_path (file):
|
||||
"""
|
||||
Attempt to convert a physical path into a logical one.
|
||||
See `translate_path()` above for details.
|
||||
"""
|
||||
cfg = read()
|
||||
dougal_root = pathlib.Path(DOUGAL_ROOT)
|
||||
filepath = pathlib.Path(file).resolve()
|
||||
import_paths = cfg["imports"]["paths"]
|
||||
physical_root = pathlib.Path("/")
|
||||
|
||||
if filepath.is_absolute():
|
||||
if type(import_paths) == str:
|
||||
if is_relative_to(filepath, import_paths):
|
||||
physical_root = pathlib.Path("/")
|
||||
physical_prefix = pathlib.Path(import_paths)
|
||||
return str(root.joinpath(filepath.relative_to(physical_prefix)))
|
||||
else:
|
||||
raise TypeError("invalid path or file: {0!r}".format(filepath))
|
||||
else:
|
||||
for key, value in import_paths.items():
|
||||
value = dougal_root.joinpath(value)
|
||||
physical_prefix = pathlib.Path(value)
|
||||
if is_relative_to(filepath, physical_prefix):
|
||||
logical_prefix = physical_root.joinpath(pathlib.Path(key)).resolve()
|
||||
return str(logical_prefix.joinpath(filepath.relative_to(physical_prefix)))
|
||||
|
||||
# If we got here with no matches, this is not a valid
|
||||
# Dougal data path
|
||||
raise TypeError("invalid path or file: {0!r}".format(filepath))
|
||||
else:
|
||||
# A relative filepath is always resolved relative to DOUGAL_ROOT
|
||||
return untranslate_path(root.joinpath(filepath))
|
||||
|
||||
@@ -11,11 +11,9 @@ from datastore import Datastore
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
print("Reading configuration")
|
||||
surveys = configuration.surveys()
|
||||
|
||||
print("Connecting to database")
|
||||
db = Datastore()
|
||||
surveys = db.surveys()
|
||||
|
||||
print("Reading surveys")
|
||||
for survey in surveys:
|
||||
|
||||
142
bin/datastore.py
142
bin/datastore.py
@@ -52,7 +52,7 @@ class Datastore:
|
||||
|
||||
self.conn = psycopg2.connect(configuration.read()["db"]["connection_string"], **opts)
|
||||
|
||||
def set_autocommit(value = True):
|
||||
def set_autocommit(self, value = True):
|
||||
"""
|
||||
Enable or disable autocommit.
|
||||
|
||||
@@ -95,7 +95,7 @@ class Datastore:
|
||||
cursor.execute(qry, (filepath,))
|
||||
results = cursor.fetchall()
|
||||
if len(results):
|
||||
return (filepath, file_hash(filepath)) in results
|
||||
return (filepath, file_hash(configuration.translate_path(filepath))) in results
|
||||
|
||||
|
||||
def add_file(self, path, cursor = None):
|
||||
@@ -107,7 +107,8 @@ class Datastore:
|
||||
else:
|
||||
cur = cursor
|
||||
|
||||
hash = file_hash(path)
|
||||
realpath = configuration.translate_path(path)
|
||||
hash = file_hash(realpath)
|
||||
qry = "CALL add_file(%s, %s);"
|
||||
cur.execute(qry, (path, hash))
|
||||
if cursor is None:
|
||||
@@ -176,7 +177,7 @@ class Datastore:
|
||||
else:
|
||||
cur = cursor
|
||||
|
||||
hash = file_hash(path)
|
||||
hash = file_hash(configuration.translate_path(path))
|
||||
qry = """
|
||||
UPDATE raw_lines rl
|
||||
SET ntbp = %s
|
||||
@@ -255,6 +256,78 @@ class Datastore:
|
||||
|
||||
self.maybe_commit()
|
||||
|
||||
|
||||
def save_preplot_line_info(self, lines, filepath, filedata = None):
|
||||
"""
|
||||
Save preplot line information
|
||||
|
||||
Arguments:
|
||||
|
||||
lines (iterable): should be a collection of lines returned from
|
||||
one of the line info reading functions (see preplots.py).
|
||||
|
||||
filepath (string): the full path to the preplot file from where the lines
|
||||
have been read. It will be added to the survey's `file` table so that
|
||||
it can be monitored for changes.
|
||||
"""
|
||||
|
||||
with self.conn.cursor() as cursor:
|
||||
cursor.execute("BEGIN;")
|
||||
|
||||
# Check which preplot lines we actually have already imported,
|
||||
# as the line info file may contain extra lines.
|
||||
|
||||
qry = """
|
||||
SELECT line, class
|
||||
FROM preplot_lines
|
||||
ORDER BY line, class;
|
||||
"""
|
||||
cursor.execute(qry)
|
||||
preplot_lines = cursor.fetchall()
|
||||
|
||||
hash = self.add_file(filepath, cursor)
|
||||
count=0
|
||||
for line in lines:
|
||||
count += 1
|
||||
|
||||
if not (line["sail_line"], "V") in preplot_lines:
|
||||
print(f"\u001b[2KSkipping line {count} / {len(lines)}", end="\n", flush=True)
|
||||
continue
|
||||
|
||||
print(f"\u001b[2KSaving line {count} / {len(lines)} ", end="\n", flush=True)
|
||||
|
||||
sail_line = line["sail_line"]
|
||||
incr = line.get("incr", True)
|
||||
ntba = line.get("ntba", False)
|
||||
remarks = line.get("remarks", None)
|
||||
meta = json.dumps(line.get("meta", {}))
|
||||
source_lines = line.get("source_line", [])
|
||||
|
||||
for source_line in source_lines:
|
||||
qry = """
|
||||
INSERT INTO preplot_saillines AS ps
|
||||
(sailline, line, sailline_class, line_class, incr, ntba, remarks, meta, hash)
|
||||
VALUES
|
||||
(%s, %s, 'V', 'S', %s, %s, %s, %s, %s)
|
||||
ON CONFLICT (sailline, sailline_class, line, line_class, incr) DO UPDATE
|
||||
SET
|
||||
incr = EXCLUDED.incr,
|
||||
ntba = EXCLUDED.ntba,
|
||||
remarks = COALESCE(EXCLUDED.remarks, ps.remarks),
|
||||
meta = ps.meta || EXCLUDED.meta,
|
||||
hash = EXCLUDED.hash;
|
||||
"""
|
||||
|
||||
# NOTE Consider using cursor.executemany() instead. Then again,
|
||||
# we're only expecting a few hundred lines at most.
|
||||
cursor.execute(qry, (sail_line, source_line, incr, ntba, remarks, meta, hash))
|
||||
|
||||
if filedata is not None:
|
||||
self.save_file_data(filepath, json.dumps(filedata), cursor)
|
||||
|
||||
self.maybe_commit()
|
||||
|
||||
|
||||
def save_raw_p190(self, records, fileinfo, filepath, epsg = 0, filedata = None, ntbp = False):
|
||||
"""
|
||||
Save raw P1 data.
|
||||
@@ -588,7 +661,68 @@ class Datastore:
|
||||
# We do not commit if we've been passed a cursor, instead
|
||||
# we assume that we are in the middle of a transaction
|
||||
|
||||
def get_file_data(self, path, cursor = None):
|
||||
"""
|
||||
Retrieve arbitrary data associated with a file.
|
||||
"""
|
||||
|
||||
if cursor is None:
|
||||
cur = self.conn.cursor()
|
||||
else:
|
||||
cur = cursor
|
||||
|
||||
realpath = configuration.translate_path(path)
|
||||
hash = file_hash(realpath)
|
||||
|
||||
qry = """
|
||||
SELECT data
|
||||
FROM file_data
|
||||
WHERE hash = %s;
|
||||
"""
|
||||
|
||||
cur.execute(qry, (hash,))
|
||||
res = cur.fetchone()
|
||||
|
||||
if cursor is None:
|
||||
self.maybe_commit()
|
||||
# We do not commit if we've been passed a cursor, instead
|
||||
# we assume that we are in the middle of a transaction
|
||||
return res[0]
|
||||
|
||||
def surveys (self, include_archived = False):
|
||||
"""
|
||||
Return list of survey definitions.
|
||||
"""
|
||||
|
||||
if self.conn is None:
|
||||
self.connect()
|
||||
|
||||
if include_archived:
|
||||
qry = """
|
||||
SELECT meta, schema
|
||||
FROM public.projects;
|
||||
"""
|
||||
else:
|
||||
qry = """
|
||||
SELECT meta, schema
|
||||
FROM public.projects
|
||||
WHERE NOT (meta->'archived')::boolean IS true
|
||||
"""
|
||||
|
||||
with self.conn:
|
||||
with self.conn.cursor() as cursor:
|
||||
|
||||
cursor.execute(qry)
|
||||
results = cursor.fetchall()
|
||||
surveys = []
|
||||
for r in results:
|
||||
if r[0]:
|
||||
r[0]['schema'] = r[1]
|
||||
surveys.append(r[0])
|
||||
return surveys
|
||||
|
||||
|
||||
# TODO Does this need tweaking on account of #246?
|
||||
def apply_survey_configuration(self, cursor = None):
|
||||
if cursor is None:
|
||||
cur = self.conn.cursor()
|
||||
|
||||
163
bin/delimited.py
Normal file
163
bin/delimited.py
Normal file
@@ -0,0 +1,163 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
"""
|
||||
Delimited record importing functions.
|
||||
"""
|
||||
|
||||
import csv
|
||||
import builtins
|
||||
|
||||
def to_bool (v):
|
||||
try:
|
||||
return bool(int(v))
|
||||
except ValueError:
|
||||
if type(v) == str:
|
||||
return v.strip().lower().startswith("t")
|
||||
return False
|
||||
|
||||
transform = {
|
||||
"int": lambda v: builtins.int(float(v)),
|
||||
"float": float,
|
||||
"string": str,
|
||||
"bool": to_bool
|
||||
}
|
||||
|
||||
def cast_values (row, fields):
|
||||
|
||||
def enum_for (key):
|
||||
field = fields.get(key, {})
|
||||
def enum (val):
|
||||
if "enum" in field:
|
||||
ret_val = field.get("default", val)
|
||||
enums = field.get("enum", [])
|
||||
for enum_key in enums:
|
||||
if enum_key == val:
|
||||
ret_val = enums[enum_key]
|
||||
return ret_val
|
||||
return val
|
||||
return enum
|
||||
|
||||
# Get rid of any unwanted data
|
||||
if None in row:
|
||||
del(row[None])
|
||||
|
||||
for key in row:
|
||||
|
||||
val = row[key]
|
||||
enum = enum_for(key)
|
||||
transformer = transform.get(fields.get(key, {}).get("type"), str)
|
||||
|
||||
if type(val) == list:
|
||||
for i, v in enumerate(val):
|
||||
row[key][i] = transformer(enum(v))
|
||||
elif type(val) == dict:
|
||||
continue
|
||||
else:
|
||||
row[key] = transformer(enum(val))
|
||||
return row
|
||||
|
||||
def build_fieldnames (spec): #(arr, key, val):
|
||||
fieldnames = []
|
||||
|
||||
if "fields" in spec:
|
||||
for key in spec["fields"]:
|
||||
index = spec["fields"][key]["column"]
|
||||
try:
|
||||
fieldnames[index] = key
|
||||
except IndexError:
|
||||
assert index >= 0
|
||||
fieldnames.extend(((index + 1) - len(fieldnames)) * [None])
|
||||
fieldnames[index] = key
|
||||
|
||||
return fieldnames
|
||||
|
||||
|
||||
def from_file_delimited (path, spec):
|
||||
|
||||
fieldnames = build_fieldnames(spec)
|
||||
fields = spec.get("fields", [])
|
||||
delimiter = spec.get("delimiter", ",")
|
||||
firstRow = spec.get("firstRow", 0)
|
||||
headerRow = spec.get("headerRow", False)
|
||||
if headerRow:
|
||||
firstRow += 1
|
||||
|
||||
records = []
|
||||
with open(path, "r", errors="ignore") as fd:
|
||||
|
||||
if spec.get("type") == "x-sl+csv":
|
||||
fieldnames = None # Pick from header row
|
||||
firstRow = 0
|
||||
reader = csv.DictReader(fd, delimiter=delimiter)
|
||||
else:
|
||||
reader = csv.DictReader(fd, fieldnames=fieldnames, delimiter=delimiter)
|
||||
|
||||
row = 0
|
||||
for line in reader:
|
||||
skip = False
|
||||
|
||||
if row < firstRow:
|
||||
skip = True
|
||||
|
||||
if not skip:
|
||||
records.append(cast_values(dict(line), fields))
|
||||
|
||||
row += 1
|
||||
|
||||
return records
|
||||
|
||||
|
||||
def remap (line, headers):
|
||||
row = dict()
|
||||
for i, key in enumerate(headers):
|
||||
if "." in key[1:-1]:
|
||||
# This is an object
|
||||
k, attr = key.split(".")
|
||||
if not k in row:
|
||||
row[k] = dict()
|
||||
row[k][attr] = line[i]
|
||||
elif key in row:
|
||||
if type(row[key]) == list:
|
||||
row[key].append(line[i])
|
||||
else:
|
||||
row[key] = [ row[key], line[i] ]
|
||||
else:
|
||||
row[key] = line[i]
|
||||
return row
|
||||
|
||||
def from_file_saillines (path, spec):
|
||||
|
||||
fields = {
|
||||
"sail_line": { "type": "int" },
|
||||
"source_line": { "type": "int" },
|
||||
"incr": { "type": "bool" },
|
||||
"ntba": { "type": "bool" }
|
||||
}
|
||||
|
||||
# fields = spec.get("fields", sl_fields)
|
||||
delimiter = spec.get("delimiter", ",")
|
||||
firstRow = spec.get("firstRow", 0)
|
||||
|
||||
records = []
|
||||
with open(path, "r", errors="ignore") as fd:
|
||||
row = 0
|
||||
reader = csv.reader(fd, delimiter=delimiter)
|
||||
while row < firstRow:
|
||||
next(reader)
|
||||
row += 1
|
||||
headers = [ h.strip() for h in next(reader) if len(h.strip()) ]
|
||||
|
||||
for line in reader:
|
||||
records.append(cast_values(remap(line, headers), fields))
|
||||
|
||||
return records
|
||||
|
||||
|
||||
def from_file_p111 (path, spec):
|
||||
pass
|
||||
|
||||
def from_file (path, spec):
|
||||
if spec.get("type") == "x-sl+csv":
|
||||
return from_file_saillines(path, spec)
|
||||
else:
|
||||
return from_file_delimited(path, spec)
|
||||
128
bin/fwr.py
Normal file
128
bin/fwr.py
Normal file
@@ -0,0 +1,128 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
"""
|
||||
Fixed width record importing functions.
|
||||
"""
|
||||
|
||||
import builtins
|
||||
|
||||
def to_bool (v):
|
||||
try:
|
||||
return bool(int(v))
|
||||
except ValueError:
|
||||
if type(v) == str:
|
||||
return v.strip().lower().startswith("t")
|
||||
return False
|
||||
|
||||
transform = {
|
||||
"int": lambda v: builtins.int(float(v)),
|
||||
"float": float,
|
||||
"string": str,
|
||||
"str": str,
|
||||
"bool": to_bool
|
||||
}
|
||||
|
||||
def parse_line (line, fields, fixed = None):
    """Parse one fixed-width record.

    line:   the raw text (a record line, or a file name) to parse.
    fields: dict of { key: { "offset", "length", "type",
            ["enum"], ["default"] } } describing each field slice.
    fixed:  optional list of { "offset", "text" } literal markers that
            must appear verbatim in `line`.

    Returns a dict of parsed values, or an error *string* when a fixed
    marker does not match.  May raise ValueError when a field slice
    cannot be cast by its transformer.
    """
    # print("parse_line", line, fields, fixed)
    data = dict()

    if fixed:
        # Validate literal markers before parsing any fields.
        for value in fixed:
            start = value["offset"]
            end = start + len(value["text"])
            text = line[start:end]
            if text != value["text"]:
                return f"Expected text `{value['text']}` at position {start} but found `{text}` instead."

    for key in fields:
        spec = fields[key]
        # Default field type is "str" (identity-ish cast).
        transformer = transform[spec.get("type", "str")]
        pos_from = spec["offset"]
        pos_to = pos_from + spec["length"]
        text = line[pos_from:pos_to]

        if "enum" in spec:
            # BUG FIX: the raw text used to be cast *before* the enum
            # lookup, so symbolic tokens (e.g. "N" in an int field)
            # raised ValueError even though the enum mapped them.
            # Resolve the enum first; only cast the raw text when no
            # enum entry (or default) applies.
            matched = False
            value = None
            for enum_key in spec["enum"]:
                if enum_key == text:
                    value = transformer(spec["enum"][enum_key])
                    matched = True
                    break
            if not matched:
                if "default" in spec:
                    value = spec["default"]
                else:
                    value = transformer(text)
        else:
            value = transformer(text)

        data[key] = value

    # print("parse_line data =", data)
    return data
|
||||
|
||||
|
||||
# Built-in field layouts for common fixed-width navigation formats.
# Offsets are zero-based character positions within each record line;
# lengths are field widths in characters.
specfields = {
    # SPS revision 1 point records.
    "sps1": {
        "line_name": { "offset": 1, "length": 16, "type": "int" },
        "point_number": { "offset": 17, "length": 8, "type": "int" },
        "easting": { "offset": 46, "length": 9, "type": "float" },
        "northing": { "offset": 55, "length": 10, "type": "float" }
    },
    # SPS revision 2.1 point records (narrower name/number fields).
    "sps21": {
        "line_name": { "offset": 1, "length": 7, "type": "int" },
        "point_number": { "offset": 11, "length": 7, "type": "int" },
        "easting": { "offset": 46, "length": 9, "type": "float" },
        "northing": { "offset": 55, "length": 10, "type": "float" }
    },
    # UKOOA P1/90 records.
    "p190": {
        "line_name": { "offset": 1, "length": 12, "type": "int" },
        "point_number": { "offset": 19, "length": 6, "type": "int" },
        "easting": { "offset": 46, "length": 9, "type": "float" },
        "northing": { "offset": 55, "length": 9, "type": "float" }
    },
}
|
||||
|
||||
def from_file(path, spec):
    """Import a fixed-width record file.

    path: file to read (opened with errors="ignore").
    spec: import specification.  Either "fields" (an explicit layout as
          accepted by parse_line) or a known "type" ("sps1", "sps21",
          "p190") must be present; "firstRow" (default 0) rows are
          skipped at the top of the file.

    Returns a list of parsed record dicts, or an error *string* when
    the spec is unusable.
    """

    # If spec.fields is not present, deduce it from spec.type ("sps1", "sps21", "p190", etc.)
    if "fields" in spec:
        fields = spec["fields"]
    elif "type" in spec and spec["type"] in specfields:
        fields = specfields[spec["type"]]
    else:
        # TODO: Should default to looking for spec.format and doing a legacy import on it
        return "Neither 'type' nor 'fields' given. I don't know how to import this fixed-width dataset."

    firstRow = spec.get("firstRow", 0)

    skipStart = [] # Skip lines starting with any of these values
    skipMatch = [] # Skip lines equal to any of these values

    if "type" in spec:
        if spec["type"] in ("sps1", "sps21", "p190"):
            # BUG FIX: these used to be bare strings ("H", "EOF").
            # Iterating a string yields single characters, so the "EOF"
            # trailer line was never skipped and crashed the numeric
            # casts downstream.  They must be lists.
            skipStart = ["H"]
            skipMatch = ["EOF"]

    records = []
    with open(path, "r", errors="ignore") as fd:
        row = 0
        line = fd.readline()

        while line:
            skip = False

            # Skip preamble rows before the data starts.
            if row < firstRow:
                skip = True

            if not skip:
                for v in skipStart:
                    if line.startswith(v):
                        skip = True
                        break
                for v in skipMatch:
                    # BUG FIX: readline() keeps the trailing newline,
                    # so an exact comparison against the raw line could
                    # never match; strip line endings first.
                    if line.rstrip("\r\n") == v:
                        skip = True
                        break

            if not skip:
                records.append(parse_line(line, fields))

            row += 1
            line = fd.readline()

    return records
|
||||
@@ -9,11 +9,9 @@ from datastore import Datastore
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
print("Reading configuration")
|
||||
surveys = configuration.surveys()
|
||||
|
||||
print("Connecting to database")
|
||||
db = Datastore()
|
||||
surveys = db.surveys()
|
||||
|
||||
print("Reading surveys")
|
||||
for survey in surveys:
|
||||
|
||||
@@ -15,6 +15,7 @@ import re
|
||||
import time
|
||||
import configuration
|
||||
import p111
|
||||
import fwr
|
||||
from datastore import Datastore
|
||||
|
||||
def add_pending_remark(db, sequence):
|
||||
@@ -51,12 +52,11 @@ def del_pending_remark(db, sequence):
|
||||
if __name__ == '__main__':
|
||||
|
||||
print("Reading configuration")
|
||||
surveys = configuration.surveys()
|
||||
file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
|
||||
|
||||
print("Connecting to database")
|
||||
db = Datastore()
|
||||
db.connect()
|
||||
surveys = db.surveys()
|
||||
|
||||
print("Reading surveys")
|
||||
for survey in surveys:
|
||||
@@ -70,59 +70,94 @@ if __name__ == '__main__':
|
||||
print("No final P1/11 configuration")
|
||||
exit(0)
|
||||
|
||||
pattern = final_p111["pattern"]
|
||||
rx = re.compile(pattern["regex"])
|
||||
|
||||
lineNameInfo = final_p111.get("lineNameInfo")
|
||||
pattern = final_p111.get("pattern")
|
||||
if not lineNameInfo:
|
||||
if not pattern:
|
||||
print("ERROR! Missing final.p111.lineNameInfo in project configuration. Cannot import final P111")
|
||||
raise Exception("Missing final.p111.lineNameInfo")
|
||||
else:
|
||||
print("WARNING! No `lineNameInfo` in project configuration (final.p111). You should add it to the settings.")
|
||||
rx = None
|
||||
if pattern and pattern.get("regex"):
|
||||
rx = re.compile(pattern["regex"])
|
||||
|
||||
if "pending" in survey["final"]:
|
||||
pendingRx = re.compile(survey["final"]["pending"]["pattern"]["regex"])
|
||||
|
||||
for fileprefix in final_p111["paths"]:
|
||||
print(f"Path prefix: {fileprefix}")
|
||||
realprefix = configuration.translate_path(fileprefix)
|
||||
print(f"Path prefix: {fileprefix} → {realprefix}")
|
||||
|
||||
for globspec in final_p111["globs"]:
|
||||
for filepath in pathlib.Path(fileprefix).glob(globspec):
|
||||
filepath = str(filepath)
|
||||
print(f"Found {filepath}")
|
||||
for physical_filepath in pathlib.Path(realprefix).glob(globspec):
|
||||
physical_filepath = str(physical_filepath)
|
||||
logical_filepath = configuration.untranslate_path(physical_filepath)
|
||||
print(f"Found {logical_filepath}")
|
||||
|
||||
pending = False
|
||||
if pendingRx:
|
||||
pending = pendingRx.search(filepath) is not None
|
||||
pending = pendingRx.search(physical_filepath) is not None
|
||||
|
||||
if not db.file_in_db(filepath):
|
||||
if not db.file_in_db(logical_filepath):
|
||||
|
||||
age = time.time() - os.path.getmtime(filepath)
|
||||
age = time.time() - os.path.getmtime(physical_filepath)
|
||||
if age < file_min_age:
|
||||
print("Skipping file because too new", filepath)
|
||||
print("Skipping file because too new", logical_filepath)
|
||||
continue
|
||||
|
||||
print("Importing")
|
||||
|
||||
match = rx.match(os.path.basename(filepath))
|
||||
if not match:
|
||||
error_message = f"File path not match the expected format! ({filepath} ~ {pattern['regex']})"
|
||||
print(error_message, file=sys.stderr)
|
||||
print("This file will be ignored!")
|
||||
continue
|
||||
if rx:
|
||||
match = rx.match(os.path.basename(logical_filepath))
|
||||
if not match:
|
||||
error_message = f"File path not match the expected format! ({logical_filepath} ~ {pattern['regex']})"
|
||||
print(error_message, file=sys.stderr)
|
||||
print("This file will be ignored!")
|
||||
continue
|
||||
|
||||
file_info = dict(zip(pattern["captures"], match.groups()))
|
||||
file_info["meta"] = {}
|
||||
file_info = dict(zip(pattern["captures"], match.groups()))
|
||||
file_info["meta"] = {}
|
||||
|
||||
if lineNameInfo:
|
||||
basename = os.path.basename(physical_filepath)
|
||||
fields = lineNameInfo.get("fields", {})
|
||||
fixed = lineNameInfo.get("fixed")
|
||||
try:
|
||||
parsed_line = fwr.parse_line(basename, fields, fixed)
|
||||
except ValueError as err:
|
||||
parsed_line = "Line format error: " + str(err)
|
||||
if type(parsed_line) == str:
|
||||
print(parsed_line, file=sys.stderr)
|
||||
print("This file will be ignored!")
|
||||
continue
|
||||
|
||||
file_info = {}
|
||||
file_info["sequence"] = parsed_line["sequence"]
|
||||
file_info["line"] = parsed_line["line"]
|
||||
del(parsed_line["sequence"])
|
||||
del(parsed_line["line"])
|
||||
file_info["meta"] = {
|
||||
"fileInfo": parsed_line
|
||||
}
|
||||
|
||||
if pending:
|
||||
print("Skipping / removing final file because marked as PENDING", filepath)
|
||||
print("Skipping / removing final file because marked as PENDING", logical_filepath)
|
||||
db.del_sequence_final(file_info["sequence"])
|
||||
add_pending_remark(db, file_info["sequence"])
|
||||
continue
|
||||
else:
|
||||
del_pending_remark(db, file_info["sequence"])
|
||||
|
||||
p111_data = p111.from_file(filepath)
|
||||
p111_data = p111.from_file(physical_filepath)
|
||||
|
||||
print("Saving")
|
||||
|
||||
p111_records = p111.p111_type("S", p111_data)
|
||||
file_info["meta"]["lineName"] = p111.line_name(p111_data)
|
||||
|
||||
db.save_final_p111(p111_records, file_info, filepath, survey["epsg"])
|
||||
db.save_final_p111(p111_records, file_info, logical_filepath, survey["epsg"])
|
||||
else:
|
||||
print("Already in DB")
|
||||
if pending:
|
||||
|
||||
127
bin/import_map_layers.py
Executable file
127
bin/import_map_layers.py
Executable file
@@ -0,0 +1,127 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
"""
|
||||
Import SmartSource data.
|
||||
|
||||
For each survey in configuration.surveys(), check for new
|
||||
or modified final gun header files and (re-)import them into the
|
||||
database.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import pathlib
|
||||
import re
|
||||
import time
|
||||
import json
|
||||
import configuration
|
||||
from datastore import Datastore
|
||||
|
||||
if __name__ == '__main__':
|
||||
"""
|
||||
Imports map layers from the directories defined in the configuration object
|
||||
`import.map.layers`. The content of that key is an object with the following
|
||||
structure:
|
||||
|
||||
{
|
||||
layer1Name: [
|
||||
format: "geojson",
|
||||
path: "…", // Logical path to a directory
|
||||
globs: [
|
||||
"**/*.geojson", // List of globs matching map data files
|
||||
…
|
||||
]
|
||||
],
|
||||
|
||||
layer2Name: …
|
||||
…
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
def process (layer_name, layer, physical_filepath):
    # Import or refresh a single map-layer file.
    #
    # layer_name:        configured name of the layer this file belongs to.
    # layer:             the layer's configuration object ("format",
    #                    optional "tooltip"/"popup").
    # physical_filepath: on-disk path (str or pathlib.Path).
    #
    # Uses the enclosing script's `db`, `file_min_age`, `configuration`,
    # `json`, `os` and `time` bindings.
    physical_filepath = str(physical_filepath)
    # The database stores logical (configuration-space) paths.
    logical_filepath = configuration.untranslate_path(physical_filepath)
    print(f"Found {logical_filepath}")

    if not db.file_in_db(logical_filepath):

        # Leave very recent files alone: they may still be being written.
        age = time.time() - os.path.getmtime(physical_filepath)
        if age < file_min_age:
            print("Skipping file because too new", logical_filepath)
            return

        print("Importing")

        file_info = {
            "type": "map_layer",
            "format": layer["format"],
            "name": layer_name,
            "tooltip": layer.get("tooltip"),
            "popup": layer.get("popup")
        }

        db.save_file_data(logical_filepath, json.dumps(file_info))

    else:
        # Already imported: sync the mutable metadata (name, tooltip,
        # popup) with the current configuration and re-save only when
        # something actually changed.
        file_info = db.get_file_data(logical_filepath)
        dirty = False
        if file_info:
            if file_info["name"] != layer_name:
                print("Renaming to", layer_name)
                file_info["name"] = layer_name
                dirty = True
            if file_info.get("tooltip") != layer.get("tooltip"):
                print("Changing tooltip to", layer.get("tooltip") or "null")
                file_info["tooltip"] = layer.get("tooltip")
                dirty = True
            if file_info.get("popup") != layer.get("popup"):
                print("Changing popup to", layer.get("popup") or "null")
                file_info["popup"] = layer.get("popup")
                dirty = True

        if dirty:
            db.save_file_data(logical_filepath, json.dumps(file_info))
        else:
            print("Already in DB")
|
||||
|
||||
|
||||
print("Reading configuration")
# Minimum file age (seconds) before a file is considered safe to import.
file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)

print("Connecting to database")
db = Datastore()
surveys = db.surveys()

print("Reading surveys")
for survey in surveys:
    print(f'Survey: {survey["id"]} ({survey["schema"]})')

    db.set_survey(survey["schema"])

    # Layer definitions live under imports.map.layers in the survey config.
    try:
        map_layers = survey["imports"]["map"]["layers"]
    except KeyError:
        print("No map layers defined")
        continue

    for layer_name, layer_items in map_layers.items():

        for layer in layer_items:
            fileprefix = layer["path"]
            # Translate the configured (logical) path to an on-disk path.
            realprefix = configuration.translate_path(fileprefix)

            if os.path.isfile(realprefix):

                # A single file: import it directly.
                process(layer_name, layer, realprefix)

            elif os.path.isdir(realprefix):

                # A directory: expand globs (defaulting to GeoJSON files).
                if not "globs" in layer:
                    layer["globs"] = [ "**/*.geojson" ]

                for globspec in layer["globs"]:
                    for physical_filepath in pathlib.Path(realprefix).glob(globspec):
                        process(layer_name, layer, physical_filepath)

print("Done")
|
||||
@@ -15,38 +15,52 @@ import configuration
|
||||
import preplots
|
||||
from datastore import Datastore
|
||||
|
||||
if __name__ == '__main__':
|
||||
def preplots_sorter (preplot):
|
||||
rank = {
|
||||
"x-sl+csv": 10
|
||||
}
|
||||
return rank.get(preplot.get("type"), 0)
|
||||
|
||||
print("Reading configuration")
|
||||
surveys = configuration.surveys()
|
||||
file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
|
||||
if __name__ == '__main__':
|
||||
|
||||
print("Connecting to database")
|
||||
db = Datastore()
|
||||
surveys = db.surveys()
|
||||
|
||||
print("Reading configuration")
|
||||
file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
|
||||
|
||||
print("Reading surveys")
|
||||
for survey in surveys:
|
||||
print(f'Survey: {survey["id"]} ({survey["schema"]})')
|
||||
db.set_survey(survey["schema"])
|
||||
for file in survey["preplots"]:
|
||||
|
||||
# We sort the preplots so that ancillary line info always comes last,
|
||||
# after the actual line + point data has been imported
|
||||
for file in sorted(survey["preplots"], key=preplots_sorter):
|
||||
realpath = configuration.translate_path(file["path"])
|
||||
|
||||
print(f"Preplot: {file['path']}")
|
||||
if not db.file_in_db(file["path"]):
|
||||
|
||||
age = time.time() - os.path.getmtime(file["path"])
|
||||
age = time.time() - os.path.getmtime(realpath)
|
||||
if age < file_min_age:
|
||||
print("Skipping file because too new", file["path"])
|
||||
continue
|
||||
|
||||
print("Importing")
|
||||
try:
|
||||
preplot = preplots.from_file(file)
|
||||
preplot = preplots.from_file(file, realpath)
|
||||
except FileNotFoundError:
|
||||
print(f"File does not exist: {file['path']}", file=sys.stderr)
|
||||
continue
|
||||
|
||||
if type(preplot) is list:
|
||||
print("Saving to DB")
|
||||
db.save_preplots(preplot, file["path"], file["class"], survey["epsg"], file)
|
||||
if file.get("type") == "x-sl+csv":
|
||||
db.save_preplot_line_info(preplot, file["path"], file)
|
||||
else:
|
||||
db.save_preplots(preplot, file["path"], file["class"], survey["epsg"], file)
|
||||
elif type(preplot) is str:
|
||||
print(preplot)
|
||||
else:
|
||||
|
||||
@@ -15,17 +15,17 @@ import re
|
||||
import time
|
||||
import configuration
|
||||
import p111
|
||||
import fwr
|
||||
from datastore import Datastore
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
print("Reading configuration")
|
||||
surveys = configuration.surveys()
|
||||
file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
|
||||
|
||||
print("Connecting to database")
|
||||
db = Datastore()
|
||||
db.connect()
|
||||
surveys = db.surveys()
|
||||
|
||||
print("Reading surveys")
|
||||
for survey in surveys:
|
||||
@@ -39,45 +39,79 @@ if __name__ == '__main__':
|
||||
print("No raw P1/11 configuration")
|
||||
exit(0)
|
||||
|
||||
pattern = raw_p111["pattern"]
|
||||
rx = re.compile(pattern["regex"])
|
||||
lineNameInfo = raw_p111.get("lineNameInfo")
|
||||
pattern = raw_p111.get("pattern")
|
||||
if not lineNameInfo:
|
||||
if not pattern:
|
||||
print("ERROR! Missing raw.p111.lineNameInfo in project configuration. Cannot import raw P111")
|
||||
raise Exception("Missing raw.p111.lineNameInfo")
|
||||
else:
|
||||
print("WARNING! No `lineNameInfo` in project configuration (raw.p111). You should add it to the settings.")
|
||||
rx = None
|
||||
if pattern and pattern.get("regex"):
|
||||
rx = re.compile(pattern["regex"])
|
||||
|
||||
if "ntbp" in survey["raw"]:
|
||||
ntbpRx = re.compile(survey["raw"]["ntbp"]["pattern"]["regex"])
|
||||
|
||||
for fileprefix in raw_p111["paths"]:
|
||||
print(f"Path prefix: {fileprefix}")
|
||||
realprefix = configuration.translate_path(fileprefix)
|
||||
print(f"Path prefix: {fileprefix} → {realprefix}")
|
||||
|
||||
for globspec in raw_p111["globs"]:
|
||||
for filepath in pathlib.Path(fileprefix).glob(globspec):
|
||||
filepath = str(filepath)
|
||||
print(f"Found {filepath}")
|
||||
for physical_filepath in pathlib.Path(realprefix).glob(globspec):
|
||||
physical_filepath = str(physical_filepath)
|
||||
logical_filepath = configuration.untranslate_path(physical_filepath)
|
||||
print(f"Found {logical_filepath}")
|
||||
|
||||
if ntbpRx:
|
||||
ntbp = ntbpRx.search(filepath) is not None
|
||||
ntbp = ntbpRx.search(physical_filepath) is not None
|
||||
else:
|
||||
ntbp = False
|
||||
|
||||
if not db.file_in_db(filepath):
|
||||
if not db.file_in_db(logical_filepath):
|
||||
|
||||
age = time.time() - os.path.getmtime(filepath)
|
||||
age = time.time() - os.path.getmtime(physical_filepath)
|
||||
if age < file_min_age:
|
||||
print("Skipping file because too new", filepath)
|
||||
print("Skipping file because too new", logical_filepath)
|
||||
continue
|
||||
|
||||
print("Importing")
|
||||
|
||||
match = rx.match(os.path.basename(filepath))
|
||||
if not match:
|
||||
error_message = f"File path not match the expected format! ({filepath} ~ {pattern['regex']})"
|
||||
print(error_message, file=sys.stderr)
|
||||
print("This file will be ignored!")
|
||||
continue
|
||||
if rx:
|
||||
match = rx.match(os.path.basename(logical_filepath))
|
||||
if not match:
|
||||
error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})"
|
||||
print(error_message, file=sys.stderr)
|
||||
print("This file will be ignored!")
|
||||
continue
|
||||
|
||||
file_info = dict(zip(pattern["captures"], match.groups()))
|
||||
file_info["meta"] = {}
|
||||
file_info = dict(zip(pattern["captures"], match.groups()))
|
||||
file_info["meta"] = {}
|
||||
|
||||
p111_data = p111.from_file(filepath)
|
||||
if lineNameInfo:
|
||||
basename = os.path.basename(physical_filepath)
|
||||
fields = lineNameInfo.get("fields", {})
|
||||
fixed = lineNameInfo.get("fixed")
|
||||
try:
|
||||
parsed_line = fwr.parse_line(basename, fields, fixed)
|
||||
except ValueError as err:
|
||||
parsed_line = "Line format error: " + str(err)
|
||||
if type(parsed_line) == str:
|
||||
print(parsed_line, file=sys.stderr)
|
||||
print("This file will be ignored!")
|
||||
continue
|
||||
|
||||
file_info = {}
|
||||
file_info["sequence"] = parsed_line["sequence"]
|
||||
file_info["line"] = parsed_line["line"]
|
||||
del(parsed_line["sequence"])
|
||||
del(parsed_line["line"])
|
||||
file_info["meta"] = {
|
||||
"fileInfo": parsed_line
|
||||
}
|
||||
|
||||
p111_data = p111.from_file(physical_filepath)
|
||||
|
||||
print("Saving")
|
||||
|
||||
@@ -85,7 +119,7 @@ if __name__ == '__main__':
|
||||
if len(p111_records):
|
||||
file_info["meta"]["lineName"] = p111.line_name(p111_data)
|
||||
|
||||
db.save_raw_p111(p111_records, file_info, filepath, survey["epsg"], ntbp=ntbp)
|
||||
db.save_raw_p111(p111_records, file_info, logical_filepath, survey["epsg"], ntbp=ntbp)
|
||||
else:
|
||||
print("No source records found in file")
|
||||
else:
|
||||
@@ -93,7 +127,7 @@ if __name__ == '__main__':
|
||||
|
||||
# Update the NTBP status to whatever the latest is,
|
||||
# as it might have changed.
|
||||
db.set_ntbp(filepath, ntbp)
|
||||
db.set_ntbp(logical_filepath, ntbp)
|
||||
if ntbp:
|
||||
print("Sequence is NTBP")
|
||||
|
||||
|
||||
@@ -15,17 +15,17 @@ import re
|
||||
import time
|
||||
import configuration
|
||||
import smsrc
|
||||
import fwr
|
||||
from datastore import Datastore
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
print("Reading configuration")
|
||||
surveys = configuration.surveys()
|
||||
file_min_age = configuration.read().get('imports', {}).get('file_min_age', 10)
|
||||
|
||||
print("Connecting to database")
|
||||
db = Datastore()
|
||||
db.connect()
|
||||
surveys = db.surveys()
|
||||
|
||||
print("Reading surveys")
|
||||
for survey in surveys:
|
||||
@@ -34,49 +34,80 @@ if __name__ == '__main__':
|
||||
db.set_survey(survey["schema"])
|
||||
|
||||
try:
|
||||
raw_smsrc = survey["raw"]["smsrc"]
|
||||
raw_smsrc = survey["raw"]["source"]["smsrc"]["header"]
|
||||
except KeyError:
|
||||
print("No SmartSource data configuration")
|
||||
continue
|
||||
|
||||
flags = 0
|
||||
if "flags" in raw_smsrc:
|
||||
configuration.rxflags(raw_smsrc["flags"])
|
||||
# NOTE I've no idea what this is 🤔
|
||||
# flags = 0
|
||||
# if "flags" in raw_smsrc:
|
||||
# configuration.rxflags(raw_smsrc["flags"])
|
||||
|
||||
pattern = raw_smsrc["pattern"]
|
||||
rx = re.compile(pattern["regex"], flags)
|
||||
lineNameInfo = raw_smsrc.get("lineNameInfo")
|
||||
pattern = raw_smsrc.get("pattern")
|
||||
rx = None
|
||||
if pattern and pattern.get("regex"):
|
||||
rx = re.compile(pattern["regex"])
|
||||
|
||||
for fileprefix in raw_smsrc["paths"]:
|
||||
print(f"Path prefix: {fileprefix}")
|
||||
realprefix = configuration.translate_path(fileprefix)
|
||||
print(f"Path prefix: {fileprefix} → {realprefix}")
|
||||
|
||||
for globspec in raw_smsrc["globs"]:
|
||||
for filepath in pathlib.Path(fileprefix).glob(globspec):
|
||||
filepath = str(filepath)
|
||||
print(f"Found {filepath}")
|
||||
for physical_filepath in pathlib.Path(realprefix).glob(globspec):
|
||||
physical_filepath = str(physical_filepath)
|
||||
logical_filepath = configuration.untranslate_path(physical_filepath)
|
||||
print(f"Found {logical_filepath}")
|
||||
|
||||
if not db.file_in_db(filepath):
|
||||
if not db.file_in_db(logical_filepath):
|
||||
|
||||
age = time.time() - os.path.getmtime(filepath)
|
||||
age = time.time() - os.path.getmtime(physical_filepath)
|
||||
if age < file_min_age:
|
||||
print("Skipping file because too new", filepath)
|
||||
print("Skipping file because too new", logical_filepath)
|
||||
continue
|
||||
|
||||
print("Importing")
|
||||
|
||||
match = rx.match(os.path.basename(filepath))
|
||||
if not match:
|
||||
error_message = f"File path not matching the expected format! ({filepath} ~ {pattern['regex']})"
|
||||
print(error_message, file=sys.stderr)
|
||||
print("This file will be ignored!")
|
||||
continue
|
||||
if rx:
|
||||
match = rx.match(os.path.basename(logical_filepath))
|
||||
if not match:
|
||||
error_message = f"File path not matching the expected format! ({logical_filepath} ~ {pattern['regex']})"
|
||||
print(error_message, file=sys.stderr)
|
||||
print("This file will be ignored!")
|
||||
continue
|
||||
|
||||
file_info = dict(zip(pattern["captures"], match.groups()))
|
||||
file_info = dict(zip(pattern["captures"], match.groups()))
|
||||
file_info["meta"] = {}
|
||||
|
||||
smsrc_records = smsrc.from_file(filepath)
|
||||
|
||||
if lineNameInfo:
|
||||
basename = os.path.basename(physical_filepath)
|
||||
fields = lineNameInfo.get("fields", {})
|
||||
fixed = lineNameInfo.get("fixed")
|
||||
try:
|
||||
parsed_line = fwr.parse_line(basename, fields, fixed)
|
||||
except ValueError as err:
|
||||
parsed_line = "Line format error: " + str(err)
|
||||
if type(parsed_line) == str:
|
||||
print(parsed_line, file=sys.stderr)
|
||||
print("This file will be ignored!")
|
||||
continue
|
||||
|
||||
file_info = {}
|
||||
file_info["sequence"] = parsed_line["sequence"]
|
||||
file_info["line"] = parsed_line["line"]
|
||||
del(parsed_line["sequence"])
|
||||
del(parsed_line["line"])
|
||||
file_info["meta"] = {
|
||||
"fileInfo": parsed_line
|
||||
}
|
||||
|
||||
smsrc_records = smsrc.from_file(physical_filepath)
|
||||
|
||||
print("Saving")
|
||||
|
||||
db.save_raw_smsrc(smsrc_records, file_info, filepath)
|
||||
db.save_raw_smsrc(smsrc_records, file_info, logical_filepath)
|
||||
else:
|
||||
print("Already in DB")
|
||||
|
||||
|
||||
@@ -15,25 +15,4 @@ from datastore import Datastore
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
print("Reading configuration")
|
||||
configs = configuration.files(include_archived = True)
|
||||
|
||||
print("Connecting to database")
|
||||
db = Datastore()
|
||||
#db.connect()
|
||||
|
||||
print("Reading surveys")
|
||||
for config in configs:
|
||||
filepath = config[0]
|
||||
survey = config[1]
|
||||
print(f'Survey: {survey["id"]} ({filepath})')
|
||||
db.set_survey(survey["schema"])
|
||||
if not db.file_in_db(filepath):
|
||||
print("Saving to DB")
|
||||
db.save_file_data(filepath, json.dumps(survey))
|
||||
print("Applying survey configuration")
|
||||
db.apply_survey_configuration()
|
||||
else:
|
||||
print("Already in DB")
|
||||
|
||||
print("Done")
|
||||
print("This function is obsolete. Returning with no action")
|
||||
|
||||
@@ -7,7 +7,6 @@ P1/11 parsing functions.
|
||||
import math
|
||||
import re
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from parse_fwr import parse_fwr
|
||||
|
||||
def _int (string):
|
||||
return int(float(string))
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
def parse_fwr (string, widths, start=0):
    """Parse a fixed-width record.

    string: the string to parse.
    widths: a list of field widths. A negative width denotes a field to
            be skipped (its absolute value is consumed but not kept).
    start:  optional start index.

    Returns a list of strings, one per positive width, in order.
    """
    fields = []
    pos = start
    for width in widths:
        if width > 0:
            fields.append(string[pos : pos + width])
            pos += width
        else:
            # Negative width: advance past the field without capturing it.
            pos += abs(width)
    return fields
|
||||
@@ -1,14 +1,51 @@
|
||||
import sps
|
||||
import fwr
|
||||
import delimited
|
||||
|
||||
"""
|
||||
Preplot importing functions.
|
||||
"""
|
||||
|
||||
def from_file (file):
|
||||
if not "type" in file or file["type"] == "sps":
|
||||
records = sps.from_file(file["path"], file["format"] if "format" in file else None )
|
||||
|
||||
def is_fixed_width (file):
|
||||
fixed_width_types = [ "sps1", "sps21", "p190", "fixed-width" ]
|
||||
return type(file) == dict and "type" in file and file["type"] in fixed_width_types
|
||||
|
||||
def is_delimited (file):
|
||||
delimited_types = [ "csv", "p111", "x-sl+csv" ]
|
||||
return type(file) == dict and "type" in file and file["type"] in delimited_types
|
||||
|
||||
def from_file (file, realpath = None):
|
||||
"""
|
||||
Return a list of dicts, where each dict has the structure:
|
||||
{
|
||||
"line_name": <int>,
|
||||
"points": [
|
||||
{
|
||||
"line_name": <int>,
|
||||
"point_number": <int>,
|
||||
"easting": <float>,
|
||||
"northing": <float>
|
||||
},
|
||||
…
|
||||
]
|
||||
}
|
||||
On error, return a string describing the error condition.
|
||||
"""
|
||||
|
||||
filepath = realpath or file["path"]
|
||||
if is_fixed_width(file):
|
||||
records = fwr.from_file(filepath, file)
|
||||
elif is_delimited(file):
|
||||
records = delimited.from_file(filepath, file)
|
||||
else:
|
||||
return "Not an SPS file"
|
||||
return "Unrecognised file format"
|
||||
|
||||
if type(records) == str:
|
||||
# This is an error message
|
||||
return records
|
||||
|
||||
if file.get("type") == "x-sl+csv":
|
||||
return records
|
||||
|
||||
lines = []
|
||||
line_names = set([r["line_name"] for r in records])
|
||||
|
||||
@@ -13,21 +13,27 @@ from datastore import Datastore
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
print("Reading configuration")
|
||||
surveys = configuration.surveys()
|
||||
|
||||
print("Connecting to database")
|
||||
db = Datastore()
|
||||
|
||||
print("Reading configuration")
|
||||
surveys = db.surveys()
|
||||
|
||||
print("Reading surveys")
|
||||
for survey in surveys:
|
||||
print(f'Survey: {survey["id"]} ({survey["schema"]})')
|
||||
db.set_survey(survey["schema"])
|
||||
|
||||
for file in db.list_files():
|
||||
path = file[0]
|
||||
if not os.path.exists(path):
|
||||
print(path, "NOT FOUND")
|
||||
db.del_file(path)
|
||||
try:
|
||||
path = configuration.translate_path(file[0])
|
||||
if not os.path.exists(path):
|
||||
print(path, "NOT FOUND")
|
||||
db.del_file(file[0])
|
||||
except TypeError:
|
||||
# In case the logical path no longer matches
|
||||
# the Dougal configuration.
|
||||
print(file[0], "COULD NOT BE TRANSLATED TO A PHYSICAL PATH. DELETING")
|
||||
db.del_file(file[0])
|
||||
|
||||
print("Done")
|
||||
|
||||
@@ -90,6 +90,12 @@ function run () {
|
||||
rm $STDOUTLOG $STDERRLOG
|
||||
}
|
||||
|
||||
function cleanup () {
|
||||
if [[ -f $LOCKFILE ]]; then
|
||||
rm "$LOCKFILE"
|
||||
fi
|
||||
}
|
||||
|
||||
if [[ -f $LOCKFILE ]]; then
|
||||
PID=$(cat "$LOCKFILE")
|
||||
if pgrep -F "$LOCKFILE"; then
|
||||
@@ -107,6 +113,13 @@ echo "$$" > "$LOCKFILE" || {
|
||||
}
|
||||
print_info "Start run"
|
||||
|
||||
print_log "Check if data is accessible"
|
||||
$BINDIR/check_mounts_present.py || {
|
||||
print_warning "Import mounts not accessible. Inhibiting all tasks!"
|
||||
cleanup
|
||||
exit 253
|
||||
}
|
||||
|
||||
print_log "Purge deleted files"
|
||||
run $BINDIR/purge_deleted_files.py
|
||||
|
||||
@@ -119,18 +132,21 @@ run $BINDIR/import_preplots.py
|
||||
print_log "Import raw P1/11"
|
||||
run $BINDIR/import_raw_p111.py
|
||||
|
||||
print_log "Import raw P1/90"
|
||||
run $BINDIR/import_raw_p190.py
|
||||
#print_log "Import raw P1/90"
|
||||
#run $BINDIR/import_raw_p190.py
|
||||
|
||||
print_log "Import final P1/11"
|
||||
run $BINDIR/import_final_p111.py
|
||||
|
||||
print_log "Import final P1/90"
|
||||
run $BINDIR/import_final_p190.py
|
||||
#print_log "Import final P1/90"
|
||||
#run $BINDIR/import_final_p190.py
|
||||
|
||||
print_log "Import SmartSource data"
|
||||
run $BINDIR/import_smsrc.py
|
||||
|
||||
print_log "Import map user layers"
|
||||
run $BINDIR/import_map_layers.py
|
||||
|
||||
# if [[ -z "$RUNNER_NOEXPORT" ]]; then
|
||||
# print_log "Export system data"
|
||||
# run $BINDIR/system_exports.py
|
||||
|
||||
51
bin/sps.py
51
bin/sps.py
@@ -1,51 +0,0 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
"""
|
||||
SPS importing functions.
|
||||
|
||||
And by SPS, we mean more or less any line-delimited, fixed-width record format.
|
||||
"""
|
||||
|
||||
import builtins
|
||||
from parse_fwr import parse_fwr
|
||||
|
||||
def int (v):
    # Deliberately shadows the builtin: an int normaliser that also
    # accepts float-looking strings such as "12.0" by going through
    # float() first.  Use builtins.int for the real thing.
    return builtins.int(float(v))
|
||||
|
||||
def parse_line (string, spec):
    """Parse a line from an SPS file.

    spec supplies parallel lists: "names" (output keys), "widths"
    (fixed-width layout for parse_fwr) and "normalisers" (casting
    callables applied per field).  Returns a dict keyed by the names.
    """
    raw_fields = parse_fwr(string, spec["widths"])
    normalised = [ cast(raw) for cast, raw in zip(spec["normalisers"], raw_fields) ]
    return dict(zip(spec["names"], normalised))
|
||||
|
||||
def from_file(path, spec = None):
    """Read an SPS-style fixed-width file into a list of record dicts.

    path: file to read.
    spec: optional layout with "names", "widths" and "types"; when
          omitted, a default SPS point-record layout is used.  The
          computed "normalisers" key is removed from spec before
          returning.

    Stops at an "EOF" trailer line.  Returns the list of parsed records.
    """
    if spec is None:
        # Default layout: line name, point number, easting, northing.
        spec = {
            "names": [ "line_name", "point_number", "easting", "northing" ],
            "widths": [ -1, 10, 10, -25, 10, 10 ],
            "normalisers": [ int, int, float, float ]
        }
    else:
        # BUG FIX / security: resolve type tokens through an explicit
        # mapping instead of eval().  Note "int" maps to this module's
        # float-tolerant wrapper, matching what eval() resolved before.
        # Unknown tokens are still dropped (legacy behaviour), which can
        # desynchronise normalisers from names/widths — avoid them.
        normaliser_tokens = { "int": int, "float": float, "str": str, "bool": bool }
        spec["normalisers"] = [ normaliser_tokens[t] for t in spec["types"] if t in normaliser_tokens ]

    records = []
    with open(path) as fd:
        cnt = 0
        line = fd.readline()
        while line:
            cnt = cnt+1

            # BUG FIX: readline() keeps the newline, so the old exact
            # comparison (line == "EOF") only matched when "EOF" was the
            # very last line with no trailing newline.
            if line.rstrip("\r\n") == "EOF":
                break

            record = parse_line(line, spec)
            if record is not None:
                records.append(record)

            line = fd.readline()

    # Don't leak the computed callables back into the caller's spec.
    del spec["normalisers"]
    return records
||||
60
bin/update_comparisons.js
Executable file
60
bin/update_comparisons.js
Executable file
@@ -0,0 +1,60 @@
|
||||
#!/usr/bin/node
|
||||
|
||||
const cmp = require('../lib/www/server/lib/comparisons');
|
||||
|
||||
|
||||
async function main () {
|
||||
console.log("Retrieving project groups");
|
||||
const groups = await cmp.groups();
|
||||
|
||||
if (!Object.keys(groups??{})?.length) {
|
||||
console.log("No groups found");
|
||||
return 0;
|
||||
}
|
||||
|
||||
console.log(`Found ${groups.length} groups: ${Object.keys(groups).join(", ")}`);
|
||||
|
||||
for (const groupName of Object.keys(groups)) {
|
||||
const projects = groups[groupName];
|
||||
|
||||
console.log(`Fetching saved comparisons for ${groupName}`);
|
||||
|
||||
const comparisons = await cmp.getGroup(groupName);
|
||||
|
||||
// Check if there are any projects that have been modified since last comparison
|
||||
// or if there are any pairs that are no longer part of the group
|
||||
|
||||
const outdated = comparisons.filter( c => {
|
||||
const baseline_tstamp = projects.find( p => p.pid === c.baseline_pid )?.tstamp;
|
||||
const monitor_tstamp = projects.find( p => p.pid === c.monitor_pid )?.tstamp;
|
||||
return (c.tstamp < baseline_tstamp) || (c.tstamp < monitor_tstamp) ||
|
||||
baseline_tstamp == null || monitor_tstamp == null;
|
||||
});
|
||||
|
||||
for (const comparison of outdated) {
|
||||
console.log(`Removing stale comparison: ${comparison.baseline_pid} → ${comparison.monitor_pid}`);
|
||||
await cmp.remove(comparison.baseline_pid, comparison.monitor_pid);
|
||||
}
|
||||
|
||||
if (projects?.length < 2) {
|
||||
console.log(`Group ${groupName} has less than two projects. No comparisons are possible`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Re-run the comparisons that are not in the database. They may
|
||||
// be missing either beacause they were not there to start with
|
||||
// or because we just removed them due to being stale
|
||||
|
||||
console.log(`Recalculating group ${groupName}`);
|
||||
await cmp.saveGroup(groupName);
|
||||
}
|
||||
|
||||
console.log("Comparisons update done");
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
main();
|
||||
} else {
|
||||
module.exports = main;
|
||||
}
|
||||
@@ -32,6 +32,25 @@ imports:
|
||||
# least this many seconds ago.
|
||||
file_min_age: 60
|
||||
|
||||
# These paths refer to remote mounts which must be present in order
|
||||
# for imports to work. If any of these paths are empty, import actions
|
||||
# (including data deletion) will be inhibited. This is to cope with
|
||||
# things like transient network failures.
|
||||
mounts:
|
||||
- /srv/mnt/Data
|
||||
|
||||
# These paths can be exposed to end users via the API. They should
|
||||
# contain the locations were project data, or any other user data
|
||||
# that needs to be accessible by Dougal, is located.
|
||||
#
|
||||
# This key can be either a string or an object:
|
||||
# - If a string, it points to the root path for Dougal-accessible data.
|
||||
# - If an object, there is an implicit root and the first-level
|
||||
# paths are denoted by the keys, with the values being their
|
||||
# respective physical paths.
|
||||
# Non-absolute paths are relative to $DOUGAL_ROOT.
|
||||
paths: /srv/mnt/Data
|
||||
|
||||
queues:
|
||||
asaqc:
|
||||
request:
|
||||
@@ -1,3 +1,5 @@
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.12"}')
|
||||
\connect dougal
|
||||
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.5"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.3.12"}' WHERE public.info.key = 'version';
|
||||
SET value = public.info.value || '{"db_schema": "0.4.5"}' WHERE public.info.key = 'version';
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
-- PostgreSQL database dump
|
||||
--
|
||||
|
||||
-- Dumped from database version 14.2
|
||||
-- Dumped by pg_dump version 14.2
|
||||
-- Dumped from database version 14.8
|
||||
-- Dumped by pg_dump version 14.9
|
||||
|
||||
SET statement_timeout = 0;
|
||||
SET lock_timeout = 0;
|
||||
@@ -70,173 +70,171 @@ If the path matches that of an existing entry, delete that entry (which cascades
|
||||
CREATE PROCEDURE _SURVEY__TEMPLATE_.adjust_planner()
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
DECLARE
|
||||
_planner_config jsonb;
|
||||
_planned_line planned_lines%ROWTYPE;
|
||||
_lag interval;
|
||||
_last_sequence sequences_summary%ROWTYPE;
|
||||
_deltatime interval;
|
||||
_shotinterval interval;
|
||||
_tstamp timestamptz;
|
||||
_incr integer;
|
||||
BEGIN
|
||||
DECLARE
|
||||
_planner_config jsonb;
|
||||
_planned_line planned_lines%ROWTYPE;
|
||||
_lag interval;
|
||||
_last_sequence sequences_summary%ROWTYPE;
|
||||
_deltatime interval;
|
||||
_shotinterval interval;
|
||||
_tstamp timestamptz;
|
||||
_incr integer;
|
||||
BEGIN
|
||||
|
||||
SET CONSTRAINTS planned_lines_pkey DEFERRED;
|
||||
SET CONSTRAINTS planned_lines_pkey DEFERRED;
|
||||
|
||||
SELECT data->'planner'
|
||||
INTO _planner_config
|
||||
FROM file_data
|
||||
WHERE data ? 'planner';
|
||||
SELECT project_configuration()->'planner'
|
||||
INTO _planner_config;
|
||||
|
||||
SELECT *
|
||||
INTO _last_sequence
|
||||
FROM sequences_summary
|
||||
ORDER BY sequence DESC
|
||||
LIMIT 1;
|
||||
SELECT *
|
||||
INTO _last_sequence
|
||||
FROM sequences_summary
|
||||
ORDER BY sequence DESC
|
||||
LIMIT 1;
|
||||
|
||||
SELECT *
|
||||
INTO _planned_line
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
|
||||
SELECT *
|
||||
INTO _planned_line
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
|
||||
|
||||
SELECT
|
||||
COALESCE(
|
||||
((lead(ts0) OVER (ORDER BY sequence)) - ts1),
|
||||
make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer)
|
||||
)
|
||||
INTO _lag
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
|
||||
|
||||
_incr = sign(_last_sequence.lsp - _last_sequence.fsp);
|
||||
|
||||
RAISE NOTICE '_planner_config: %', _planner_config;
|
||||
RAISE NOTICE '_last_sequence: %', _last_sequence;
|
||||
RAISE NOTICE '_planned_line: %', _planned_line;
|
||||
RAISE NOTICE '_incr: %', _incr;
|
||||
|
||||
-- Does the latest sequence match a planned sequence?
|
||||
IF _planned_line IS NULL THEN -- No it doesn't
|
||||
RAISE NOTICE 'Latest sequence shot does not match a planned sequence';
|
||||
SELECT * INTO _planned_line FROM planned_lines ORDER BY sequence ASC LIMIT 1;
|
||||
RAISE NOTICE '_planned_line: %', _planned_line;
|
||||
|
||||
IF _planned_line.sequence <= _last_sequence.sequence THEN
|
||||
RAISE NOTICE 'Renumbering the planned sequences starting from %', _planned_line.sequence + 1;
|
||||
-- Renumber the planned sequences starting from last shot sequence number + 1
|
||||
UPDATE planned_lines
|
||||
SET sequence = sequence + _last_sequence.sequence - _planned_line.sequence + 1;
|
||||
END IF;
|
||||
|
||||
-- The correction to make to the first planned line's ts0 will be based on either the last
|
||||
-- sequence's EOL + default line change time or the current time, whichever is later.
|
||||
_deltatime := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1) + make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer), current_timestamp) - _planned_line.ts0;
|
||||
|
||||
-- Is the first of the planned lines start time in the past? (±5 mins)
|
||||
IF _planned_line.ts0 < (current_timestamp - make_interval(mins => 5)) THEN
|
||||
RAISE NOTICE 'First planned line is in the past. Adjusting times by %', _deltatime;
|
||||
-- Adjust the start / end time of the planned lines by assuming that we are at
|
||||
-- `defaultLineChangeDuration` minutes away from SOL of the first planned line.
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
ts0 = ts0 + _deltatime,
|
||||
ts1 = ts1 + _deltatime;
|
||||
END IF;
|
||||
|
||||
ELSE -- Yes it does
|
||||
RAISE NOTICE 'Latest sequence does match a planned sequence: %, %', _planned_line.sequence, _planned_line.line;
|
||||
|
||||
-- Is it online?
|
||||
IF EXISTS(SELECT 1 FROM raw_lines_files WHERE sequence = _last_sequence.sequence AND hash = '*online*') THEN
|
||||
-- Yes it is
|
||||
RAISE NOTICE 'Sequence % is online', _last_sequence.sequence;
|
||||
|
||||
-- Let us get the SOL from the events log if we can
|
||||
RAISE NOTICE 'Trying to set fsp, ts0 from events log FSP, FGSP';
|
||||
WITH e AS (
|
||||
SELECT * FROM event_log
|
||||
WHERE
|
||||
sequence = _last_sequence.sequence
|
||||
AND ('FSP' = ANY(labels) OR 'FGSP' = ANY(labels))
|
||||
ORDER BY tstamp LIMIT 1
|
||||
SELECT
|
||||
COALESCE(
|
||||
((lead(ts0) OVER (ORDER BY sequence)) - ts1),
|
||||
make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer)
|
||||
)
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
fsp = COALESCE(e.point, fsp),
|
||||
ts0 = COALESCE(e.tstamp, ts0)
|
||||
FROM e
|
||||
WHERE planned_lines.sequence = _last_sequence.sequence;
|
||||
INTO _lag
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
|
||||
|
||||
-- Shot interval
|
||||
_shotinterval := (_last_sequence.ts1 - _last_sequence.ts0) / abs(_last_sequence.lsp - _last_sequence.fsp);
|
||||
_incr = sign(_last_sequence.lsp - _last_sequence.fsp);
|
||||
|
||||
RAISE NOTICE 'Estimating EOL from current shot interval: %', _shotinterval;
|
||||
RAISE NOTICE '_planner_config: %', _planner_config;
|
||||
RAISE NOTICE '_last_sequence: %', _last_sequence;
|
||||
RAISE NOTICE '_planned_line: %', _planned_line;
|
||||
RAISE NOTICE '_incr: %', _incr;
|
||||
|
||||
SELECT (abs(lsp-fsp) * _shotinterval + ts0) - ts1
|
||||
INTO _deltatime
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence;
|
||||
-- Does the latest sequence match a planned sequence?
|
||||
IF _planned_line IS NULL THEN -- No it doesn't
|
||||
RAISE NOTICE 'Latest sequence shot does not match a planned sequence';
|
||||
SELECT * INTO _planned_line FROM planned_lines ORDER BY sequence ASC LIMIT 1;
|
||||
RAISE NOTICE '_planned_line: %', _planned_line;
|
||||
|
||||
---- Set ts1 for the current sequence
|
||||
--UPDATE planned_lines
|
||||
--SET
|
||||
--ts1 = (abs(lsp-fsp) * _shotinterval) + ts0
|
||||
--WHERE sequence = _last_sequence.sequence;
|
||||
|
||||
RAISE NOTICE 'Adjustment is %', _deltatime;
|
||||
|
||||
IF abs(EXTRACT(EPOCH FROM _deltatime)) < 8 THEN
|
||||
RAISE NOTICE 'Adjustment too small (< 8 s), so not applying it';
|
||||
RETURN;
|
||||
IF _planned_line.sequence <= _last_sequence.sequence THEN
|
||||
RAISE NOTICE 'Renumbering the planned sequences starting from %', _planned_line.sequence + 1;
|
||||
-- Renumber the planned sequences starting from last shot sequence number + 1
|
||||
UPDATE planned_lines
|
||||
SET sequence = sequence + _last_sequence.sequence - _planned_line.sequence + 1;
|
||||
END IF;
|
||||
|
||||
-- Adjust ts1 for the current sequence
|
||||
UPDATE planned_lines
|
||||
SET ts1 = ts1 + _deltatime
|
||||
WHERE sequence = _last_sequence.sequence;
|
||||
-- The correction to make to the first planned line's ts0 will be based on either the last
|
||||
-- sequence's EOL + default line change time or the current time, whichever is later.
|
||||
_deltatime := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1) + make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer), current_timestamp) - _planned_line.ts0;
|
||||
|
||||
-- Now shift all sequences after
|
||||
UPDATE planned_lines
|
||||
SET ts0 = ts0 + _deltatime, ts1 = ts1 + _deltatime
|
||||
WHERE sequence > _last_sequence.sequence;
|
||||
-- Is the first of the planned lines start time in the past? (±5 mins)
|
||||
IF _planned_line.ts0 < (current_timestamp - make_interval(mins => 5)) THEN
|
||||
RAISE NOTICE 'First planned line is in the past. Adjusting times by %', _deltatime;
|
||||
-- Adjust the start / end time of the planned lines by assuming that we are at
|
||||
-- `defaultLineChangeDuration` minutes away from SOL of the first planned line.
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
ts0 = ts0 + _deltatime,
|
||||
ts1 = ts1 + _deltatime;
|
||||
END IF;
|
||||
|
||||
RAISE NOTICE 'Deleting planned sequences before %', _planned_line.sequence;
|
||||
-- Remove all previous planner entries.
|
||||
DELETE
|
||||
FROM planned_lines
|
||||
WHERE sequence < _last_sequence.sequence;
|
||||
ELSE -- Yes it does
|
||||
RAISE NOTICE 'Latest sequence does match a planned sequence: %, %', _planned_line.sequence, _planned_line.line;
|
||||
|
||||
ELSE
|
||||
-- No it isn't
|
||||
RAISE NOTICE 'Sequence % is offline', _last_sequence.sequence;
|
||||
-- Is it online?
|
||||
IF EXISTS(SELECT 1 FROM raw_lines_files WHERE sequence = _last_sequence.sequence AND hash = '*online*') THEN
|
||||
-- Yes it is
|
||||
RAISE NOTICE 'Sequence % is online', _last_sequence.sequence;
|
||||
|
||||
-- We were supposed to finish at _planned_line.ts1 but we finished at:
|
||||
_tstamp := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1), current_timestamp);
|
||||
-- WARNING Next line is for testing only
|
||||
--_tstamp := COALESCE(_last_sequence.ts1_final, _last_sequence.ts1);
|
||||
-- So we need to adjust timestamps by:
|
||||
_deltatime := _tstamp - _planned_line.ts1;
|
||||
-- Let us get the SOL from the events log if we can
|
||||
RAISE NOTICE 'Trying to set fsp, ts0 from events log FSP, FGSP';
|
||||
WITH e AS (
|
||||
SELECT * FROM event_log
|
||||
WHERE
|
||||
sequence = _last_sequence.sequence
|
||||
AND ('FSP' = ANY(labels) OR 'FGSP' = ANY(labels))
|
||||
ORDER BY tstamp LIMIT 1
|
||||
)
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
fsp = COALESCE(e.point, fsp),
|
||||
ts0 = COALESCE(e.tstamp, ts0)
|
||||
FROM e
|
||||
WHERE planned_lines.sequence = _last_sequence.sequence;
|
||||
|
||||
RAISE NOTICE 'Planned end: %, actual end: % (%, %)', _planned_line.ts1, _tstamp, _planned_line.sequence, _last_sequence.sequence;
|
||||
RAISE NOTICE 'Shifting times by % for sequences > %', _deltatime, _planned_line.sequence;
|
||||
-- NOTE: This won't work if sequences are not, err… sequential.
|
||||
-- NOTE: This has been known to happen in 2020.
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
ts0 = ts0 + _deltatime,
|
||||
ts1 = ts1 + _deltatime
|
||||
WHERE sequence > _planned_line.sequence;
|
||||
-- Shot interval
|
||||
_shotinterval := (_last_sequence.ts1 - _last_sequence.ts0) / abs(_last_sequence.lsp - _last_sequence.fsp);
|
||||
|
||||
RAISE NOTICE 'Deleting planned sequences up to %', _planned_line.sequence;
|
||||
-- Remove all previous planner entries.
|
||||
DELETE
|
||||
FROM planned_lines
|
||||
WHERE sequence <= _last_sequence.sequence;
|
||||
RAISE NOTICE 'Estimating EOL from current shot interval: %', _shotinterval;
|
||||
|
||||
SELECT (abs(lsp-fsp) * _shotinterval + ts0) - ts1
|
||||
INTO _deltatime
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence;
|
||||
|
||||
---- Set ts1 for the current sequence
|
||||
--UPDATE planned_lines
|
||||
--SET
|
||||
--ts1 = (abs(lsp-fsp) * _shotinterval) + ts0
|
||||
--WHERE sequence = _last_sequence.sequence;
|
||||
|
||||
RAISE NOTICE 'Adjustment is %', _deltatime;
|
||||
|
||||
IF abs(EXTRACT(EPOCH FROM _deltatime)) < 8 THEN
|
||||
RAISE NOTICE 'Adjustment too small (< 8 s), so not applying it';
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
-- Adjust ts1 for the current sequence
|
||||
UPDATE planned_lines
|
||||
SET ts1 = ts1 + _deltatime
|
||||
WHERE sequence = _last_sequence.sequence;
|
||||
|
||||
-- Now shift all sequences after
|
||||
UPDATE planned_lines
|
||||
SET ts0 = ts0 + _deltatime, ts1 = ts1 + _deltatime
|
||||
WHERE sequence > _last_sequence.sequence;
|
||||
|
||||
RAISE NOTICE 'Deleting planned sequences before %', _planned_line.sequence;
|
||||
-- Remove all previous planner entries.
|
||||
DELETE
|
||||
FROM planned_lines
|
||||
WHERE sequence < _last_sequence.sequence;
|
||||
|
||||
ELSE
|
||||
-- No it isn't
|
||||
RAISE NOTICE 'Sequence % is offline', _last_sequence.sequence;
|
||||
|
||||
-- We were supposed to finish at _planned_line.ts1 but we finished at:
|
||||
_tstamp := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1), current_timestamp);
|
||||
-- WARNING Next line is for testing only
|
||||
--_tstamp := COALESCE(_last_sequence.ts1_final, _last_sequence.ts1);
|
||||
-- So we need to adjust timestamps by:
|
||||
_deltatime := _tstamp - _planned_line.ts1;
|
||||
|
||||
RAISE NOTICE 'Planned end: %, actual end: % (%, %)', _planned_line.ts1, _tstamp, _planned_line.sequence, _last_sequence.sequence;
|
||||
RAISE NOTICE 'Shifting times by % for sequences > %', _deltatime, _planned_line.sequence;
|
||||
-- NOTE: This won't work if sequences are not, err… sequential.
|
||||
-- NOTE: This has been known to happen in 2020.
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
ts0 = ts0 + _deltatime,
|
||||
ts1 = ts1 + _deltatime
|
||||
WHERE sequence > _planned_line.sequence;
|
||||
|
||||
RAISE NOTICE 'Deleting planned sequences up to %', _planned_line.sequence;
|
||||
-- Remove all previous planner entries.
|
||||
DELETE
|
||||
FROM planned_lines
|
||||
WHERE sequence <= _last_sequence.sequence;
|
||||
|
||||
END IF;
|
||||
|
||||
END IF;
|
||||
|
||||
END IF;
|
||||
END;
|
||||
END;
|
||||
$$;
|
||||
|
||||
|
||||
@@ -367,8 +365,8 @@ COMMENT ON PROCEDURE _SURVEY__TEMPLATE_.augment_event_data(IN maxspan numeric) I
|
||||
CREATE FUNCTION _SURVEY__TEMPLATE_.binning_parameters() RETURNS jsonb
|
||||
LANGUAGE sql STABLE LEAKPROOF PARALLEL SAFE
|
||||
AS $$
|
||||
SELECT data->'binning' binning FROM file_data WHERE data->>'binning' IS NOT NULL LIMIT 1;
|
||||
$$;
|
||||
SELECT project_configuration()->'binning' binning;
|
||||
$$;
|
||||
|
||||
|
||||
ALTER FUNCTION _SURVEY__TEMPLATE_.binning_parameters() OWNER TO postgres;
|
||||
@@ -401,6 +399,62 @@ $$;
|
||||
|
||||
ALTER FUNCTION _SURVEY__TEMPLATE_.clear_shot_qc() OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: event_log_uid_seq; Type: SEQUENCE; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE SEQUENCE _SURVEY__TEMPLATE_.event_log_uid_seq
|
||||
AS integer
|
||||
START WITH 1
|
||||
INCREMENT BY 1
|
||||
NO MINVALUE
|
||||
NO MAXVALUE
|
||||
CACHE 1;
|
||||
|
||||
|
||||
ALTER TABLE _SURVEY__TEMPLATE_.event_log_uid_seq OWNER TO postgres;
|
||||
|
||||
SET default_tablespace = '';
|
||||
|
||||
SET default_table_access_method = heap;
|
||||
|
||||
--
|
||||
-- Name: event_log_full; Type: TABLE; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE TABLE _SURVEY__TEMPLATE_.event_log_full (
|
||||
uid integer DEFAULT nextval('_SURVEY__TEMPLATE_.event_log_uid_seq'::regclass) NOT NULL,
|
||||
id integer NOT NULL,
|
||||
tstamp timestamp with time zone,
|
||||
sequence integer,
|
||||
point integer,
|
||||
remarks text DEFAULT ''::text NOT NULL,
|
||||
labels text[] DEFAULT ARRAY[]::text[] NOT NULL,
|
||||
meta jsonb DEFAULT '{}'::jsonb NOT NULL,
|
||||
validity tstzrange NOT NULL,
|
||||
CONSTRAINT event_log_full_check CHECK ((((tstamp IS NOT NULL) AND (sequence IS NOT NULL) AND (point IS NOT NULL)) OR ((tstamp IS NOT NULL) AND (sequence IS NULL) AND (point IS NULL)) OR ((tstamp IS NULL) AND (sequence IS NOT NULL) AND (point IS NOT NULL)))),
|
||||
CONSTRAINT event_log_full_validity_check CHECK ((NOT isempty(validity)))
|
||||
);
|
||||
|
||||
|
||||
ALTER TABLE _SURVEY__TEMPLATE_.event_log_full OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: event_log_changes(timestamp with time zone); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE FUNCTION _SURVEY__TEMPLATE_.event_log_changes(ts0 timestamp with time zone) RETURNS SETOF _SURVEY__TEMPLATE_.event_log_full
|
||||
LANGUAGE sql
|
||||
AS $$
|
||||
SELECT *
|
||||
FROM event_log_full
|
||||
WHERE lower(validity) > ts0 OR upper(validity) IS NOT NULL AND upper(validity) > ts0
|
||||
ORDER BY lower(validity);
|
||||
$$;
|
||||
|
||||
|
||||
ALTER FUNCTION _SURVEY__TEMPLATE_.event_log_changes(ts0 timestamp with time zone) OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: event_log_full_insert(); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
@@ -883,46 +937,6 @@ $$;
|
||||
|
||||
ALTER FUNCTION _SURVEY__TEMPLATE_.ij_error(line double precision, point double precision, geom public.geometry) OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: event_log_uid_seq; Type: SEQUENCE; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE SEQUENCE _SURVEY__TEMPLATE_.event_log_uid_seq
|
||||
AS integer
|
||||
START WITH 1
|
||||
INCREMENT BY 1
|
||||
NO MINVALUE
|
||||
NO MAXVALUE
|
||||
CACHE 1;
|
||||
|
||||
|
||||
ALTER TABLE _SURVEY__TEMPLATE_.event_log_uid_seq OWNER TO postgres;
|
||||
|
||||
SET default_tablespace = '';
|
||||
|
||||
SET default_table_access_method = heap;
|
||||
|
||||
--
|
||||
-- Name: event_log_full; Type: TABLE; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE TABLE _SURVEY__TEMPLATE_.event_log_full (
|
||||
uid integer DEFAULT nextval('_SURVEY__TEMPLATE_.event_log_uid_seq'::regclass) NOT NULL,
|
||||
id integer NOT NULL,
|
||||
tstamp timestamp with time zone,
|
||||
sequence integer,
|
||||
point integer,
|
||||
remarks text DEFAULT ''::text NOT NULL,
|
||||
labels text[] DEFAULT ARRAY[]::text[] NOT NULL,
|
||||
meta jsonb DEFAULT '{}'::jsonb NOT NULL,
|
||||
validity tstzrange NOT NULL,
|
||||
CONSTRAINT event_log_full_check CHECK ((((tstamp IS NOT NULL) AND (sequence IS NOT NULL) AND (point IS NOT NULL)) OR ((tstamp IS NOT NULL) AND (sequence IS NULL) AND (point IS NULL)) OR ((tstamp IS NULL) AND (sequence IS NOT NULL) AND (point IS NOT NULL)))),
|
||||
CONSTRAINT event_log_full_validity_check CHECK ((NOT isempty(validity)))
|
||||
);
|
||||
|
||||
|
||||
ALTER TABLE _SURVEY__TEMPLATE_.event_log_full OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: event_log; Type: VIEW; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
@@ -1042,6 +1056,39 @@ ALTER PROCEDURE _SURVEY__TEMPLATE_.log_midnight_shots(IN dt0 date, IN dt1 date)
|
||||
COMMENT ON PROCEDURE _SURVEY__TEMPLATE_.log_midnight_shots(IN dt0 date, IN dt1 date) IS 'Add midnight shots between two dates dt0 and dt1 to the event_log, unless the events already exist.';
|
||||
|
||||
|
||||
--
|
||||
-- Name: project_configuration(); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE FUNCTION _SURVEY__TEMPLATE_.project_configuration() RETURNS jsonb
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
DECLARE
|
||||
schema_name text;
|
||||
configuration jsonb;
|
||||
BEGIN
|
||||
|
||||
SELECT nspname
|
||||
INTO schema_name
|
||||
FROM pg_namespace
|
||||
WHERE oid = (
|
||||
SELECT pronamespace
|
||||
FROM pg_proc
|
||||
WHERE oid = 'project_configuration'::regproc::oid
|
||||
);
|
||||
|
||||
SELECT meta
|
||||
INTO configuration
|
||||
FROM public.projects
|
||||
WHERE schema = schema_name;
|
||||
|
||||
RETURN configuration;
|
||||
END
|
||||
$$;
|
||||
|
||||
|
||||
ALTER FUNCTION _SURVEY__TEMPLATE_.project_configuration() OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: replace_placeholders(text, timestamp with time zone, integer, integer); Type: FUNCTION; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
@@ -2046,10 +2093,10 @@ CREATE VIEW _SURVEY__TEMPLATE_.preplot_summary AS
|
||||
ALTER TABLE _SURVEY__TEMPLATE_.preplot_summary OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: project_summary; Type: VIEW; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
-- Name: project_summary; Type: MATERIALIZED VIEW; Schema: _SURVEY__TEMPLATE_; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE VIEW _SURVEY__TEMPLATE_.project_summary AS
|
||||
CREATE MATERIALIZED VIEW _SURVEY__TEMPLATE_.project_summary AS
|
||||
WITH fls AS (
|
||||
SELECT avg((final_lines_summary.duration / ((final_lines_summary.num_points - 1))::double precision)) AS shooting_rate,
|
||||
avg((final_lines_summary.length / date_part('epoch'::text, final_lines_summary.duration))) AS speed,
|
||||
@@ -2092,7 +2139,8 @@ CREATE VIEW _SURVEY__TEMPLATE_.project_summary AS
|
||||
fls.speed AS shooting_rate
|
||||
FROM _SURVEY__TEMPLATE_.preplot_summary ps,
|
||||
fls,
|
||||
project;
|
||||
project
|
||||
WITH NO DATA;
|
||||
|
||||
|
||||
ALTER TABLE _SURVEY__TEMPLATE_.project_summary OWNER TO postgres;
|
||||
|
||||
162
etc/db/upgrades/upgrade26-v0.3.13-fix-missing-shots-summary.sql
Normal file
162
etc/db/upgrades/upgrade26-v0.3.13-fix-missing-shots-summary.sql
Normal file
@@ -0,0 +1,162 @@
|
||||
-- Fix wrong number of missing shots in summary views
|
||||
--
|
||||
-- New schema version: 0.3.13
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- Fixes a bug in the `final_lines_summary` and `raw_lines_summary` views
|
||||
-- which results in the number of missing shots being miscounted on jobs
|
||||
-- using three sources.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', schema_name;
|
||||
-- We need to set the search path because some of the trigger
|
||||
-- functions reference other tables in survey schemas assuming
|
||||
-- they are in the search path.
|
||||
EXECUTE format('SET search_path TO %I,public', schema_name);
|
||||
|
||||
|
||||
CREATE OR REPLACE VIEW raw_lines_summary AS
|
||||
WITH summary AS (
|
||||
SELECT DISTINCT rs.sequence,
|
||||
first_value(rs.point) OVER w AS fsp,
|
||||
last_value(rs.point) OVER w AS lsp,
|
||||
first_value(rs.tstamp) OVER w AS ts0,
|
||||
last_value(rs.tstamp) OVER w AS ts1,
|
||||
count(rs.point) OVER w AS num_points,
|
||||
count(pp.point) OVER w AS num_preplots,
|
||||
public.st_distance(first_value(rs.geometry) OVER w, last_value(rs.geometry) OVER w) AS length,
|
||||
((public.st_azimuth(first_value(rs.geometry) OVER w, last_value(rs.geometry) OVER w) * (180)::double precision) / pi()) AS azimuth
|
||||
FROM (raw_shots rs
|
||||
LEFT JOIN preplot_points pp USING (line, point))
|
||||
WINDOW w AS (PARTITION BY rs.sequence ORDER BY rs.tstamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
|
||||
)
|
||||
SELECT rl.sequence,
|
||||
rl.line,
|
||||
s.fsp,
|
||||
s.lsp,
|
||||
s.ts0,
|
||||
s.ts1,
|
||||
(s.ts1 - s.ts0) AS duration,
|
||||
s.num_points,
|
||||
s.num_preplots,
|
||||
(SELECT count(*) AS count
|
||||
FROM missing_sequence_raw_points
|
||||
WHERE missing_sequence_raw_points.sequence = s.sequence) AS missing_shots,
|
||||
s.length,
|
||||
s.azimuth,
|
||||
rl.remarks,
|
||||
rl.ntbp,
|
||||
rl.meta
|
||||
FROM (summary s
|
||||
JOIN raw_lines rl USING (sequence));
|
||||
|
||||
|
||||
CREATE OR REPLACE VIEW final_lines_summary AS
|
||||
WITH summary AS (
|
||||
SELECT DISTINCT fs.sequence,
|
||||
first_value(fs.point) OVER w AS fsp,
|
||||
last_value(fs.point) OVER w AS lsp,
|
||||
first_value(fs.tstamp) OVER w AS ts0,
|
||||
last_value(fs.tstamp) OVER w AS ts1,
|
||||
count(fs.point) OVER w AS num_points,
|
||||
public.st_distance(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) AS length,
|
||||
((public.st_azimuth(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) * (180)::double precision) / pi()) AS azimuth
|
||||
FROM final_shots fs
|
||||
WINDOW w AS (PARTITION BY fs.sequence ORDER BY fs.tstamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
|
||||
)
|
||||
SELECT fl.sequence,
|
||||
fl.line,
|
||||
s.fsp,
|
||||
s.lsp,
|
||||
s.ts0,
|
||||
s.ts1,
|
||||
(s.ts1 - s.ts0) AS duration,
|
||||
s.num_points,
|
||||
( SELECT count(*) AS count
|
||||
FROM missing_sequence_final_points
|
||||
WHERE missing_sequence_final_points.sequence = s.sequence) AS missing_shots,
|
||||
s.length,
|
||||
s.azimuth,
|
||||
fl.remarks,
|
||||
fl.meta
|
||||
FROM (summary s
|
||||
JOIN final_lines fl USING (sequence));
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
current_db_version TEXT;
|
||||
BEGIN
|
||||
|
||||
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
|
||||
|
||||
IF current_db_version >= '0.3.13' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Patch already applied';
|
||||
END IF;
|
||||
|
||||
IF current_db_version != '0.3.12' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Invalid database version: ' || current_db_version,
|
||||
HINT='Ensure all previous patches have been applied.';
|
||||
END IF;
|
||||
|
||||
FOR row IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'survey_%'
|
||||
ORDER BY schema_name
|
||||
LOOP
|
||||
CALL pg_temp.upgrade_survey_schema(row.schema_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.3.13"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.3.13"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
@@ -0,0 +1,122 @@
|
||||
-- Add a project_configuration() function to each survey schema
|
||||
--
|
||||
-- New schema version: 0.4.0
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This adapts the schema to the change in how project configurations are
|
||||
-- handled (https://gitlab.com/wgp/dougal/software/-/merge_requests/29)
|
||||
-- by creating a project_configuration() function which returns the
|
||||
-- current project's configuration data.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', schema_name;
|
||||
-- We need to set the search path because some of the trigger
|
||||
-- functions reference other tables in survey schemas assuming
|
||||
-- they are in the search path.
|
||||
EXECUTE format('SET search_path TO %I,public', schema_name);
|
||||
|
||||
CREATE OR REPLACE FUNCTION project_configuration()
|
||||
RETURNS jsonb
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
DECLARE
|
||||
schema_name text;
|
||||
configuration jsonb;
|
||||
BEGIN
|
||||
|
||||
SELECT nspname
|
||||
INTO schema_name
|
||||
FROM pg_namespace
|
||||
WHERE oid = (
|
||||
SELECT pronamespace
|
||||
FROM pg_proc
|
||||
WHERE oid = 'project_configuration'::regproc::oid
|
||||
);
|
||||
|
||||
SELECT meta
|
||||
INTO configuration
|
||||
FROM public.projects
|
||||
WHERE schema = schema_name;
|
||||
|
||||
RETURN configuration;
|
||||
END
|
||||
$$;
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
current_db_version TEXT;
|
||||
BEGIN
|
||||
|
||||
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
|
||||
|
||||
IF current_db_version >= '0.4.0' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Patch already applied';
|
||||
END IF;
|
||||
|
||||
IF current_db_version != '0.3.12' AND current_db_version != '0.3.13' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Invalid database version: ' || current_db_version,
|
||||
HINT='Ensure all previous patches have been applied.';
|
||||
END IF;
|
||||
|
||||
FOR row IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'survey_%'
|
||||
ORDER BY schema_name
|
||||
LOOP
|
||||
CALL pg_temp.upgrade_survey_schema(row.schema_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.0"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.4.0"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
@@ -0,0 +1,264 @@
|
||||
-- Modify adjust_planner() to use project_configuration()
|
||||
--
|
||||
-- New schema version: 0.4.1
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This modifies adjust_planner() to use project_configuration()
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', schema_name;
|
||||
-- We need to set the search path because some of the trigger
|
||||
-- functions reference other tables in survey schemas assuming
|
||||
-- they are in the search path.
|
||||
EXECUTE format('SET search_path TO %I,public', schema_name);
|
||||
|
||||
|
||||
CREATE OR REPLACE PROCEDURE adjust_planner()
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
DECLARE
|
||||
_planner_config jsonb;
|
||||
_planned_line planned_lines%ROWTYPE;
|
||||
_lag interval;
|
||||
_last_sequence sequences_summary%ROWTYPE;
|
||||
_deltatime interval;
|
||||
_shotinterval interval;
|
||||
_tstamp timestamptz;
|
||||
_incr integer;
|
||||
BEGIN
|
||||
|
||||
SET CONSTRAINTS planned_lines_pkey DEFERRED;
|
||||
|
||||
SELECT project_configuration()->'planner'
|
||||
INTO _planner_config;
|
||||
|
||||
SELECT *
|
||||
INTO _last_sequence
|
||||
FROM sequences_summary
|
||||
ORDER BY sequence DESC
|
||||
LIMIT 1;
|
||||
|
||||
SELECT *
|
||||
INTO _planned_line
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
|
||||
|
||||
SELECT
|
||||
COALESCE(
|
||||
((lead(ts0) OVER (ORDER BY sequence)) - ts1),
|
||||
make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer)
|
||||
)
|
||||
INTO _lag
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence AND line = _last_sequence.line;
|
||||
|
||||
_incr = sign(_last_sequence.lsp - _last_sequence.fsp);
|
||||
|
||||
RAISE NOTICE '_planner_config: %', _planner_config;
|
||||
RAISE NOTICE '_last_sequence: %', _last_sequence;
|
||||
RAISE NOTICE '_planned_line: %', _planned_line;
|
||||
RAISE NOTICE '_incr: %', _incr;
|
||||
|
||||
-- Does the latest sequence match a planned sequence?
|
||||
IF _planned_line IS NULL THEN -- No it doesn't
|
||||
RAISE NOTICE 'Latest sequence shot does not match a planned sequence';
|
||||
SELECT * INTO _planned_line FROM planned_lines ORDER BY sequence ASC LIMIT 1;
|
||||
RAISE NOTICE '_planned_line: %', _planned_line;
|
||||
|
||||
IF _planned_line.sequence <= _last_sequence.sequence THEN
|
||||
RAISE NOTICE 'Renumbering the planned sequences starting from %', _planned_line.sequence + 1;
|
||||
-- Renumber the planned sequences starting from last shot sequence number + 1
|
||||
UPDATE planned_lines
|
||||
SET sequence = sequence + _last_sequence.sequence - _planned_line.sequence + 1;
|
||||
END IF;
|
||||
|
||||
-- The correction to make to the first planned line's ts0 will be based on either the last
|
||||
-- sequence's EOL + default line change time or the current time, whichever is later.
|
||||
_deltatime := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1) + make_interval(mins => (_planner_config->>'defaultLineChangeDuration')::integer), current_timestamp) - _planned_line.ts0;
|
||||
|
||||
-- Is the first of the planned lines start time in the past? (±5 mins)
|
||||
IF _planned_line.ts0 < (current_timestamp - make_interval(mins => 5)) THEN
|
||||
RAISE NOTICE 'First planned line is in the past. Adjusting times by %', _deltatime;
|
||||
-- Adjust the start / end time of the planned lines by assuming that we are at
|
||||
-- `defaultLineChangeDuration` minutes away from SOL of the first planned line.
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
ts0 = ts0 + _deltatime,
|
||||
ts1 = ts1 + _deltatime;
|
||||
END IF;
|
||||
|
||||
ELSE -- Yes it does
|
||||
RAISE NOTICE 'Latest sequence does match a planned sequence: %, %', _planned_line.sequence, _planned_line.line;
|
||||
|
||||
-- Is it online?
|
||||
IF EXISTS(SELECT 1 FROM raw_lines_files WHERE sequence = _last_sequence.sequence AND hash = '*online*') THEN
|
||||
-- Yes it is
|
||||
RAISE NOTICE 'Sequence % is online', _last_sequence.sequence;
|
||||
|
||||
-- Let us get the SOL from the events log if we can
|
||||
RAISE NOTICE 'Trying to set fsp, ts0 from events log FSP, FGSP';
|
||||
WITH e AS (
|
||||
SELECT * FROM event_log
|
||||
WHERE
|
||||
sequence = _last_sequence.sequence
|
||||
AND ('FSP' = ANY(labels) OR 'FGSP' = ANY(labels))
|
||||
ORDER BY tstamp LIMIT 1
|
||||
)
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
fsp = COALESCE(e.point, fsp),
|
||||
ts0 = COALESCE(e.tstamp, ts0)
|
||||
FROM e
|
||||
WHERE planned_lines.sequence = _last_sequence.sequence;
|
||||
|
||||
-- Shot interval
|
||||
_shotinterval := (_last_sequence.ts1 - _last_sequence.ts0) / abs(_last_sequence.lsp - _last_sequence.fsp);
|
||||
|
||||
RAISE NOTICE 'Estimating EOL from current shot interval: %', _shotinterval;
|
||||
|
||||
SELECT (abs(lsp-fsp) * _shotinterval + ts0) - ts1
|
||||
INTO _deltatime
|
||||
FROM planned_lines
|
||||
WHERE sequence = _last_sequence.sequence;
|
||||
|
||||
---- Set ts1 for the current sequence
|
||||
--UPDATE planned_lines
|
||||
--SET
|
||||
--ts1 = (abs(lsp-fsp) * _shotinterval) + ts0
|
||||
--WHERE sequence = _last_sequence.sequence;
|
||||
|
||||
RAISE NOTICE 'Adjustment is %', _deltatime;
|
||||
|
||||
IF abs(EXTRACT(EPOCH FROM _deltatime)) < 8 THEN
|
||||
RAISE NOTICE 'Adjustment too small (< 8 s), so not applying it';
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
-- Adjust ts1 for the current sequence
|
||||
UPDATE planned_lines
|
||||
SET ts1 = ts1 + _deltatime
|
||||
WHERE sequence = _last_sequence.sequence;
|
||||
|
||||
-- Now shift all sequences after
|
||||
UPDATE planned_lines
|
||||
SET ts0 = ts0 + _deltatime, ts1 = ts1 + _deltatime
|
||||
WHERE sequence > _last_sequence.sequence;
|
||||
|
||||
RAISE NOTICE 'Deleting planned sequences before %', _planned_line.sequence;
|
||||
-- Remove all previous planner entries.
|
||||
DELETE
|
||||
FROM planned_lines
|
||||
WHERE sequence < _last_sequence.sequence;
|
||||
|
||||
ELSE
|
||||
-- No it isn't
|
||||
RAISE NOTICE 'Sequence % is offline', _last_sequence.sequence;
|
||||
|
||||
-- We were supposed to finish at _planned_line.ts1 but we finished at:
|
||||
_tstamp := GREATEST(COALESCE(_last_sequence.ts1_final, _last_sequence.ts1), current_timestamp);
|
||||
-- WARNING Next line is for testing only
|
||||
--_tstamp := COALESCE(_last_sequence.ts1_final, _last_sequence.ts1);
|
||||
-- So we need to adjust timestamps by:
|
||||
_deltatime := _tstamp - _planned_line.ts1;
|
||||
|
||||
RAISE NOTICE 'Planned end: %, actual end: % (%, %)', _planned_line.ts1, _tstamp, _planned_line.sequence, _last_sequence.sequence;
|
||||
RAISE NOTICE 'Shifting times by % for sequences > %', _deltatime, _planned_line.sequence;
|
||||
-- NOTE: This won't work if sequences are not, err… sequential.
|
||||
-- NOTE: This has been known to happen in 2020.
|
||||
UPDATE planned_lines
|
||||
SET
|
||||
ts0 = ts0 + _deltatime,
|
||||
ts1 = ts1 + _deltatime
|
||||
WHERE sequence > _planned_line.sequence;
|
||||
|
||||
RAISE NOTICE 'Deleting planned sequences up to %', _planned_line.sequence;
|
||||
-- Remove all previous planner entries.
|
||||
DELETE
|
||||
FROM planned_lines
|
||||
WHERE sequence <= _last_sequence.sequence;
|
||||
|
||||
END IF;
|
||||
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
current_db_version TEXT;
|
||||
BEGIN
|
||||
|
||||
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
|
||||
|
||||
IF current_db_version >= '0.4.1' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Patch already applied';
|
||||
END IF;
|
||||
|
||||
IF current_db_version != '0.4.0' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Invalid database version: ' || current_db_version,
|
||||
HINT='Ensure all previous patches have been applied.';
|
||||
END IF;
|
||||
|
||||
FOR row IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'survey_%'
|
||||
ORDER BY schema_name
|
||||
LOOP
|
||||
CALL pg_temp.upgrade_survey_schema(row.schema_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.1"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.4.1"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
@@ -0,0 +1,98 @@
|
||||
-- Modify binning_parameters() to use project_configuration()
|
||||
--
|
||||
-- New schema version: 0.4.2
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This modifies binning_parameters() to use project_configuration()
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', schema_name;
|
||||
-- We need to set the search path because some of the trigger
|
||||
-- functions reference other tables in survey schemas assuming
|
||||
-- they are in the search path.
|
||||
EXECUTE format('SET search_path TO %I,public', schema_name);
|
||||
|
||||
CREATE OR REPLACE FUNCTION binning_parameters() RETURNS jsonb
|
||||
LANGUAGE sql STABLE LEAKPROOF PARALLEL SAFE
|
||||
AS $$
|
||||
SELECT project_configuration()->'binning' binning;
|
||||
$$;
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
current_db_version TEXT;
|
||||
BEGIN
|
||||
|
||||
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
|
||||
|
||||
IF current_db_version >= '0.4.2' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Patch already applied';
|
||||
END IF;
|
||||
|
||||
IF current_db_version != '0.4.1' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Invalid database version: ' || current_db_version,
|
||||
HINT='Ensure all previous patches have been applied.';
|
||||
END IF;
|
||||
|
||||
FOR row IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'survey_%'
|
||||
ORDER BY schema_name
|
||||
LOOP
|
||||
CALL pg_temp.upgrade_survey_schema(row.schema_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.2"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.4.2"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
164
etc/db/upgrades/upgrade30-v0.4.3-large-notification-payloads.sql
Normal file
164
etc/db/upgrades/upgrade30-v0.4.3-large-notification-payloads.sql
Normal file
@@ -0,0 +1,164 @@
|
||||
-- Support notification payloads larger than Postgres' NOTIFY limit.
|
||||
--
|
||||
-- New schema version: 0.4.3
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects the public schema only.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This creates a new table where large notification payloads are stored
|
||||
-- temporarily and from which they might be recalled by the notification
|
||||
-- listeners. It also creates a purge_notifications() procedure used to
|
||||
-- clean up old notifications from the notifications log and finally,
|
||||
-- modifies notify() to support these changes. When a large payload is
|
||||
-- encountered, the payload is stored in the notify_payloads table and
|
||||
-- a trimmed down version containing a notification_id is sent to listeners
|
||||
-- instead. Listeners can then query notify_payloads to retrieve the full
|
||||
-- payloads. It is the application layer's responsibility to delete old
|
||||
-- notifications.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_schema () AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating public schema';
|
||||
-- We need to set the search path because some of the trigger
|
||||
-- functions reference other tables in survey schemas assuming
|
||||
-- they are in the search path.
|
||||
EXECUTE format('SET search_path TO public');
|
||||
|
||||
CREATE TABLE IF NOT EXISTS public.notify_payloads (
|
||||
id SERIAL,
|
||||
tstamp timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
payload text NOT NULL DEFAULT '',
|
||||
PRIMARY KEY (id)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS notify_payload_tstamp ON notify_payloads (tstamp);
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.notify() RETURNS trigger
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
DECLARE
|
||||
channel text := TG_ARGV[0];
|
||||
pid text;
|
||||
payload text;
|
||||
notification text;
|
||||
payload_id integer;
|
||||
BEGIN
|
||||
|
||||
SELECT projects.pid INTO pid FROM projects WHERE schema = TG_TABLE_SCHEMA;
|
||||
|
||||
payload := json_build_object(
|
||||
'tstamp', CURRENT_TIMESTAMP,
|
||||
'operation', TG_OP,
|
||||
'schema', TG_TABLE_SCHEMA,
|
||||
'table', TG_TABLE_NAME,
|
||||
'old', row_to_json(OLD),
|
||||
'new', row_to_json(NEW),
|
||||
'pid', pid
|
||||
)::text;
|
||||
|
||||
IF octet_length(payload) < 1000 THEN
|
||||
PERFORM pg_notify(channel, payload);
|
||||
ELSE
|
||||
-- We need to find another solution
|
||||
-- FIXME Consider storing the payload in a temporary memory table,
|
||||
-- referenced by some form of autogenerated ID. Then send the ID
|
||||
-- as the payload and then it's up to the user to fetch the original
|
||||
-- payload if interested. This needs a mechanism to expire older payloads
|
||||
-- in the interest of conserving memory.
|
||||
|
||||
INSERT INTO notify_payloads (payload) VALUES (payload) RETURNING id INTO payload_id;
|
||||
|
||||
notification := json_build_object(
|
||||
'tstamp', CURRENT_TIMESTAMP,
|
||||
'operation', TG_OP,
|
||||
'schema', TG_TABLE_SCHEMA,
|
||||
'table', TG_TABLE_NAME,
|
||||
'pid', pid,
|
||||
'payload_id', payload_id
|
||||
)::text;
|
||||
|
||||
PERFORM pg_notify(channel, notification);
|
||||
RAISE INFO 'Payload over limit';
|
||||
END IF;
|
||||
RETURN NULL;
|
||||
END;
|
||||
$$;
|
||||
|
||||
CREATE PROCEDURE public.purge_notifications (age_seconds numeric DEFAULT 120) AS $$
|
||||
DELETE FROM notify_payloads WHERE EXTRACT(epoch FROM CURRENT_TIMESTAMP - tstamp) > age_seconds;
|
||||
$$ LANGUAGE sql;
|
||||
|
||||
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
current_db_version TEXT;
|
||||
BEGIN
|
||||
|
||||
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
|
||||
|
||||
IF current_db_version >= '0.4.3' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Patch already applied';
|
||||
END IF;
|
||||
|
||||
IF current_db_version != '0.4.2' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Invalid database version: ' || current_db_version,
|
||||
HINT='Ensure all previous patches have been applied.';
|
||||
END IF;
|
||||
|
||||
-- This upgrade modifies the `public` schema only, not individual
|
||||
-- project schemas.
|
||||
CALL pg_temp.upgrade_schema();
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_schema ();
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.3"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.4.3"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
@@ -0,0 +1,104 @@
|
||||
-- Add event_log_changes function
|
||||
--
|
||||
-- New schema version: 0.4.4
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This adds a function event_log_changes which returns the subset of
|
||||
-- events from event_log_full which have been modified after a
|
||||
-- given timestamp.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', schema_name;
|
||||
-- We need to set the search path because some of the trigger
|
||||
-- functions reference other tables in survey schemas assuming
|
||||
-- they are in the search path.
|
||||
EXECUTE format('SET search_path TO %I,public', schema_name);
|
||||
|
||||
CREATE OR REPLACE FUNCTION event_log_changes(ts0 timestamptz)
|
||||
RETURNS SETOF event_log_full
|
||||
LANGUAGE sql
|
||||
AS $$
|
||||
SELECT *
|
||||
FROM event_log_full
|
||||
WHERE lower(validity) > ts0 OR upper(validity) IS NOT NULL AND upper(validity) > ts0
|
||||
ORDER BY lower(validity);
|
||||
$$;
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
current_db_version TEXT;
|
||||
BEGIN
|
||||
|
||||
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
|
||||
|
||||
IF current_db_version >= '0.4.4' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Patch already applied';
|
||||
END IF;
|
||||
|
||||
IF current_db_version != '0.4.3' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Invalid database version: ' || current_db_version,
|
||||
HINT='Ensure all previous patches have been applied.';
|
||||
END IF;
|
||||
|
||||
FOR row IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'survey_%'
|
||||
ORDER BY schema_name
|
||||
LOOP
|
||||
CALL pg_temp.upgrade_survey_schema(row.schema_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.4"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.4.4"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
@@ -0,0 +1,147 @@
|
||||
-- Turn project_summary into a materialised view
|
||||
--
|
||||
-- New schema version: 0.4.5
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- The project_summary view is quite a bottleneck. While it itself is
|
||||
-- not the real culprit (rather the underlying views are), this is one
|
||||
-- relatively cheap way of improving responsiveness from the client's
|
||||
-- point of view.
|
||||
-- We leave the details of how / when to refresh the view to the non-
|
||||
-- database code.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', schema_name;
|
||||
-- We need to set the search path because some of the trigger
|
||||
-- functions reference other tables in survey schemas assuming
|
||||
-- they are in the search path.
|
||||
EXECUTE format('SET search_path TO %I,public', schema_name);
|
||||
|
||||
DROP VIEW project_summary;
|
||||
|
||||
CREATE MATERIALIZED VIEW project_summary AS
|
||||
WITH fls AS (
|
||||
SELECT
|
||||
avg((final_lines_summary.duration / ((final_lines_summary.num_points - 1))::double precision)) AS shooting_rate,
|
||||
avg((final_lines_summary.length / date_part('epoch'::text, final_lines_summary.duration))) AS speed,
|
||||
sum(final_lines_summary.duration) AS prod_duration,
|
||||
sum(final_lines_summary.length) AS prod_distance
|
||||
FROM final_lines_summary
|
||||
), project AS (
|
||||
SELECT
|
||||
p.pid,
|
||||
p.name,
|
||||
p.schema
|
||||
FROM public.projects p
|
||||
WHERE (split_part(current_setting('search_path'::text), ','::text, 1) = p.schema)
|
||||
)
|
||||
SELECT
|
||||
project.pid,
|
||||
project.name,
|
||||
project.schema,
|
||||
( SELECT count(*) AS count
|
||||
FROM preplot_lines
|
||||
WHERE (preplot_lines.class = 'V'::bpchar)) AS lines,
|
||||
ps.total,
|
||||
ps.virgin,
|
||||
ps.prime,
|
||||
ps.other,
|
||||
ps.ntba,
|
||||
ps.remaining,
|
||||
( SELECT to_json(fs.*) AS to_json
|
||||
FROM final_shots fs
|
||||
ORDER BY fs.tstamp
|
||||
LIMIT 1) AS fsp,
|
||||
( SELECT to_json(fs.*) AS to_json
|
||||
FROM final_shots fs
|
||||
ORDER BY fs.tstamp DESC
|
||||
LIMIT 1) AS lsp,
|
||||
( SELECT count(*) AS count
|
||||
FROM raw_lines rl) AS seq_raw,
|
||||
( SELECT count(*) AS count
|
||||
FROM final_lines rl) AS seq_final,
|
||||
fls.prod_duration,
|
||||
fls.prod_distance,
|
||||
fls.speed AS shooting_rate
|
||||
FROM preplot_summary ps,
|
||||
fls,
|
||||
project;
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
current_db_version TEXT;
|
||||
BEGIN
|
||||
|
||||
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
|
||||
|
||||
IF current_db_version >= '0.4.5' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Patch already applied';
|
||||
END IF;
|
||||
|
||||
IF current_db_version != '0.4.4' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Invalid database version: ' || current_db_version,
|
||||
HINT='Ensure all previous patches have been applied.';
|
||||
END IF;
|
||||
|
||||
FOR row IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'survey_%'
|
||||
ORDER BY schema_name
|
||||
LOOP
|
||||
CALL pg_temp.upgrade_survey_schema(row.schema_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.4.5"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.4.5"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
164
etc/db/upgrades/upgrade33-v0.5.0-sailline-ancillary-data.sql
Normal file
164
etc/db/upgrades/upgrade33-v0.5.0-sailline-ancillary-data.sql
Normal file
@@ -0,0 +1,164 @@
|
||||
-- Sailline ancillary data
|
||||
--
|
||||
-- New schema version: 0.5.0
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- Issue #264 calls for associating sail and acquisition lines as well
|
||||
-- as indicating expected acquisition direction, and other data which
|
||||
-- cannot be provided via standard import formats such as SPS or P1/90.
|
||||
--
|
||||
-- We support this via an additional table that holds most of the required
|
||||
-- data. This data can simply be inferred from regular preplots, e.g., line
|
||||
-- direction can be deduced from preplot point order, and sail / source
|
||||
-- line offsets can be taken from P1/90 headers or from a configuration
|
||||
-- parameter. Alternatively, and in preference, the data can be provided
|
||||
-- explicitly, which is what issue #264 asks for.
|
||||
--
|
||||
-- In principle, this makes at least some of the attributes of `preplot_lines`
|
||||
-- redundant (at least `incr` and `ntba`) but we will leave them there for
|
||||
-- the time being as technical debt.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', schema_name;
|
||||
-- We need to set the search path because some of the trigger
|
||||
-- functions reference other tables in survey schemas assuming
|
||||
-- they are in the search path.
|
||||
EXECUTE format('SET search_path TO %I,public', schema_name);
|
||||
|
||||
|
||||
CREATE TABLE IF NOT EXISTS preplot_saillines
|
||||
(
|
||||
sailline integer NOT NULL,
|
||||
line integer NOT NULL,
|
||||
sailline_class character(1) NOT NULL,
|
||||
line_class character(1) NOT NULL,
|
||||
incr boolean NOT NULL DEFAULT true,
|
||||
ntba boolean NOT NULL DEFAULT false,
|
||||
remarks text NOT NULL DEFAULT '',
|
||||
meta jsonb NOT NULL DEFAULT '{}'::jsonb,
|
||||
hash text NULL, -- Theoretically the info in this table could all be inferred.
|
||||
PRIMARY KEY (sailline, sailline_class, line, line_class, incr),
|
||||
CONSTRAINT fk_sailline FOREIGN KEY (sailline, sailline_class)
|
||||
REFERENCES preplot_lines (line, class)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_line FOREIGN KEY (line, line_class)
|
||||
REFERENCES preplot_lines (line, class)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_hash FOREIGN KEY (hash)
|
||||
REFERENCES files (hash) MATCH SIMPLE
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE,
|
||||
CHECK (sailline_class = 'V' AND sailline_class != line_class)
|
||||
);
|
||||
|
||||
COMMENT ON TABLE preplot_saillines
|
||||
IS 'We explicitly associate each preplot sailline (aka vessel line) with zero or more source lines. This information can be inferred from preplot files, e.g., via a sailline offset value, or explicitly provided.';
|
||||
|
||||
-- Let us copy whatever information we can from existing tables or views
|
||||
|
||||
INSERT INTO preplot_saillines
|
||||
(sailline, line, sailline_class, line_class, incr, ntba, remarks, meta)
|
||||
SELECT DISTINCT
|
||||
sailline, psp.line, 'V' sailline_class, psp.class line_class, pl.incr, pl.ntba, pl.remarks, pl.meta
|
||||
FROM preplot_saillines_points psp
|
||||
INNER JOIN preplot_lines pl ON psp.sailline = pl.line AND pl.class = 'V'
|
||||
ORDER BY sailline
|
||||
ON CONFLICT DO NOTHING;
|
||||
|
||||
-- We need to recreate the preplot_saillines_points view
|
||||
|
||||
CREATE OR REPLACE VIEW preplot_saillines_points AS
|
||||
SELECT psl.sailline,
|
||||
psl.ntba AS sailline_ntba,
|
||||
psl.line,
|
||||
pps.point,
|
||||
pps.class,
|
||||
pps.ntba,
|
||||
pps.geometry,
|
||||
pps.meta
|
||||
FROM preplot_saillines psl
|
||||
INNER JOIN preplot_points pps
|
||||
ON psl.line = pps.line AND psl.line_class = pps.class;
|
||||
|
||||
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
current_db_version TEXT;
|
||||
BEGIN
|
||||
|
||||
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
|
||||
|
||||
IF current_db_version >= '0.5.0' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Patch already applied';
|
||||
END IF;
|
||||
|
||||
IF current_db_version != '0.4.5' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Invalid database version: ' || current_db_version,
|
||||
HINT='Ensure all previous patches have been applied.';
|
||||
END IF;
|
||||
|
||||
FOR row IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'survey_%'
|
||||
ORDER BY schema_name
|
||||
LOOP
|
||||
CALL pg_temp.upgrade_survey_schema(row.schema_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.0"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.5.0"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
119
etc/db/upgrades/upgrade34-v0.5.1-fix-sequences-detail-view.sql
Normal file
119
etc/db/upgrades/upgrade34-v0.5.1-fix-sequences-detail-view.sql
Normal file
@@ -0,0 +1,119 @@
|
||||
-- Sailline ancillary data
|
||||
--
|
||||
-- New schema version: 0.5.1
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- The sequences_detail view wrongly associates source lines and shot
|
||||
-- points when it should be associating saillines and shot points instead.
|
||||
--
|
||||
-- This updates fixes that issue (#307).
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', schema_name;
|
||||
-- We need to set the search path because some of the trigger
|
||||
-- functions reference other tables in survey schemas assuming
|
||||
-- they are in the search path.
|
||||
EXECUTE format('SET search_path TO %I,public', schema_name);
|
||||
|
||||
CREATE OR REPLACE VIEW sequences_detail
|
||||
AS
|
||||
SELECT rl.sequence,
|
||||
rl.line AS sailline,
|
||||
rs.line,
|
||||
rs.point,
|
||||
rs.tstamp,
|
||||
rs.objref AS objrefraw,
|
||||
fs.objref AS objreffinal,
|
||||
st_transform(pp.geometry, 4326) AS geometrypreplot,
|
||||
st_transform(rs.geometry, 4326) AS geometryraw,
|
||||
st_transform(fs.geometry, 4326) AS geometryfinal,
|
||||
ij_error(rs.line::double precision, rs.point::double precision, rs.geometry) AS errorraw,
|
||||
ij_error(rs.line::double precision, rs.point::double precision, fs.geometry) AS errorfinal,
|
||||
json_build_object('preplot', pp.meta, 'raw', rs.meta, 'final', fs.meta) AS meta
|
||||
FROM raw_lines rl
|
||||
INNER JOIN preplot_saillines psl ON rl.line = psl.sailline
|
||||
INNER JOIN raw_shots rs ON rs.sequence = rl.sequence AND rs.line = psl.line
|
||||
INNER JOIN preplot_points pp ON psl.line = pp.line AND psl.line_class = pp.class AND rs.point = pp.point
|
||||
LEFT JOIN final_shots fs ON rl.sequence = fs.sequence AND rs.point = fs.point;
|
||||
|
||||
ALTER TABLE sequences_detail
|
||||
OWNER TO postgres;
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
current_db_version TEXT;
|
||||
BEGIN
|
||||
|
||||
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
|
||||
|
||||
IF current_db_version >= '0.5.1' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Patch already applied';
|
||||
END IF;
|
||||
|
||||
IF current_db_version != '0.5.0' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Invalid database version: ' || current_db_version,
|
||||
HINT='Ensure all previous patches have been applied.';
|
||||
END IF;
|
||||
|
||||
FOR row IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'survey_%'
|
||||
ORDER BY schema_name
|
||||
LOOP
|
||||
CALL pg_temp.upgrade_survey_schema(row.schema_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.1"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.5.1"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
@@ -0,0 +1,145 @@
|
||||
-- Fix preplot_lines_summary view
|
||||
--
|
||||
-- New schema version: 0.5.2
|
||||
--
|
||||
-- WARNING: This update is buggy and does not give the desired
|
||||
-- results. Schema version 0.5.4 fixes this.
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- Following introduction of `preplot_saillines` (0.5.0), the incr and
|
||||
-- ntba statuses are stored in a separate table, not in `preplot_lines`
|
||||
-- (TODO: a future upgrade should remove those columns from `preplot_lines`)
|
||||
--
|
||||
-- Now any views referencing `incr` and `ntba` must be updated to point to
|
||||
-- the new location of those attributes.
|
||||
--
|
||||
-- This update fixes #312.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', schema_name;
|
||||
-- We need to set the search path because some of the trigger
|
||||
-- functions reference other tables in survey schemas assuming
|
||||
-- they are in the search path.
|
||||
EXECUTE format('SET search_path TO %I,public', schema_name);
|
||||
|
||||
CREATE OR REPLACE VIEW preplot_lines_summary
|
||||
AS
|
||||
WITH summary AS (
|
||||
SELECT DISTINCT pp.line, pp.class,
|
||||
first_value(pp.point) OVER w AS p0,
|
||||
last_value(pp.point) OVER w AS p1,
|
||||
count(pp.point) OVER w AS num_points,
|
||||
st_distance(first_value(pp.geometry) OVER w, last_value(pp.geometry) OVER w) AS length,
|
||||
st_azimuth(first_value(pp.geometry) OVER w, last_value(pp.geometry) OVER w) * 180::double precision / pi() AS azimuth0,
|
||||
st_azimuth(last_value(pp.geometry) OVER w, first_value(pp.geometry) OVER w) * 180::double precision / pi() AS azimuth1
|
||||
FROM preplot_points pp
|
||||
WHERE pp.class = 'V'::bpchar
|
||||
WINDOW w AS (PARTITION BY pp.line ORDER BY pp.point ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
|
||||
)
|
||||
SELECT psl.line,
|
||||
CASE
|
||||
WHEN psl.incr THEN s.p0
|
||||
ELSE s.p1
|
||||
END AS fsp,
|
||||
CASE
|
||||
WHEN psl.incr THEN s.p1
|
||||
ELSE s.p0
|
||||
END AS lsp,
|
||||
s.num_points,
|
||||
s.length,
|
||||
CASE
|
||||
WHEN psl.incr THEN s.azimuth0
|
||||
ELSE s.azimuth1
|
||||
END AS azimuth,
|
||||
psl.incr,
|
||||
psl.remarks
|
||||
FROM summary s
|
||||
JOIN preplot_saillines psl ON psl.sailline_class = s.class AND s.line = psl.line
|
||||
ORDER BY psl.line, incr;
|
||||
|
||||
|
||||
ALTER TABLE preplot_lines_summary
|
||||
OWNER TO postgres;
|
||||
COMMENT ON VIEW preplot_lines_summary
|
||||
IS 'Summarises ''V'' (vessel sailline) preplot lines.';
|
||||
|
||||
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
current_db_version TEXT;
|
||||
BEGIN
|
||||
|
||||
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
|
||||
|
||||
IF current_db_version >= '0.5.2' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Patch already applied';
|
||||
END IF;
|
||||
|
||||
IF current_db_version != '0.5.1' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Invalid database version: ' || current_db_version,
|
||||
HINT='Ensure all previous patches have been applied.';
|
||||
END IF;
|
||||
|
||||
FOR row IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'survey_%'
|
||||
ORDER BY schema_name
|
||||
LOOP
|
||||
CALL pg_temp.upgrade_survey_schema(row.schema_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.2"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.5.2"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
@@ -0,0 +1,132 @@
|
||||
-- Fix final_lines_summary view
|
||||
--
|
||||
-- New schema version: 0.5.3
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This fixes a long-standing bug, where if the sail and source lines are
|
||||
-- the same, the number of missing shots will be miscounted.
|
||||
--
|
||||
-- This update fixes #313.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', schema_name;
|
||||
-- We need to set the search path because some of the trigger
|
||||
-- functions reference other tables in survey schemas assuming
|
||||
-- they are in the search path.
|
||||
EXECUTE format('SET search_path TO %I,public', schema_name);
|
||||
|
||||
CREATE OR REPLACE VIEW final_lines_summary
|
||||
AS
|
||||
WITH summary AS (
|
||||
SELECT DISTINCT fs.sequence,
|
||||
first_value(fs.point) OVER w AS fsp,
|
||||
last_value(fs.point) OVER w AS lsp,
|
||||
first_value(fs.tstamp) OVER w AS ts0,
|
||||
last_value(fs.tstamp) OVER w AS ts1,
|
||||
count(fs.point) OVER w AS num_points,
|
||||
count(pp.point) OVER w AS num_preplots,
|
||||
st_distance(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) AS length,
|
||||
st_azimuth(first_value(fs.geometry) OVER w, last_value(fs.geometry) OVER w) * 180::double precision / pi() AS azimuth
|
||||
FROM final_shots fs
|
||||
LEFT JOIN preplot_points pp USING (line, point)
|
||||
WINDOW w AS (PARTITION BY fs.sequence ORDER BY fs.tstamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
|
||||
)
|
||||
SELECT fl.sequence,
|
||||
fl.line,
|
||||
s.fsp,
|
||||
s.lsp,
|
||||
s.ts0,
|
||||
s.ts1,
|
||||
s.ts1 - s.ts0 AS duration,
|
||||
s.num_points,
|
||||
(( SELECT count(*) AS count
|
||||
FROM preplot_points
|
||||
WHERE preplot_points.line = fl.line AND (preplot_points.point >= s.fsp AND preplot_points.point <= s.lsp OR preplot_points.point >= s.lsp AND preplot_points.point <= s.fsp))) - s.num_preplots AS missing_shots,
|
||||
s.length,
|
||||
s.azimuth,
|
||||
fl.remarks,
|
||||
fl.meta
|
||||
FROM summary s
|
||||
JOIN final_lines fl USING (sequence);
|
||||
|
||||
ALTER TABLE final_lines_summary
|
||||
OWNER TO postgres;
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
current_db_version TEXT;
|
||||
BEGIN
|
||||
|
||||
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
|
||||
|
||||
IF current_db_version >= '0.5.3' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Patch already applied';
|
||||
END IF;
|
||||
|
||||
IF current_db_version != '0.5.2' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Invalid database version: ' || current_db_version,
|
||||
HINT='Ensure all previous patches have been applied.';
|
||||
END IF;
|
||||
|
||||
FOR row IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'survey_%'
|
||||
ORDER BY schema_name
|
||||
LOOP
|
||||
CALL pg_temp.upgrade_survey_schema(row.schema_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.3"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.5.3"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
@@ -0,0 +1,145 @@
|
||||
-- Fix preplot_lines_summary view
|
||||
--
|
||||
-- New schema version: 0.5.4
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade affects all schemas in the database.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- Fixes upgrade 35 (0.5.2). The original description of 0.5.2 is included
|
||||
-- below for ease of reference:
|
||||
--
|
||||
-- Following introduction of `preplot_saillines` (0.5.0), the incr and
|
||||
-- ntba statuses are stored in a separate table, not in `preplot_lines`
|
||||
-- (TODO: a future upgrade should remove those columns from `preplot_lines`)
|
||||
--
|
||||
-- Now any views referencing `incr` and `ntba` must be updated to point to
|
||||
-- the new location of those attributes.
|
||||
--
|
||||
-- This update fixes #312.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_survey_schema (schema_name text) AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', schema_name;
|
||||
-- We need to set the search path because some of the trigger
|
||||
-- functions reference other tables in survey schemas assuming
|
||||
-- they are in the search path.
|
||||
EXECUTE format('SET search_path TO %I,public', schema_name);
|
||||
|
||||
CREATE OR REPLACE VIEW preplot_lines_summary
|
||||
AS
|
||||
WITH summary AS (
|
||||
SELECT DISTINCT pp.line,
|
||||
pp.class,
|
||||
first_value(pp.point) OVER w AS p0,
|
||||
last_value(pp.point) OVER w AS p1,
|
||||
count(pp.point) OVER w AS num_points,
|
||||
st_distance(first_value(pp.geometry) OVER w, last_value(pp.geometry) OVER w) AS length,
|
||||
st_azimuth(first_value(pp.geometry) OVER w, last_value(pp.geometry) OVER w) * 180::double precision / pi() AS azimuth0,
|
||||
st_azimuth(last_value(pp.geometry) OVER w, first_value(pp.geometry) OVER w) * 180::double precision / pi() AS azimuth1
|
||||
FROM preplot_points pp
|
||||
WHERE pp.class = 'V'::bpchar
|
||||
WINDOW w AS (PARTITION BY pp.line ORDER BY pp.point ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)
|
||||
)
|
||||
SELECT DISTINCT psl.sailline AS line,
|
||||
CASE
|
||||
WHEN psl.incr THEN s.p0
|
||||
ELSE s.p1
|
||||
END AS fsp,
|
||||
CASE
|
||||
WHEN psl.incr THEN s.p1
|
||||
ELSE s.p0
|
||||
END AS lsp,
|
||||
s.num_points,
|
||||
s.length,
|
||||
CASE
|
||||
WHEN psl.incr THEN s.azimuth0
|
||||
ELSE s.azimuth1
|
||||
END AS azimuth,
|
||||
psl.incr,
|
||||
psl.remarks
|
||||
FROM summary s
|
||||
JOIN preplot_saillines psl ON psl.sailline_class = s.class AND s.line = psl.sailline
|
||||
ORDER BY psl.sailline, psl.incr;
|
||||
|
||||
ALTER TABLE preplot_lines_summary
|
||||
OWNER TO postgres;
|
||||
COMMENT ON VIEW preplot_lines_summary
|
||||
IS 'Summarises ''V'' (vessel sailline) preplot lines.';
|
||||
|
||||
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
current_db_version TEXT;
|
||||
BEGIN
|
||||
|
||||
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
|
||||
|
||||
IF current_db_version >= '0.5.4' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Patch already applied';
|
||||
END IF;
|
||||
|
||||
IF current_db_version != '0.5.3' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Invalid database version: ' || current_db_version,
|
||||
HINT='Ensure all previous patches have been applied.';
|
||||
END IF;
|
||||
|
||||
FOR row IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'survey_%'
|
||||
ORDER BY schema_name
|
||||
LOOP
|
||||
CALL pg_temp.upgrade_survey_schema(row.schema_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_survey_schema (schema_name text);
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.5.4"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.5.4"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
110
etc/db/upgrades/upgrade38-v0.6.0-add-keystore-table.sql
Normal file
110
etc/db/upgrades/upgrade38-v0.6.0-add-keystore-table.sql
Normal file
@@ -0,0 +1,110 @@
|
||||
-- Fix final_lines_summary view
|
||||
--
|
||||
-- New schema version: 0.6.0
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade only affects the `public` schema.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This update adds a `keystore` table, intended for storing arbitrary
|
||||
-- key / value pairs which, unlike, the `info` tables, is not meant to
|
||||
-- be directly accessible via the API. Its main purpose as of this writing
|
||||
-- is to store user definitions (see #176, #177, #180).
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', 'public';
|
||||
SET search_path TO public;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS keystore (
|
||||
type TEXT NOT NULL, -- A class of data to be stored
|
||||
key TEXT NOT NULL, -- A key that is unique for the class and access type
|
||||
last_modified TIMESTAMP -- To detect update conflicts
|
||||
DEFAULT CURRENT_TIMESTAMP,
|
||||
data jsonb,
|
||||
PRIMARY KEY (type, key) -- Composite primary key
|
||||
);
|
||||
|
||||
-- Create a function to update the last_modified timestamp
|
||||
CREATE OR REPLACE FUNCTION update_last_modified()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
NEW.last_modified = CURRENT_TIMESTAMP;
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Create a trigger that calls the function before each update
|
||||
CREATE OR REPLACE TRIGGER update_keystore_last_modified
|
||||
BEFORE UPDATE ON keystore
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION update_last_modified();
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
current_db_version TEXT;
|
||||
BEGIN
|
||||
|
||||
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
|
||||
|
||||
IF current_db_version >= '0.6.0' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Patch already applied';
|
||||
END IF;
|
||||
|
||||
IF current_db_version != '0.5.4' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Invalid database version: ' || current_db_version,
|
||||
HINT='Ensure all previous patches have been applied.';
|
||||
END IF;
|
||||
|
||||
CALL pg_temp.upgrade_database();
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_database ();
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.0"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.6.0"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
108
etc/db/upgrades/upgrade39-v0.6.1-add-default-user.sql
Normal file
108
etc/db/upgrades/upgrade39-v0.6.1-add-default-user.sql
Normal file
@@ -0,0 +1,108 @@
|
||||
-- Fix final_lines_summary view
|
||||
--
|
||||
-- New schema version: 0.6.1
|
||||
--
|
||||
-- ATTENTION:
|
||||
--
|
||||
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
|
||||
--
|
||||
--
|
||||
-- NOTE: This upgrade only affects the `public` schema.
|
||||
-- NOTE: Each application starts a transaction, which must be committed
|
||||
-- or rolled back.
|
||||
--
|
||||
-- This update adds a default user to the system (see #176, #177, #180).
|
||||
-- The default user can only be invoked by connecting from localhost.
|
||||
--
|
||||
-- This user has full access to every project via the organisations
|
||||
-- permissions wildcard: `{"*": {read: true, write: true, edit: true}}`
|
||||
-- and can be used to bootstrap the system by creating other users
|
||||
-- and assigning organisational permissions.
|
||||
--
|
||||
-- To apply, run as the dougal user:
|
||||
--
|
||||
-- psql <<EOF
|
||||
-- \i $THIS_FILE
|
||||
-- COMMIT;
|
||||
-- EOF
|
||||
--
|
||||
-- NOTE: It can be applied multiple times without ill effect.
|
||||
--
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
|
||||
BEGIN
|
||||
RAISE NOTICE '%', notice;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
|
||||
BEGIN
|
||||
|
||||
RAISE NOTICE 'Updating schema %', 'public';
|
||||
SET search_path TO public;
|
||||
|
||||
INSERT INTO keystore (type, key, data)
|
||||
VALUES ('user', '6f1e7159-4ca0-4ae4-ab4e-89078166cc10', '
|
||||
{
|
||||
"id": "6f1e7159-4ca0-4ae4-ab4e-89078166cc10",
|
||||
"ip": "127.0.0.0/24",
|
||||
"name": "☠️",
|
||||
"colour": "red",
|
||||
"active": true,
|
||||
"organisations": {
|
||||
"*": {
|
||||
"read": true,
|
||||
"write": true,
|
||||
"edit": true
|
||||
}
|
||||
}
|
||||
}
|
||||
'::jsonb)
|
||||
ON CONFLICT (type, key) DO NOTHING;
|
||||
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
|
||||
DECLARE
|
||||
row RECORD;
|
||||
current_db_version TEXT;
|
||||
BEGIN
|
||||
|
||||
SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';
|
||||
|
||||
IF current_db_version >= '0.6.1' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Patch already applied';
|
||||
END IF;
|
||||
|
||||
IF current_db_version != '0.6.0' THEN
|
||||
RAISE EXCEPTION
|
||||
USING MESSAGE='Invalid database version: ' || current_db_version,
|
||||
HINT='Ensure all previous patches have been applied.';
|
||||
END IF;
|
||||
|
||||
CALL pg_temp.upgrade_database();
|
||||
END;
|
||||
$outer$ LANGUAGE plpgsql;
|
||||
|
||||
CALL pg_temp.upgrade();
|
||||
|
||||
CALL pg_temp.show_notice('Cleaning up');
|
||||
DROP PROCEDURE pg_temp.upgrade_database ();
|
||||
DROP PROCEDURE pg_temp.upgrade ();
|
||||
|
||||
CALL pg_temp.show_notice('Updating db_schema version');
|
||||
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.1"}')
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = public.info.value || '{"db_schema": "0.6.1"}' WHERE public.info.key = 'version';
|
||||
|
||||
|
||||
CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
|
||||
DROP PROCEDURE pg_temp.show_notice (notice text);
|
||||
|
||||
--
|
||||
--NOTE Run `COMMIT;` now if all went well
|
||||
--
|
||||
@@ -0,0 +1,106 @@
|
||||
-- Add per-project "organisations" access configuration
--
-- New schema version: 0.6.2
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade only affects the `public` schema.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update adds an "organisations" section to the configuration,
-- with a default configured organisation of "WGP" with full access.
-- This is so that projects can be made accessible after migrating
-- to the new permissions architecture.
--
-- In addition, projects with an id starting with "eq" are assumed to
-- be Equinor projects, and an additional organisation is added with
-- read-only access. This is intended for clients, which should be
-- assigned to the "Equinor organisation".
--
-- Finally, we assign the vessel to the "WGP" organisation (full access)
-- so that we can actually use administrative endpoints.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

-- Temporary helper: emits a NOTICE; dropped again at the end of this script.
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

-- The actual schema changes for this patch.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- Add "organisations" section to configurations, if not already present
  UPDATE projects
  SET
    meta = jsonb_set(meta, '{organisations}', '{"WGP": {"read": true, "write": true, "edit": true}}'::jsonb, true)
  WHERE meta->'organisations' IS NULL;

  -- Add (or overwrite!) "organisations.Equinor" giving read-only access (can be changed later via API)
  UPDATE projects
  SET
    meta = jsonb_set(meta, '{organisations, Equinor}', '{"read": true, "write": false, "edit": false}'::jsonb, true)
  WHERE pid LIKE 'eq%';

END;
$outer$ LANGUAGE plpgsql;

-- Version guard: only applies the patch when upgrading from exactly 0.6.1.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;  -- not used; kept for parity with the other upgrade scripts
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  -- NOTE(review): current_db_version is TEXT, so '>=' is a lexicographic
  -- comparison. Correct for these values, but would misorder e.g.
  -- '0.6.10' vs '0.6.2'. Also: if the 'version' row is missing, both
  -- guards evaluate to NULL and neither fires — confirm that is intended.
  IF current_db_version >= '0.6.2' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.1' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
    HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.2"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.2"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
|
||||
109
etc/db/upgrades/upgrade41-v0.6.3-add-comparisons.sql
Normal file
109
etc/db/upgrades/upgrade41-v0.6.3-add-comparisons.sql
Normal file
@@ -0,0 +1,109 @@
|
||||
-- Add `comparisons` schema and table for 4D prospect comparison data
--
-- New schema version: 0.6.3
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade creates a new schema called `comparisons`.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update adds a `comparisons` table to a `comparisons` schema.
-- The `comparisons.comparisons` table holds 4D prospect comparison data.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

-- Temporary helper: emits a NOTICE; dropped again at the end of this script.
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

-- The actual schema changes for this patch.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- BEGIN

  CREATE SCHEMA IF NOT EXISTS comparisons
      AUTHORIZATION postgres;

  COMMENT ON SCHEMA comparisons
      IS 'Holds 4D comparison data and logic';

  -- One row per (baseline, monitor, type) comparison; the binary result
  -- lives in `data`, free-form metadata in `meta`.
  CREATE TABLE IF NOT EXISTS comparisons.comparisons
  (
      type text COLLATE pg_catalog."default" NOT NULL,
      baseline_pid text COLLATE pg_catalog."default" NOT NULL,
      monitor_pid text COLLATE pg_catalog."default" NOT NULL,
      data bytea,
      meta jsonb NOT NULL DEFAULT '{}'::jsonb,
      CONSTRAINT comparisons_pkey PRIMARY KEY (baseline_pid, monitor_pid, type)
  )

  TABLESPACE pg_default;

  ALTER TABLE IF EXISTS comparisons.comparisons
      OWNER to postgres;

  -- END

END;
$outer$ LANGUAGE plpgsql;

-- Version guard: only applies the patch when upgrading from exactly 0.6.2.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;  -- not used; kept for parity with the other upgrade scripts
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  -- NOTE(review): lexicographic TEXT comparison — fine for these values,
  -- but would misorder e.g. '0.6.10' vs '0.6.2'.
  IF current_db_version >= '0.6.3' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.2' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
    HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.3"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.3"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
|
||||
169
etc/db/upgrades/upgrade42-v0.6.4-notify-exclude-columns.sql
Normal file
169
etc/db/upgrades/upgrade42-v0.6.4-notify-exclude-columns.sql
Normal file
@@ -0,0 +1,169 @@
|
||||
-- Extend notify() to accept column names to exclude from notifications
--
-- New schema version: 0.6.4
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update modifies notify() to accept, as optional arguments, the
-- names of columns that are to be *excluded* from the notification.
-- It is intended for tables with large columns which are however of
-- no particular interest in a notification.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

-- Temporary helper: emits a NOTICE; dropped again at the end of this script.
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

-- The actual schema changes for this patch.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- BEGIN

  -- Trigger function. Argument convention (via TG_ARGV):
  --   TG_ARGV[0]    — NOTIFY channel name
  --   TG_ARGV[1..]  — names of columns to strip from old/new payloads
  CREATE OR REPLACE FUNCTION public.notify()
      RETURNS trigger
      LANGUAGE 'plpgsql'
      COST 100
      VOLATILE NOT LEAKPROOF
  AS $BODY$
  DECLARE
    channel text := TG_ARGV[0];
    pid text;
    payload text;
    notification text;
    payload_id integer;
    old_json jsonb;
    new_json jsonb;
    excluded_col text;
    i integer;
  BEGIN

    -- Fetch pid for the project whose schema the trigger fired in
    SELECT projects.pid INTO pid FROM projects WHERE schema = TG_TABLE_SCHEMA;

    -- Build old and new as jsonb, excluding specified columns if provided
    IF OLD IS NOT NULL THEN
      old_json := row_to_json(OLD)::jsonb;
      -- TG_ARGV is 0-based; args 1 .. TG_NARGS-1 are excluded column names
      FOR i IN 1 .. TG_NARGS - 1 LOOP
        excluded_col := TG_ARGV[i];
        old_json := old_json - excluded_col;
      END LOOP;
    ELSE
      old_json := NULL;
    END IF;

    IF NEW IS NOT NULL THEN
      new_json := row_to_json(NEW)::jsonb;
      FOR i IN 1 .. TG_NARGS - 1 LOOP
        excluded_col := TG_ARGV[i];
        new_json := new_json - excluded_col;
      END LOOP;
    ELSE
      new_json := NULL;
    END IF;

    -- Build payload
    payload := json_build_object(
      'tstamp', CURRENT_TIMESTAMP,
      'operation', TG_OP,
      'schema', TG_TABLE_SCHEMA,
      'table', TG_TABLE_NAME,
      'old', old_json,
      'new', new_json,
      'pid', pid
    )::text;

    -- Handle large payloads: under 1000 bytes, notify directly;
    -- otherwise park the payload in notify_payloads and send its id.
    IF octet_length(payload) < 1000 THEN
      PERFORM pg_notify(channel, payload);
    ELSE
      -- Store large payload and notify with ID (as before)
      INSERT INTO notify_payloads (payload) VALUES (payload) RETURNING id INTO payload_id;

      notification := json_build_object(
        'tstamp', CURRENT_TIMESTAMP,
        'operation', TG_OP,
        'schema', TG_TABLE_SCHEMA,
        'table', TG_TABLE_NAME,
        'pid', pid,
        'payload_id', payload_id
      )::text;

      PERFORM pg_notify(channel, notification);
      RAISE INFO 'Payload over limit';
    END IF;
    -- Return value is ignored for AFTER triggers
    RETURN NULL;
  END;
  $BODY$;

  ALTER FUNCTION public.notify()
  OWNER TO postgres;

  -- END

END;
$outer$ LANGUAGE plpgsql;

-- Version guard: only applies the patch when upgrading from exactly 0.6.3.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;  -- not used; kept for parity with the other upgrade scripts
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  -- NOTE(review): lexicographic TEXT comparison — fine for these values,
  -- but would misorder e.g. '0.6.10' vs '0.6.2'.
  IF current_db_version >= '0.6.4' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.3' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
    HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.4"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.4"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
|
||||
@@ -0,0 +1,96 @@
|
||||
-- Add change-notification trigger to comparisons.comparisons
--
-- New schema version: 0.6.5
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the `comparisons` schema.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This update adds a row-level trigger on comparisons.comparisons that
-- calls public.notify() after INSERT, UPDATE or DELETE on the
-- 'comparisons' channel, passing 'data' as a column to be excluded
-- from the notification payload.
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

-- Temporary helper: emits a NOTICE; dropped again at the end of this script.
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

-- The actual schema changes for this patch.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- BEGIN

  -- Second argument 'data' names the (large, bytea) column that
  -- notify() strips from the payload.
  CREATE OR REPLACE TRIGGER comparisons_tg
      AFTER INSERT OR DELETE OR UPDATE
      ON comparisons.comparisons
      FOR EACH ROW
      EXECUTE FUNCTION public.notify('comparisons', 'data');

  -- END

END;
$outer$ LANGUAGE plpgsql;

-- Version guard: only applies the patch when upgrading from exactly 0.6.4.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;  -- not used; kept for parity with the other upgrade scripts
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  -- NOTE(review): lexicographic TEXT comparison — fine for these values,
  -- but would misorder e.g. '0.6.10' vs '0.6.2'.
  IF current_db_version >= '0.6.5' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.4' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
    HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.5"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.5"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
|
||||
@@ -0,0 +1,157 @@
|
||||
-- Add last_project_update(pid) function
--
-- New schema version: 0.6.6
--
-- ATTENTION:
--
-- ENSURE YOU HAVE BACKED UP THE DATABASE BEFORE RUNNING THIS SCRIPT.
--
--
-- NOTE: This upgrade affects the public schema only.
-- NOTE: Each application starts a transaction, which must be committed
--       or rolled back.
--
-- This adds a last_project_update(pid) function. It takes a project ID
-- and returns the last known timestamp from that project. Timestamps
-- are derived from multiple sources:
--
-- - raw_shots table
-- - final_shots table
-- - event_log_full table
-- - info table where key = 'qc'
-- - files table, from the hashes (which contain the file's mtime)
-- - project configuration, looking for an _updatedOn property
--
-- To apply, run as the dougal user:
--
--   psql <<EOF
--   \i $THIS_FILE
--   COMMIT;
--   EOF
--
-- NOTE: It can be applied multiple times without ill effect.
--

BEGIN;

-- Temporary helper: emits a NOTICE; dropped again at the end of this script.
CREATE OR REPLACE PROCEDURE pg_temp.show_notice (notice text) AS $$
BEGIN
  RAISE NOTICE '%', notice;
END;
$$ LANGUAGE plpgsql;

-- The actual schema changes for this patch.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade_database () AS $outer$
BEGIN

  RAISE NOTICE 'Updating schema %', 'public';
  SET search_path TO public;

  -- BEGIN

  -- Returns the most recent activity timestamp for project p_pid, or
  -- NULL if none of the sources yields one.
  CREATE OR REPLACE FUNCTION public.last_project_update(p_pid text)
  RETURNS timestamp with time zone
  LANGUAGE plpgsql
  AS $function$
  DECLARE
    v_last_ts timestamptz := NULL;    -- running maximum
    v_current_ts timestamptz;
    v_current_str text;
    v_current_unix numeric;
    v_sid_rec record;
  BEGIN
    -- From raw_shots, final_shots, info, and files
    -- NOTE(review): the schema name is concatenated into dynamic SQL
    -- without quote_ident()/format(); safe only while
    -- public.projects.schema holds trusted identifiers — confirm.
    FOR v_sid_rec IN SELECT schema FROM public.projects WHERE pid = p_pid
    LOOP
      -- From raw_shots
      EXECUTE 'SELECT max(tstamp) FROM ' || v_sid_rec.schema || '.raw_shots' INTO v_current_ts;
      IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
        v_last_ts := v_current_ts;
      END IF;

      -- From final_shots
      EXECUTE 'SELECT max(tstamp) FROM ' || v_sid_rec.schema || '.final_shots' INTO v_current_ts;
      IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
        v_last_ts := v_current_ts;
      END IF;

      -- From info where key = 'qc'
      EXECUTE 'SELECT value->>''updatedOn'' FROM ' || v_sid_rec.schema || '.info WHERE key = ''qc''' INTO v_current_str;
      IF v_current_str IS NOT NULL THEN
        v_current_ts := v_current_str::timestamptz;
        IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
          v_last_ts := v_current_ts;
        END IF;
      END IF;

      -- From files hash second part, only for valid colon-separated hashes
      EXECUTE 'SELECT max( split_part(hash, '':'', 2)::numeric ) FROM ' || v_sid_rec.schema || '.files WHERE hash ~ ''^[0-9]+:[0-9]+\\.[0-9]+:[0-9]+\\.[0-9]+:[0-9a-f]+$''' INTO v_current_unix;
      IF v_current_unix IS NOT NULL THEN
        -- second hash field is a Unix time (file mtime)
        v_current_ts := to_timestamp(v_current_unix);
        IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
          v_last_ts := v_current_ts;
        END IF;
      END IF;

      -- From event_log_full
      EXECUTE 'SELECT max(tstamp) FROM ' || v_sid_rec.schema || '.event_log_full' INTO v_current_ts;
      IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
        v_last_ts := v_current_ts;
      END IF;
    END LOOP;

    -- From projects.meta->_updatedOn
    SELECT (meta->>'_updatedOn')::timestamptz FROM public.projects WHERE pid = p_pid INTO v_current_ts;
    IF v_current_ts > v_last_ts OR v_last_ts IS NULL THEN
      v_last_ts := v_current_ts;
    END IF;

    RETURN v_last_ts;
  END;
  $function$;

  -- END

END;
$outer$ LANGUAGE plpgsql;

-- Version guard: only applies the patch when upgrading from exactly 0.6.5.
CREATE OR REPLACE PROCEDURE pg_temp.upgrade () AS $outer$
DECLARE
  row RECORD;  -- not used; kept for parity with the other upgrade scripts
  current_db_version TEXT;
BEGIN

  SELECT value->>'db_schema' INTO current_db_version FROM public.info WHERE key = 'version';

  -- NOTE(review): lexicographic TEXT comparison — fine for these values,
  -- but would misorder e.g. '0.6.10' vs '0.6.2'.
  IF current_db_version >= '0.6.6' THEN
    RAISE EXCEPTION
    USING MESSAGE='Patch already applied';
  END IF;

  IF current_db_version != '0.6.5' THEN
    RAISE EXCEPTION
    USING MESSAGE='Invalid database version: ' || current_db_version,
    HINT='Ensure all previous patches have been applied.';
  END IF;

  CALL pg_temp.upgrade_database();
END;
$outer$ LANGUAGE plpgsql;

CALL pg_temp.upgrade();

CALL pg_temp.show_notice('Cleaning up');
DROP PROCEDURE pg_temp.upgrade_database ();
DROP PROCEDURE pg_temp.upgrade ();

CALL pg_temp.show_notice('Updating db_schema version');
INSERT INTO public.info VALUES ('version', '{"db_schema": "0.6.6"}')
ON CONFLICT (key) DO UPDATE
SET value = public.info.value || '{"db_schema": "0.6.6"}' WHERE public.info.key = 'version';


CALL pg_temp.show_notice('All done. You may now run "COMMIT;" to persist the changes');
DROP PROCEDURE pg_temp.show_notice (notice text);

--
--NOTE Run `COMMIT;` now if all went well
--
|
||||
@@ -45,11 +45,13 @@
|
||||
name: "No fire"
|
||||
id: no_fire
|
||||
check: |
|
||||
const currentShot = currentItem;
|
||||
const gunData = currentItem._("raw_meta.smsrc");
|
||||
(gunData && gunData.guns && gunData.guns.length != gunData.num_active)
|
||||
? `Source ${gunData.src_number}: No fire (${gunData.guns.length - gunData.num_active} guns)`
|
||||
: true;
|
||||
// const currentShot = currentItem;
|
||||
// const gunData = currentItem._("raw_meta.smsrc");
|
||||
// (gunData && gunData.guns && gunData.guns.length != gunData.num_active)
|
||||
// ? `Source ${gunData.src_number}: No fire (${gunData.guns.length - gunData.num_active} guns)`
|
||||
// : true;
|
||||
// Disabled due to changes in Smartsource software. It now returns all guns on every shot, not just active ones.
|
||||
true
|
||||
|
||||
-
|
||||
name: "Pressure errors"
|
||||
|
||||
968
lib/modules/@dougal/binary/classes.js
Normal file
968
lib/modules/@dougal/binary/classes.js
Normal file
@@ -0,0 +1,968 @@
|
||||
/**
 * Maps a numeric type code (as stored in chunk headers) to the
 * corresponding TypedArray constructor.
 * NOTE(review): code 6 is absent from this table — confirm whether it is
 * reserved in the binary format.
 */
const codeToType = Object.fromEntries([
  [0, Int8Array],
  [1, Uint8Array],
  [2, Int16Array],
  [3, Uint16Array],
  [4, Int32Array],
  [5, Uint32Array],
  [7, Float32Array],
  [8, Float64Array],
  [9, BigInt64Array],
  [10, BigUint64Array],
]);
|
||||
|
||||
/**
 * Byte width of a single element for each supported TypedArray,
 * keyed by constructor name (equals each type's BYTES_PER_ELEMENT).
 */
const typeToBytes = Object.fromEntries(
  [
    Int8Array,
    Uint8Array,
    Int16Array,
    Uint16Array,
    Int32Array,
    Uint32Array,
    Float32Array,
    Float64Array,
    BigInt64Array,
    BigUint64Array,
  ].map((type) => [type.name, type.BYTES_PER_ELEMENT])
);
|
||||
|
||||
/**
 * Reads a single little-endian value from a DataView.
 *
 * @param {DataView} view - source view over the underlying buffer
 * @param {number} offset - byte offset to read from
 * @param {Function} type - TypedArray constructor selecting the wire format
 * @returns {number|bigint} decoded value (bigint for the 64-bit types)
 * @throws {Error} if the type is not one of the supported TypedArrays
 */
function readTypedValue(view, offset, type) {
  const getterByType = {
    Int8Array: 'getInt8',
    Uint8Array: 'getUint8',
    Int16Array: 'getInt16',
    Uint16Array: 'getUint16',
    Int32Array: 'getInt32',
    Uint32Array: 'getUint32',
    Float32Array: 'getFloat32',
    Float64Array: 'getFloat64',
    BigInt64Array: 'getBigInt64',
    BigUint64Array: 'getBigUint64',
  };
  const getter = getterByType[type.name];
  if (getter === undefined) {
    throw new Error(`Unsupported type: ${type.name}`);
  }
  // The littleEndian flag is ignored by the single-byte getters.
  return view[getter](offset, true);
}
|
||||
|
||||
/**
 * Writes a single value to a DataView in little-endian byte order.
 *
 * @param {DataView} view - destination view over the underlying buffer
 * @param {number} offset - byte offset to write at
 * @param {number|bigint} value - value to encode; coerced via BigInt()
 *   for the 64-bit types
 * @param {Function} type - TypedArray constructor selecting the wire format
 * @throws {Error} if the type is not one of the supported TypedArrays
 */
function writeTypedValue(view, offset, value, type) {
  const setterByType = {
    Int8Array: 'setInt8',
    Uint8Array: 'setUint8',
    Int16Array: 'setInt16',
    Uint16Array: 'setUint16',
    Int32Array: 'setInt32',
    Uint32Array: 'setUint32',
    Float32Array: 'setFloat32',
    Float64Array: 'setFloat64',
    BigInt64Array: 'setBigInt64',
    BigUint64Array: 'setBigUint64',
  };
  const setter = setterByType[type.name];
  if (setter === undefined) {
    throw new Error(`Unsupported type: ${type.name}`);
  }
  const needsBigInt = type === BigInt64Array || type === BigUint64Array;
  // The littleEndian flag is ignored by the single-byte setters.
  view[setter](offset, needsBigInt ? BigInt(value) : value, true);
}
|
||||
|
||||
class DougalBinaryBundle extends ArrayBuffer {
|
||||
|
||||
static HEADER_LENGTH = 4; // Length of a bundle header
|
||||
|
||||
/** Clone an existing ByteArray into a DougalBinaryBundle
|
||||
*/
|
||||
static clone (buffer) {
|
||||
const clone = new DougalBinaryBundle(buffer.byteLength);
|
||||
const uint8Array = new Uint8Array(buffer);
|
||||
const uint8ArrayClone = new Uint8Array(clone);
|
||||
uint8ArrayClone.set(uint8Array);
|
||||
return clone;
|
||||
}
|
||||
|
||||
constructor (length, options) {
|
||||
super (length, options);
|
||||
}
|
||||
|
||||
/** Get the count of bundles in this ByteArray.
|
||||
*
|
||||
* Stops at the first non-bundle looking offset
|
||||
*/
|
||||
get bundleCount () {
|
||||
let count = 0;
|
||||
let currentBundleOffset = 0;
|
||||
const view = new DataView(this);
|
||||
|
||||
while (currentBundleOffset < this.byteLength) {
|
||||
|
||||
const currentBundleHeader = view.getUint32(currentBundleOffset, true);
|
||||
if ((currentBundleHeader & 0xff) !== 0x1c) {
|
||||
// This is not a bundle
|
||||
return count;
|
||||
}
|
||||
let currentBundleLength = currentBundleHeader >>> 8;
|
||||
|
||||
currentBundleOffset += currentBundleLength + DougalBinaryBundle.HEADER_LENGTH;
|
||||
count++;
|
||||
}
|
||||
|
||||
return count;
|
||||
}
|
||||
|
||||
|
||||
/** Get the number of chunks in the bundles of this ByteArray
|
||||
*/
|
||||
get chunkCount () {
|
||||
let count = 0;
|
||||
let bundleOffset = 0;
|
||||
const view = new DataView(this);
|
||||
|
||||
while (bundleOffset < this.byteLength) {
|
||||
const header = view.getUint32(bundleOffset, true);
|
||||
if ((header & 0xFF) !== 0x1C) break;
|
||||
const length = header >>> 8;
|
||||
if (bundleOffset + 4 + length > this.byteLength) break;
|
||||
|
||||
let chunkOffset = bundleOffset + 4; // relative to buffer start
|
||||
|
||||
while (chunkOffset < bundleOffset + 4 + length) {
|
||||
const chunkType = view.getUint8(chunkOffset);
|
||||
if (chunkType !== 0x11 && chunkType !== 0x12) break;
|
||||
|
||||
const cCount = view.getUint16(chunkOffset + 2, true);
|
||||
const ΔelemC = view.getUint8(chunkOffset + 10);
|
||||
const elemC = view.getUint8(chunkOffset + 11);
|
||||
|
||||
let localOffset = 12; // header size
|
||||
|
||||
localOffset += ΔelemC + elemC; // preface
|
||||
|
||||
// initial values
|
||||
for (let k = 0; k < ΔelemC; k++) {
|
||||
const typeByte = view.getUint8(chunkOffset + 12 + k);
|
||||
const baseCode = typeByte & 0xF;
|
||||
const baseType = codeToType[baseCode];
|
||||
if (!baseType) throw new Error('Invalid base type code');
|
||||
localOffset += typeToBytes[baseType.name];
|
||||
}
|
||||
|
||||
// pad after initial
|
||||
while (localOffset % 4 !== 0) localOffset++;
|
||||
|
||||
if (chunkType === 0x11) { // Sequential
|
||||
// record data: Δelems incrs
|
||||
for (let k = 0; k < ΔelemC; k++) {
|
||||
const typeByte = view.getUint8(chunkOffset + 12 + k);
|
||||
const incrCode = typeByte >> 4;
|
||||
const incrType = codeToType[incrCode];
|
||||
if (!incrType) throw new Error('Invalid incr type code');
|
||||
localOffset += cCount * typeToBytes[incrType.name];
|
||||
}
|
||||
|
||||
// elems
|
||||
for (let k = 0; k < elemC; k++) {
|
||||
const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
|
||||
const type = codeToType[typeCode];
|
||||
if (!type) throw new Error('Invalid elem type code');
|
||||
localOffset += cCount * typeToBytes[type.name];
|
||||
}
|
||||
} else { // Interleaved
|
||||
// Compute exact stride for interleaved record data
|
||||
let ΔelemStride = 0;
|
||||
for (let k = 0; k < ΔelemC; k++) {
|
||||
const typeByte = view.getUint8(chunkOffset + 12 + k);
|
||||
const incrCode = typeByte >> 4;
|
||||
const incrType = codeToType[incrCode];
|
||||
if (!incrType) throw new Error('Invalid incr type code');
|
||||
ΔelemStride += typeToBytes[incrType.name];
|
||||
}
|
||||
let elemStride = 0;
|
||||
for (let k = 0; k < elemC; k++) {
|
||||
const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
|
||||
const type = codeToType[typeCode];
|
||||
if (!type) throw new Error('Invalid elem type code');
|
||||
elemStride += typeToBytes[type.name];
|
||||
}
|
||||
const recordStride = ΔelemStride + elemStride;
|
||||
localOffset += cCount * recordStride;
|
||||
}
|
||||
|
||||
// pad after record
|
||||
while (localOffset % 4 !== 0) localOffset++;
|
||||
|
||||
chunkOffset += localOffset;
|
||||
count++;
|
||||
}
|
||||
|
||||
bundleOffset += 4 + length;
|
||||
}
|
||||
|
||||
return count;
|
||||
}
|
||||
|
||||
/** Return an array of DougalBinaryChunkSequential or DougalBinaryChunkInterleaved instances
|
||||
*/
|
||||
chunks () {
|
||||
const chunks = [];
|
||||
let bundleOffset = 0;
|
||||
const view = new DataView(this);
|
||||
|
||||
while (bundleOffset < this.byteLength) {
|
||||
const header = view.getUint32(bundleOffset, true);
|
||||
if ((header & 0xFF) !== 0x1C) break;
|
||||
const length = header >>> 8;
|
||||
if (bundleOffset + 4 + length > this.byteLength) break;
|
||||
|
||||
let chunkOffset = bundleOffset + 4;
|
||||
|
||||
while (chunkOffset < bundleOffset + 4 + length) {
|
||||
const chunkType = view.getUint8(chunkOffset);
|
||||
if (chunkType !== 0x11 && chunkType !== 0x12) break;
|
||||
|
||||
const cCount = view.getUint16(chunkOffset + 2, true);
|
||||
const ΔelemC = view.getUint8(chunkOffset + 10);
|
||||
const elemC = view.getUint8(chunkOffset + 11);
|
||||
|
||||
let localOffset = 12;
|
||||
|
||||
localOffset += ΔelemC + elemC;
|
||||
|
||||
// initial values
|
||||
for (let k = 0; k < ΔelemC; k++) {
|
||||
const typeByte = view.getUint8(chunkOffset + 12 + k);
|
||||
const baseCode = typeByte & 0xF;
|
||||
const baseType = codeToType[baseCode];
|
||||
if (!baseType) throw new Error('Invalid base type code');
|
||||
localOffset += typeToBytes[baseType.name];
|
||||
}
|
||||
|
||||
// pad after initial
|
||||
while (localOffset % 4 !== 0) localOffset++;
|
||||
|
||||
if (chunkType === 0x11) { // Sequential
|
||||
// record data: Δelems incrs
|
||||
for (let k = 0; k < ΔelemC; k++) {
|
||||
const typeByte = view.getUint8(chunkOffset + 12 + k);
|
||||
const incrCode = typeByte >> 4;
|
||||
const incrType = codeToType[incrCode];
|
||||
if (!incrType) throw new Error('Invalid incr type code');
|
||||
localOffset += cCount * typeToBytes[incrType.name];
|
||||
}
|
||||
|
||||
// elems
|
||||
for (let k = 0; k < elemC; k++) {
|
||||
const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
|
||||
const type = codeToType[typeCode];
|
||||
if (!type) throw new Error('Invalid elem type code');
|
||||
localOffset += cCount * typeToBytes[type.name];
|
||||
}
|
||||
} else { // Interleaved
|
||||
// Compute exact stride for interleaved record data
|
||||
let ΔelemStride = 0;
|
||||
for (let k = 0; k < ΔelemC; k++) {
|
||||
const typeByte = view.getUint8(chunkOffset + 12 + k);
|
||||
const incrCode = typeByte >> 4;
|
||||
const incrType = codeToType[incrCode];
|
||||
if (!incrType) throw new Error('Invalid incr type code');
|
||||
ΔelemStride += typeToBytes[incrType.name];
|
||||
}
|
||||
let elemStride = 0;
|
||||
for (let k = 0; k < elemC; k++) {
|
||||
const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k);
|
||||
const type = codeToType[typeCode];
|
||||
if (!type) throw new Error('Invalid elem type code');
|
||||
elemStride += typeToBytes[type.name];
|
||||
}
|
||||
const recordStride = ΔelemStride + elemStride;
|
||||
localOffset += cCount * recordStride;
|
||||
}
|
||||
|
||||
// pad after record
|
||||
while (localOffset % 4 !== 0) localOffset++;
|
||||
|
||||
switch (chunkType) {
|
||||
case 0x11:
|
||||
chunks.push(new DougalBinaryChunkSequential(this, chunkOffset, localOffset));
|
||||
break;
|
||||
case 0x12:
|
||||
chunks.push(new DougalBinaryChunkInterleaved(this, chunkOffset, localOffset));
|
||||
break;
|
||||
default:
|
||||
throw new Error('Invalid chunk type');
|
||||
}
|
||||
|
||||
chunkOffset += localOffset;
|
||||
}
|
||||
|
||||
bundleOffset += 4 + length;
|
||||
}
|
||||
|
||||
return chunks;
|
||||
}
|
||||
|
||||
/** Return a ByteArray containing all data from all
|
||||
* chunks including reconstructed i, j and incremental
|
||||
* values as follows:
|
||||
*
|
||||
* <i_0> <i_1> … <i_x> // i values (constant)
|
||||
* <j_0> <j_1> … <j_x> // j values (j0 + Δj*i)
|
||||
* <Δelem_0_0> <Δelem_0_1> … <Δelem_0_x> // reconstructed Δelem0 (uses baseType)
|
||||
* <Δelem_1_0> <Δelem_1_1> … <Δelem_1_x> // reconstructed Δelem1
|
||||
* …
|
||||
* <Δelem_y_0> <Δelem_y_1> … <Δelem_y_x> // reconstructed Δelem1
|
||||
* <elem_0_0> <elem_0_1> … <elem_0_x> // First elem
|
||||
* <elem_1_0> <elem_1_1> … <elem_1_x> // Second elem
|
||||
* …
|
||||
* <elem_z_0> <elem_z_1> … <elem_z_x> // Last elem
|
||||
*
|
||||
* It does not matter whether the underlying chunks are
|
||||
* sequential or interleaved. This function will transform
|
||||
* as necessary.
|
||||
*
|
||||
*/
|
||||
getDataSequentially () {
  // Materialise all chunks; an empty bundle yields an empty buffer.
  const chunks = this.chunks();
  if (chunks.length === 0) return new ArrayBuffer(0);

  const firstChunk = chunks[0];
  const ΔelemC = firstChunk.ΔelemCount;
  const elemC = firstChunk.elemCount;

  // Check consistency across chunks: every chunk must carry the same number
  // of Δelems and elems, otherwise the column layout below is undefined.
  for (const chunk of chunks) {
    if (chunk.ΔelemCount !== ΔelemC || chunk.elemCount !== elemC) {
      throw new Error('Inconsistent chunk structures');
    }
  }

  // Get types from first chunk's preface (offset 12: one type byte per
  // Δelem — base type in the low nibble — then one per elem).
  // NOTE(review): assumes all chunks also share the same *types*, not just
  // the same counts; only counts are verified above.
  const view = new DataView(firstChunk);
  const ΔelemBaseTypes = [];
  for (let k = 0; k < ΔelemC; k++) {
    const typeByte = view.getUint8(12 + k);
    const baseCode = typeByte & 0xF;
    const baseType = codeToType[baseCode];
    if (!baseType) throw new Error('Invalid base type code');
    ΔelemBaseTypes.push(baseType);
  }
  const elemTypes = [];
  for (let k = 0; k < elemC; k++) {
    const typeCode = view.getUint8(12 + ΔelemC + k);
    const type = codeToType[typeCode];
    if (!type) throw new Error('Invalid elem type code');
    elemTypes.push(type);
  }

  // Compute total records across all chunks.
  const totalN = chunks.reduce((sum, c) => sum + c.jCount, 0);

  // Compute sizes of each column block in the output buffer.
  const size_i = totalN * 2; // Uint16 for i
  const size_j = totalN * 4; // Int32 for j
  let size_Δelems = 0;
  for (const t of ΔelemBaseTypes) {
    size_Δelems += totalN * typeToBytes[t.name];
  }
  let size_elems = 0;
  for (const t of elemTypes) {
    size_elems += totalN * typeToBytes[t.name];
  }
  const totalSize = size_i + size_j + size_Δelems + size_elems;

  const ab = new ArrayBuffer(totalSize);
  const dv = new DataView(ab);

  // Write i's: each chunk's constant i repeated once per record.
  let off = 0;
  for (const chunk of chunks) {
    const i = chunk.i;
    for (let idx = 0; idx < chunk.jCount; idx++) {
      dv.setUint16(off, i, true);
      off += 2;
    }
  }

  // Write j's, reconstructed as j0 + idx*Δj per chunk.
  off = size_i;
  for (const chunk of chunks) {
    const j0 = chunk.j0;
    const Δj = chunk.Δj;
    for (let idx = 0; idx < chunk.jCount; idx++) {
      const j = j0 + idx * Δj;
      dv.setInt32(off, j, true);
      off += 4;
    }
  }

  // Write Δelems: one column block per Δelem index, each block spanning all
  // chunks in order. chunk.Δelem(m) returns the fully reconstructed
  // (delta-decoded) TypedArray for that column.
  off = size_i + size_j;
  for (let m = 0; m < ΔelemC; m++) {
    const type = ΔelemBaseTypes[m];
    const bytes = typeToBytes[type.name];
    for (const chunk of chunks) {
      const arr = chunk.Δelem(m);
      for (let idx = 0; idx < chunk.jCount; idx++) {
        writeTypedValue(dv, off, arr[idx], type);
        off += bytes;
      }
    }
  }

  // Write elems, continuing where the Δelem blocks ended.
  for (let m = 0; m < elemC; m++) {
    const type = elemTypes[m];
    const bytes = typeToBytes[type.name];
    for (const chunk of chunks) {
      const arr = chunk.elem(m);
      for (let idx = 0; idx < chunk.jCount; idx++) {
        writeTypedValue(dv, off, arr[idx], type);
        off += bytes;
      }
    }
  }

  return ab;
}
|
||||
|
||||
/** Return a ByteArray containing all data from all
|
||||
* chunks including reconstructed i, j and incremental
|
||||
* values, interleaved as follows:
|
||||
*
|
||||
* <i_0> <j_0> <Δelem_0_0> <Δelem_1_0> … <Δelem_y_0> <elem_0_0> <elem_1_0> … <elem_z_0>
|
||||
* <i_1> <j_1> <Δelem_0_1> <Δelem_1_1> … <Δelem_y_1> <elem_0_1> <elem_1_1> … <elem_z_1>
|
||||
* <i_x> <j_x> <Δelem_0_x> <Δelem_1_x> … <Δelem_y_x> <elem_0_x> <elem_1_x> … <elem_z_x>
|
||||
*
|
||||
* It does not matter whether the underlying chunks are
|
||||
* sequential or interleaved. This function will transform
|
||||
* as necessary.
|
||||
*
|
||||
*/
|
||||
getDataInterleaved () {
  // Materialise all chunks; an empty bundle yields an empty buffer.
  const chunks = this.chunks();
  if (chunks.length === 0) return new ArrayBuffer(0);

  const firstChunk = chunks[0];
  const ΔelemC = firstChunk.ΔelemCount;
  const elemC = firstChunk.elemCount;

  // Check consistency across chunks: counts must match for a fixed record
  // stride to make sense.
  for (const chunk of chunks) {
    if (chunk.ΔelemCount !== ΔelemC || chunk.elemCount !== elemC) {
      throw new Error('Inconsistent chunk structures');
    }
  }

  // Get types from first chunk's preface (offset 12: one type byte per
  // Δelem — base type in the low nibble — then one per elem).
  // NOTE(review): assumes all chunks also share the same *types*, not just
  // the same counts; only counts are verified above.
  const view = new DataView(firstChunk);
  const ΔelemBaseTypes = [];
  for (let k = 0; k < ΔelemC; k++) {
    const typeByte = view.getUint8(12 + k);
    const baseCode = typeByte & 0xF;
    const baseType = codeToType[baseCode];
    if (!baseType) throw new Error('Invalid base type code');
    ΔelemBaseTypes.push(baseType);
  }
  const elemTypes = [];
  for (let k = 0; k < elemC; k++) {
    const typeCode = view.getUint8(12 + ΔelemC + k);
    const type = codeToType[typeCode];
    if (!type) throw new Error('Invalid elem type code');
    elemTypes.push(type);
  }

  // Compute total records across all chunks.
  const totalN = chunks.reduce((sum, c) => sum + c.jCount, 0);

  // Compute record size: fixed-width i and j plus one field per column.
  const recordSize = 2 + 4 + // i (Uint16) + j (Int32)
    ΔelemBaseTypes.reduce((sum, t) => sum + typeToBytes[t.name], 0) +
    elemTypes.reduce((sum, t) => sum + typeToBytes[t.name], 0);
  const totalSize = totalN * recordSize;

  const ab = new ArrayBuffer(totalSize);
  const dv = new DataView(ab);

  // Emit one full record at a time: i, j, all Δelems, all elems.
  let off = 0;
  for (const chunk of chunks) {
    const i = chunk.i;
    const j0 = chunk.j0;
    const Δj = chunk.Δj;
    for (let idx = 0; idx < chunk.jCount; idx++) {
      dv.setUint16(off, i, true);
      off += 2;
      const j = j0 + idx * Δj; // reconstruct j from its arithmetic progression
      dv.setInt32(off, j, true);
      off += 4;
      for (let m = 0; m < ΔelemC; m++) {
        const type = ΔelemBaseTypes[m];
        const bytes = typeToBytes[type.name];
        // Δelem(m) caches its reconstruction, so the repeated call per
        // record is a cheap lookup after the first time.
        const arr = chunk.Δelem(m);
        writeTypedValue(dv, off, arr[idx], type);
        off += bytes;
      }
      for (let m = 0; m < elemC; m++) {
        const type = elemTypes[m];
        const bytes = typeToBytes[type.name];
        const arr = chunk.elem(m);
        writeTypedValue(dv, off, arr[idx], type);
        off += bytes;
      }
    }
  }

  return ab;
}
|
||||
|
||||
get records () {
|
||||
const data = [];
|
||||
for (const record of this) {
|
||||
data.push(record.slice(1));
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
[Symbol.iterator]() {
|
||||
const chunks = this.chunks();
|
||||
let chunkIndex = 0;
|
||||
let chunkIterator = chunks.length > 0 ? chunks[0][Symbol.iterator]() : null;
|
||||
|
||||
return {
|
||||
next() {
|
||||
if (!chunkIterator) {
|
||||
return { done: true };
|
||||
}
|
||||
|
||||
let result = chunkIterator.next();
|
||||
while (result.done && chunkIndex < chunks.length - 1) {
|
||||
chunkIndex++;
|
||||
chunkIterator = chunks[chunkIndex][Symbol.iterator]();
|
||||
result = chunkIterator.next();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
/**
 * A self-contained copy of one "sequential" (type 0x11) chunk, stored as its
 * own ArrayBuffer. Record data is laid out column-by-column: all deltas for
 * Δelem 0, then all deltas for Δelem 1, …, then all values for elem 0, etc.
 *
 * Header layout read by the getters below (little-endian):
 *   byte 1  : udv (user-defined value)
 *   bytes 2 : jCount (Uint16)
 *   bytes 4 : i (Uint16)
 *   bytes 6 : j0 (Uint16)
 *   bytes 8 : Δj (Int16)
 *   byte 10 : Δelem count
 *   byte 11 : elem count
 *   byte 12…: type bytes (one per Δelem: base code in low nibble, increment
 *             code in high nibble; then one plain code per elem), followed by
 *             the Δelems' initial values, padded to a 4-byte boundary, then
 *             the record data.
 */
class DougalBinaryChunkSequential extends ArrayBuffer {

  /** Copy `length` bytes of `buffer` starting at `offset` into this new
   *  standalone buffer and set up the lazy caches.
   */
  constructor (buffer, offset, length) {
    super(length);
    new Uint8Array(this).set(new Uint8Array(buffer, offset, length));
    // Lazily-filled caches of reconstructed TypedArrays, one slot per column.
    this._ΔelemCaches = new Array(this.ΔelemCount);
    this._elemCaches = new Array(this.elemCount);
    // Lazily-computed byte offsets of each column's data block.
    this._ΔelemBlockOffsets = null;
    this._elemBlockOffsets = null;
    // Lazily-computed offset of the start of record data.
    this._recordOffset = null;
  }

  /** Byte offset where record data begins: 12-byte header + type bytes +
   *  Δelem initial values, rounded up to a 4-byte boundary. Memoized.
   */
  _getRecordOffset() {
    if (this._recordOffset !== null) return this._recordOffset;
    const view = new DataView(this);
    const ΔelemC = this.ΔelemCount;
    const elemC = this.elemCount;

    let recordOffset = 12 + ΔelemC + elemC;
    // Each Δelem contributes one initial value of its base type.
    for (let k = 0; k < ΔelemC; k++) {
      const tb = view.getUint8(12 + k);
      const bc = tb & 0xF; // base-type code lives in the low nibble
      const bt = codeToType[bc];
      recordOffset += typeToBytes[bt.name];
    }
    while (recordOffset % 4 !== 0) recordOffset++; // 4-byte alignment padding
    this._recordOffset = recordOffset;
    return recordOffset;
  }

  /** Compute (once) the byte offset of every Δelem delta block and every
   *  elem block. Blocks are contiguous: all Δelem delta blocks first, then
   *  all elem blocks, each `jCount * sizeof(type)` bytes long.
   */
  _initBlockOffsets() {
    if (this._ΔelemBlockOffsets !== null) return;
    const view = new DataView(this);
    const count = this.jCount;
    const ΔelemC = this.ΔelemCount;
    const elemC = this.elemCount;

    const recordOffset = this._getRecordOffset();

    this._ΔelemBlockOffsets = [];
    let o = recordOffset;
    for (let k = 0; k < ΔelemC; k++) {
      this._ΔelemBlockOffsets[k] = o;
      const tb = view.getUint8(12 + k);
      const ic = tb >> 4; // increment-type code lives in the high nibble
      const it = codeToType[ic];
      o += count * typeToBytes[it.name];
    }

    this._elemBlockOffsets = [];
    for (let k = 0; k < elemC; k++) {
      this._elemBlockOffsets[k] = o;
      const tc = view.getUint8(12 + ΔelemC + k);
      const t = codeToType[tc];
      o += count * typeToBytes[t.name];
    }
  }

  /** Return the user-defined value
   */
  get udv () {
    return new DataView(this).getUint8(1);
  }

  /** Return the number of j elements in this chunk
   */
  get jCount () {
    return new DataView(this).getUint16(2, true);
  }

  /** Return the i value in this chunk
   */
  get i () {
    return new DataView(this).getUint16(4, true);
  }

  /** Return the j0 value in this chunk
   */
  get j0 () {
    return new DataView(this).getUint16(6, true);
  }

  /** Return the Δj value in this chunk
   */
  get Δj () {
    return new DataView(this).getInt16(8, true);
  }

  /** Return the Δelem_count value in this chunk
   */
  get ΔelemCount () {
    return new DataView(this).getUint8(10);
  }

  /** Return the elem_count value in this chunk
   */
  get elemCount () {
    return new DataView(this).getUint8(11);
  }

  /** Return a TypedArray (e.g., Uint16Array, …) for the n-th Δelem in the
   *  chunk, fully delta-decoded: element 0 is the stored initial value, each
   *  subsequent element adds the stored increment. Memoized per index.
   */
  Δelem (n) {
    if (this._ΔelemCaches[n]) return this._ΔelemCaches[n];

    if (n < 0 || n >= this.ΔelemCount) throw new Error(`Invalid Δelem index: ${n}`);
    const view = new DataView(this);
    const count = this.jCount;
    const ΔelemC = this.ΔelemCount;

    // Base type (low nibble) is the type of the reconstructed values;
    // increment type (high nibble) is the type of the stored deltas.
    const typeByte = view.getUint8(12 + n);
    const baseCode = typeByte & 0xF;
    const incrCode = typeByte >> 4;
    const baseType = codeToType[baseCode];
    const incrType = codeToType[incrCode];
    if (!baseType || !incrType) throw new Error('Invalid type codes for Δelem');

    // Find offset for initial value of this Δelem
    let initialOffset = 12 + ΔelemC + this.elemCount;
    for (let k = 0; k < n; k++) {
      const tb = view.getUint8(12 + k);
      const bc = tb & 0xF;
      const bt = codeToType[bc];
      initialOffset += typeToBytes[bt.name];
    }

    let current = readTypedValue(view, initialOffset, baseType);

    // Advance to start of record data (after all initials and pad)
    // (recordOffset itself is not used directly here; the memoized block
    // offsets below already include it.)
    const recordOffset = this._getRecordOffset();

    // Find offset for deltas of this Δelem (skip previous Δelems' delta blocks)
    this._initBlockOffsets();
    const deltaOffset = this._ΔelemBlockOffsets[n];

    // Reconstruct the array. Note the delta block stores `count` slots but
    // slot 0 is never read: arr[0] comes from the initial value.
    const arr = new baseType(count);
    const isBigInt = baseType === BigInt64Array || baseType === BigUint64Array;
    arr[0] = current;
    for (let idx = 1; idx < count; idx++) {
      let delta = readTypedValue(view, deltaOffset + idx * typeToBytes[incrType.name], incrType);
      if (isBigInt) {
        // BigInt accumulators cannot mix with Number deltas; coerce first.
        delta = BigInt(delta);
        current += delta;
      } else {
        current += delta;
      }
      arr[idx] = current;
    }

    this._ΔelemCaches[n] = arr;
    return arr;
  }

  /** Return a TypedArray (e.g., Uint16Array, …) for the n-th elem in the
   *  chunk. Values are stored directly (no delta coding). Memoized per index.
   */
  elem (n) {
    if (this._elemCaches[n]) return this._elemCaches[n];

    if (n < 0 || n >= this.elemCount) throw new Error(`Invalid elem index: ${n}`);
    const view = new DataView(this);
    const count = this.jCount;
    const ΔelemC = this.ΔelemCount;
    const elemC = this.elemCount;

    const typeCode = view.getUint8(12 + ΔelemC + n);
    const type = codeToType[typeCode];
    if (!type) throw new Error('Invalid type code for elem');

    // Find offset for this elem's data block
    this._initBlockOffsets();
    const elemOffset = this._elemBlockOffsets[n];

    // Create and populate the array
    const arr = new type(count);
    const bytes = typeToBytes[type.name];
    for (let idx = 0; idx < count; idx++) {
      arr[idx] = readTypedValue(view, elemOffset + idx * bytes, type);
    }

    this._elemCaches[n] = arr;
    return arr;
  }

  /** Return one record as a flat array:
   *  [udv, i, j, Δelem_0…Δelem_y, elem_0…elem_z], where j = j0 + index*Δj.
   */
  getRecord (index) {
    if (index < 0 || index >= this.jCount) throw new Error(`Invalid record index: ${index}`);

    const arr = [this.udv, this.i, this.j0 + index * this.Δj];

    for (let m = 0; m < this.ΔelemCount; m++) {
      const values = this.Δelem(m);
      arr.push(values[index]);
    }

    for (let m = 0; m < this.elemCount; m++) {
      const values = this.elem(m);
      arr.push(values[index]);
    }

    return arr;
  }

  /** Iterate the chunk's records in order, yielding getRecord(0…jCount-1). */
  [Symbol.iterator]() {
    let index = 0;
    const chunk = this;
    return {
      next() {
        if (index < chunk.jCount) {
          return { value: chunk.getRecord(index++), done: false };
        } else {
          return { done: true };
        }
      }
    };
  }

}
|
||||
|
||||
|
||||
/**
 * A self-contained copy of one "interleaved" (type 0x12) chunk, stored as
 * its own ArrayBuffer. Same 12-byte header and preface as the sequential
 * variant, but record data is laid out row-by-row: each record carries one
 * delta per Δelem followed by one value per elem, so reading column n means
 * stepping through the data with a fixed per-record stride.
 */
class DougalBinaryChunkInterleaved extends ArrayBuffer {
  /** Copy `length` bytes of `buffer` starting at `offset` into this new
   *  standalone buffer, then precompute strides/offsets and set up caches.
   */
  constructor(buffer, offset, length) {
    super(length);
    new Uint8Array(this).set(new Uint8Array(buffer, offset, length));
    // Per-column field sizes and their byte offsets *within one record*.
    this._incrStrides = [];
    this._elemStrides = [];
    this._incrOffsets = [];
    this._elemOffsets = [];
    // Total bytes per record (sum of all field sizes).
    this._recordStride = 0;
    // Lazily-computed offset of the start of record data.
    this._recordOffset = null;
    this._initStrides();
    // Lazily-filled caches of reconstructed TypedArrays, one slot per column.
    this._ΔelemCaches = new Array(this.ΔelemCount);
    this._elemCaches = new Array(this.elemCount);
  }

  /** Byte offset where record data begins: 12-byte header + type bytes +
   *  Δelem initial values, rounded up to a 4-byte boundary. Memoized.
   */
  _getRecordOffset() {
    if (this._recordOffset !== null) return this._recordOffset;
    const view = new DataView(this);
    const ΔelemC = this.ΔelemCount;
    const elemC = this.elemCount;

    let recordOffset = 12 + ΔelemC + elemC;
    // Each Δelem contributes one initial value of its base type.
    for (let k = 0; k < ΔelemC; k++) {
      const tb = view.getUint8(12 + k);
      const bc = tb & 0xF; // base-type code lives in the low nibble
      const bt = codeToType[bc];
      recordOffset += typeToBytes[bt.name];
    }
    while (recordOffset % 4 !== 0) recordOffset++; // 4-byte alignment padding
    this._recordOffset = recordOffset;
    return recordOffset;
  }

  /** Walk the type preface once, filling per-field sizes, the in-record
   *  offsets of every Δelem delta and every elem, and the record stride.
   */
  _initStrides() {
    const view = new DataView(this);
    const ΔelemC = this.ΔelemCount;
    const elemC = this.elemCount;

    // Compute incr strides and offsets
    let incrOffset = 0;
    for (let k = 0; k < ΔelemC; k++) {
      const typeByte = view.getUint8(12 + k);
      const incrCode = typeByte >> 4; // increment code in the high nibble
      const incrType = codeToType[incrCode];
      if (!incrType) throw new Error('Invalid incr type code');
      this._incrOffsets.push(incrOffset);
      const bytes = typeToBytes[incrType.name];
      this._incrStrides.push(bytes);
      incrOffset += bytes;
      this._recordStride += bytes;
    }

    // Compute elem strides and offsets (elems follow the deltas in a record)
    let elemOffset = incrOffset;
    for (let k = 0; k < elemC; k++) {
      const typeCode = view.getUint8(12 + ΔelemC + k);
      const type = codeToType[typeCode];
      if (!type) throw new Error('Invalid elem type code');
      this._elemOffsets.push(elemOffset);
      const bytes = typeToBytes[type.name];
      this._elemStrides.push(bytes);
      elemOffset += bytes;
      this._recordStride += bytes;
    }
  }

  /** The user-defined value byte. */
  get udv() {
    return new DataView(this).getUint8(1);
  }

  /** Number of records (j values) in this chunk. */
  get jCount() {
    return new DataView(this).getUint16(2, true);
  }

  /** The chunk's constant i value. */
  get i() {
    return new DataView(this).getUint16(4, true);
  }

  /** First j value; record idx has j = j0 + idx*Δj. */
  get j0() {
    return new DataView(this).getUint16(6, true);
  }

  /** Per-record j increment (signed). */
  get Δj() {
    return new DataView(this).getInt16(8, true);
  }

  /** Number of delta-coded element columns. */
  get ΔelemCount() {
    return new DataView(this).getUint8(10);
  }

  /** Number of directly-stored element columns. */
  get elemCount() {
    return new DataView(this).getUint8(11);
  }

  /** Return a TypedArray for the n-th Δelem, fully delta-decoded: element 0
   *  is the stored initial value, each subsequent element adds the delta
   *  found at the same in-record offset, one record stride apart. Memoized.
   */
  Δelem(n) {
    if (this._ΔelemCaches[n]) return this._ΔelemCaches[n];

    if (n < 0 || n >= this.ΔelemCount) throw new Error(`Invalid Δelem index: ${n}`);
    const view = new DataView(this);
    const count = this.jCount;
    const ΔelemC = this.ΔelemCount;

    // Base type (low nibble) is the type of the reconstructed values;
    // increment type (high nibble) is the type of the stored deltas.
    const typeByte = view.getUint8(12 + n);
    const baseCode = typeByte & 0xF;
    const incrCode = typeByte >> 4;
    const baseType = codeToType[baseCode];
    const incrType = codeToType[incrCode];
    if (!baseType || !incrType) throw new Error('Invalid type codes for Δelem');

    // Find offset for initial value of this Δelem
    let initialOffset = 12 + ΔelemC + this.elemCount;
    for (let k = 0; k < n; k++) {
      const tb = view.getUint8(12 + k);
      const bc = tb & 0xF;
      const bt = codeToType[bc];
      initialOffset += typeToBytes[bt.name];
    }

    let current = readTypedValue(view, initialOffset, baseType);

    // Find offset to start of record data
    const recordOffset = this._getRecordOffset();

    // Use precomputed offset for this Δelem
    const deltaOffset = recordOffset + this._incrOffsets[n];

    // Reconstruct the array. Record 0's delta slot is never read: arr[0]
    // comes from the initial value.
    const arr = new baseType(count);
    const isBigInt = baseType === BigInt64Array || baseType === BigUint64Array;
    arr[0] = current;
    for (let idx = 1; idx < count; idx++) {
      let delta = readTypedValue(view, deltaOffset + idx * this._recordStride, incrType);
      if (isBigInt) {
        // BigInt accumulators cannot mix with Number deltas; coerce first.
        delta = BigInt(delta);
        current += delta;
      } else {
        current += delta;
      }
      arr[idx] = current;
    }

    this._ΔelemCaches[n] = arr;
    return arr;
  }

  /** Return a TypedArray for the n-th elem, gathered from each record at a
   *  fixed in-record offset, one record stride apart. Memoized per index.
   */
  elem(n) {
    if (this._elemCaches[n]) return this._elemCaches[n];

    if (n < 0 || n >= this.elemCount) throw new Error(`Invalid elem index: ${n}`);
    const view = new DataView(this);
    const count = this.jCount;
    const ΔelemC = this.ΔelemCount;

    const typeCode = view.getUint8(12 + ΔelemC + n);
    const type = codeToType[typeCode];
    if (!type) throw new Error('Invalid type code for elem');

    // Find offset to start of record data
    const recordOffset = this._getRecordOffset();

    // Use precomputed offset for this elem (relative to start of record data)
    const elemOffset = recordOffset + this._elemOffsets[n];

    // Create and populate the array
    const arr = new type(count);
    const bytes = typeToBytes[type.name];
    for (let idx = 0; idx < count; idx++) {
      arr[idx] = readTypedValue(view, elemOffset + idx * this._recordStride, type);
    }

    this._elemCaches[n] = arr;
    return arr;
  }

  /** Return one record as a flat array:
   *  [udv, i, j, Δelem_0…Δelem_y, elem_0…elem_z], where j = j0 + index*Δj.
   */
  getRecord (index) {
    if (index < 0 || index >= this.jCount) throw new Error(`Invalid record index: ${index}`);

    const arr = [this.udv, this.i, this.j0 + index * this.Δj];

    for (let m = 0; m < this.ΔelemCount; m++) {
      const values = this.Δelem(m);
      arr.push(values[index]);
    }

    for (let m = 0; m < this.elemCount; m++) {
      const values = this.elem(m);
      arr.push(values[index]);
    }

    return arr;
  }

  /** Iterate the chunk's records in order, yielding getRecord(0…jCount-1). */
  [Symbol.iterator]() {
    let index = 0;
    const chunk = this;
    return {
      next() {
        if (index < chunk.jCount) {
          return { value: chunk.getRecord(index++), done: false };
        } else {
          return { done: true };
        }
      }
    };
  }

}
|
||||
|
||||
|
||||
// Public API: the bundle container plus both chunk representations.
module.exports = { DougalBinaryBundle, DougalBinaryChunkSequential, DougalBinaryChunkInterleaved }
|
||||
327
lib/modules/@dougal/binary/decode.js
Normal file
327
lib/modules/@dougal/binary/decode.js
Normal file
@@ -0,0 +1,327 @@
|
||||
// Wire-format type codes → TypedArray constructors.
// Code 6 is intentionally absent (unassigned in the format).
const codeToType = {
  0: Int8Array,
  1: Uint8Array,
  2: Int16Array,
  3: Uint16Array,
  4: Int32Array,
  5: Uint32Array,
  7: Float32Array,
  8: Float64Array,
  9: BigInt64Array,
  10: BigUint64Array
};

// Constructor name → element size in bytes, derived straight from each
// TypedArray's own BYTES_PER_ELEMENT so the two tables cannot drift apart.
const typeToBytes = Object.fromEntries(
  Object.values(codeToType).map((T) => [T.name, T.BYTES_PER_ELEMENT])
);
|
||||
|
||||
/**
 * Decode a stream of bundles containing sequential (type 0x11) chunks into
 * plain column arrays.
 *
 * Each bundle is a 4-byte little-endian header (0x1C marker in the low byte,
 * payload length in the upper 24 bits) followed by chunks; each chunk is a
 * 12-byte header, a type preface, the Δelems' initial values (padded to 4
 * bytes), then column-ordered record data (padded to 4 bytes).
 *
 * @param {Uint8Array} binary - The encoded stream.
 * @returns {{i: ?number, j: number[], Δelems: number[][], elems: number[][]}}
 *   Reconstructed columns; BigInt columns are converted to Number.
 * @throws {Error} On malformed/truncated input, a non-0x11 chunk type, or
 *   multiple distinct i values.
 */
function sequential(binary) {
  if (!(binary instanceof Uint8Array) || binary.length < 4) {
    throw new Error('Invalid binary input');
  }

  const view = new DataView(binary.buffer, binary.byteOffset, binary.byteLength);
  let offset = 0;

  // Initialize result (assuming single i value for simplicity; extend for multiple i values if needed)
  const result = { i: null, j: [], Δelems: [], elems: [] };

  // Process bundles
  while (offset < binary.length) {
    // Read bundle header
    if (offset + 4 > binary.length) throw new Error('Incomplete bundle header');

    const bundleHeader = view.getUint32(offset, true);
    if ((bundleHeader & 0xFF) !== 0x1C) throw new Error('Invalid bundle marker');
    // BUGFIX: use unsigned shift. `>> 8` sign-extends once the stored length
    // reaches 2^23 (header's top bit set), yielding a negative bundleLength.
    const bundleLength = bundleHeader >>> 8;
    offset += 4;
    const bundleEnd = offset + bundleLength;

    if (bundleEnd > binary.length) throw new Error('Bundle length exceeds input size');

    // Process chunks in bundle
    while (offset < bundleEnd) {
      // Read chunk header (fixed 12 bytes)
      if (offset + 12 > bundleEnd) throw new Error('Incomplete chunk header');
      const chunkType = view.getUint8(offset);
      if (chunkType !== 0x11) throw new Error(`Unsupported chunk type: ${chunkType}`);
      offset += 1; // past chunk type
      offset += 1; // skip udv (not decoded here)
      // count is read as Uint16 and so is inherently ≤ 65535; no range check needed.
      const count = view.getUint16(offset, true); offset += 2;
      const iValue = view.getUint16(offset, true); offset += 2;
      const j0 = view.getUint16(offset, true); offset += 2;
      const Δj = view.getInt16(offset, true); offset += 2;
      const ΔelemCount = view.getUint8(offset++); // Δelem_count
      const elemCount = view.getUint8(offset++); // elem_count

      // Set i value (assuming all chunks share the same i)
      if (result.i === null) result.i = iValue;
      else if (result.i !== iValue) throw new Error('Multiple i values not supported');

      // Read preface: one type byte per Δelem (base code in the low nibble,
      // increment code in the high nibble), then one plain code per elem.
      const ΔelemTypes = [];
      for (let i = 0; i < ΔelemCount; i++) {
        if (offset >= bundleEnd) throw new Error('Incomplete Δelem types');
        const typeByte = view.getUint8(offset++);
        const baseCode = typeByte & 0x0F;
        const incrCode = typeByte >> 4;
        if (!codeToType[baseCode] || !codeToType[incrCode]) {
          throw new Error(`Invalid type code in Δelem: ${typeByte}`);
        }
        ΔelemTypes.push({ baseType: codeToType[baseCode], incrType: codeToType[incrCode] });
      }
      const elemTypes = [];
      for (let i = 0; i < elemCount; i++) {
        if (offset >= bundleEnd) throw new Error('Incomplete elem types');
        const typeCode = view.getUint8(offset++);
        if (!codeToType[typeCode]) throw new Error(`Invalid type code in elem: ${typeCode}`);
        elemTypes.push(codeToType[typeCode]);
      }

      // Initialize Δelems and elems arrays on the first chunk that has them.
      if (!result.Δelems.length && ΔelemCount > 0) {
        result.Δelems = Array(ΔelemCount).fill().map(() => []);
      }
      if (!result.elems.length && elemCount > 0) {
        result.elems = Array(elemCount).fill().map(() => []);
      }

      // Read initial values for Δelems (one per column, base type).
      const initialValues = [];
      for (const { baseType } of ΔelemTypes) {
        if (offset + typeToBytes[baseType.name] > bundleEnd) {
          throw new Error('Incomplete initial values');
        }
        initialValues.push(readTypedValue(view, offset, baseType));
        offset += typeToBytes[baseType.name];
      }
      // Skip padding up to the next 4-byte boundary.
      while (offset % 4 !== 0) {
        if (offset >= bundleEnd) throw new Error('Incomplete padding after initial values');
        offset++;
      }

      // Reconstruct j values from their arithmetic progression.
      for (let idx = 0; idx < count; idx++) {
        result.j.push(j0 + idx * Δj);
      }

      // Read record data (column-ordered). Each delta column stores `count`
      // slots; slot 0 is read but discarded — index 0 takes the initial value.
      for (let i = 0; i < ΔelemCount; i++) {
        let current = initialValues[i];
        const values = result.Δelems[i];
        const incrType = ΔelemTypes[i].incrType;
        const isBigInt = typeof current === 'bigint';
        for (let idx = 0; idx < count; idx++) {
          if (offset + typeToBytes[incrType.name] > bundleEnd) {
            throw new Error('Incomplete Δelem data');
          }
          let delta = readTypedValue(view, offset, incrType);
          if (idx === 0) {
            values.push(isBigInt ? Number(current) : current);
          } else {
            if (isBigInt) {
              // BigInt accumulators cannot mix with Number deltas.
              delta = BigInt(delta);
              current += delta;
              values.push(Number(current));
            } else {
              current += delta;
              values.push(current);
            }
          }
          offset += typeToBytes[incrType.name];
        }
      }
      for (let i = 0; i < elemCount; i++) {
        const values = result.elems[i];
        const type = elemTypes[i];
        const isBigInt = type === BigInt64Array || type === BigUint64Array;
        for (let idx = 0; idx < count; idx++) {
          if (offset + typeToBytes[type.name] > bundleEnd) {
            throw new Error('Incomplete elem data');
          }
          const value = readTypedValue(view, offset, type);
          values.push(isBigInt ? Number(value) : value);
          offset += typeToBytes[type.name];
        }
      }
      // Skip padding up to the next 4-byte boundary.
      while (offset % 4 !== 0) {
        if (offset >= bundleEnd) throw new Error('Incomplete padding after record data');
        offset++;
      }
    }
  }

  return result;
}
|
||||
|
||||
|
||||
/**
 * Decode a stream of bundles containing interleaved (type 0x12) chunks into
 * plain column arrays. Same framing as `sequential`, but record data is laid
 * out row-by-row: each record carries one delta per Δelem followed by one
 * value per elem.
 *
 * @param {Uint8Array} binary - The encoded stream.
 * @returns {{i: ?number, j: number[], Δelems: number[][], elems: number[][]}}
 *   Reconstructed columns; BigInt columns are converted to Number.
 * @throws {Error} On malformed/truncated input, a non-0x12 chunk type, or
 *   multiple distinct i values.
 */
function interleaved(binary) {
  if (!(binary instanceof Uint8Array) || binary.length < 4) {
    throw new Error('Invalid binary input');
  }

  const view = new DataView(binary.buffer, binary.byteOffset, binary.byteLength);
  let offset = 0;

  // Initialize result (assuming single i value for simplicity; extend for multiple i values if needed)
  const result = { i: null, j: [], Δelems: [], elems: [] };

  // Process bundles
  while (offset < binary.length) {
    // Read bundle header
    if (offset + 4 > binary.length) throw new Error('Incomplete bundle header');

    const bundleHeader = view.getUint32(offset, true);
    if ((bundleHeader & 0xFF) !== 0x1C) throw new Error('Invalid bundle marker');
    // BUGFIX: use unsigned shift. `>> 8` sign-extends once the stored length
    // reaches 2^23 (header's top bit set), yielding a negative bundleLength.
    const bundleLength = bundleHeader >>> 8;
    offset += 4;
    const bundleEnd = offset + bundleLength;

    if (bundleEnd > binary.length) throw new Error('Bundle length exceeds input size');

    // Process chunks in bundle
    while (offset < bundleEnd) {
      // Read chunk header (fixed 12 bytes)
      if (offset + 12 > bundleEnd) throw new Error('Incomplete chunk header');
      const chunkType = view.getUint8(offset);
      if (chunkType !== 0x12) throw new Error(`Unsupported chunk type: ${chunkType}`);
      offset += 1; // past chunk type
      offset += 1; // skip udv (not decoded here)
      // count is read as Uint16 and so is inherently ≤ 65535; no range check needed.
      const count = view.getUint16(offset, true); offset += 2;
      const iValue = view.getUint16(offset, true); offset += 2;
      const j0 = view.getUint16(offset, true); offset += 2;
      const Δj = view.getInt16(offset, true); offset += 2;
      const ΔelemCount = view.getUint8(offset++); // Δelem_count
      const elemCount = view.getUint8(offset++); // elem_count

      // Set i value (assuming all chunks share the same i)
      if (result.i === null) result.i = iValue;
      else if (result.i !== iValue) throw new Error('Multiple i values not supported');

      // Read preface: one type byte per Δelem (base code in the low nibble,
      // increment code in the high nibble), then one plain code per elem.
      const ΔelemTypes = [];
      for (let i = 0; i < ΔelemCount; i++) {
        if (offset >= bundleEnd) throw new Error('Incomplete Δelem types');
        const typeByte = view.getUint8(offset++);
        const baseCode = typeByte & 0x0F;
        const incrCode = typeByte >> 4;
        if (!codeToType[baseCode] || !codeToType[incrCode]) {
          throw new Error(`Invalid type code in Δelem: ${typeByte}`);
        }
        ΔelemTypes.push({ baseType: codeToType[baseCode], incrType: codeToType[incrCode] });
      }
      const elemTypes = [];
      for (let i = 0; i < elemCount; i++) {
        if (offset >= bundleEnd) throw new Error('Incomplete elem types');
        const typeCode = view.getUint8(offset++);
        if (!codeToType[typeCode]) throw new Error(`Invalid type code in elem: ${typeCode}`);
        elemTypes.push(codeToType[typeCode]);
      }

      // Initialize Δelems and elems arrays on the first chunk that has them.
      if (!result.Δelems.length && ΔelemCount > 0) {
        result.Δelems = Array(ΔelemCount).fill().map(() => []);
      }
      if (!result.elems.length && elemCount > 0) {
        result.elems = Array(elemCount).fill().map(() => []);
      }

      // Read initial values for Δelems (one per column, base type). These
      // also serve as running accumulators while decoding the deltas below.
      const initialValues = [];
      for (const { baseType } of ΔelemTypes) {
        if (offset + typeToBytes[baseType.name] > bundleEnd) {
          throw new Error('Incomplete initial values');
        }
        initialValues.push(readTypedValue(view, offset, baseType));
        offset += typeToBytes[baseType.name];
      }
      // Skip padding up to the next 4-byte boundary.
      while (offset % 4 !== 0) {
        if (offset >= bundleEnd) throw new Error('Incomplete padding after initial values');
        offset++;
      }

      // Reconstruct j values from their arithmetic progression.
      for (let idx = 0; idx < count; idx++) {
        result.j.push(j0 + idx * Δj);
      }

      // Read interleaved record data: per record, all Δelem deltas then all
      // elems. Record 0's delta is read but discarded — index 0 takes the
      // initial value.
      for (let idx = 0; idx < count; idx++) {
        // Read Δelems
        for (let i = 0; i < ΔelemCount; i++) {
          const values = result.Δelems[i];
          const incrType = ΔelemTypes[i].incrType;
          const isBigInt = typeof initialValues[i] === 'bigint';
          if (offset + typeToBytes[incrType.name] > bundleEnd) {
            throw new Error('Incomplete Δelem data');
          }
          let delta = readTypedValue(view, offset, incrType);
          offset += typeToBytes[incrType.name];
          if (idx === 0) {
            values.push(isBigInt ? Number(initialValues[i]) : initialValues[i]);
          } else {
            if (isBigInt) {
              // BigInt accumulators cannot mix with Number deltas.
              delta = BigInt(delta);
              initialValues[i] += delta;
              values.push(Number(initialValues[i]));
            } else {
              initialValues[i] += delta;
              values.push(initialValues[i]);
            }
          }
        }
        // Read elems
        for (let i = 0; i < elemCount; i++) {
          const values = result.elems[i];
          const type = elemTypes[i];
          const isBigInt = type === BigInt64Array || type === BigUint64Array;
          if (offset + typeToBytes[type.name] > bundleEnd) {
            throw new Error('Incomplete elem data');
          }
          const value = readTypedValue(view, offset, type);
          values.push(isBigInt ? Number(value) : value);
          offset += typeToBytes[type.name];
        }
      }
      // Skip padding up to the next 4-byte boundary.
      while (offset % 4 !== 0) {
        if (offset >= bundleEnd) throw new Error('Incomplete padding after record data');
        offset++;
      }
    }
  }

  return result;
}
|
||||
|
||||
/**
 * Read one little-endian value from `view` at `offset`, using the DataView
 * getter that corresponds to the given TypedArray constructor.
 *
 * @param {DataView} view - source of the bytes
 * @param {number} offset - byte offset to read from
 * @param {Function} type - a TypedArray constructor (e.g. Int16Array)
 * @returns {number|bigint} the decoded value (BigInt for the 64-bit types)
 * @throws {Error} when `type` is not one of the supported constructors
 */
function readTypedValue(view, offset, type) {
  if (type === Int8Array) return view.getInt8(offset);
  if (type === Uint8Array) return view.getUint8(offset);
  if (type === Int16Array) return view.getInt16(offset, true);
  if (type === Uint16Array) return view.getUint16(offset, true);
  if (type === Int32Array) return view.getInt32(offset, true);
  if (type === Uint32Array) return view.getUint32(offset, true);
  if (type === Float32Array) return view.getFloat32(offset, true);
  if (type === Float64Array) return view.getFloat64(offset, true);
  if (type === BigInt64Array) return view.getBigInt64(offset, true);
  if (type === BigUint64Array) return view.getBigUint64(offset, true);
  throw new Error(`Unsupported type: ${type.name}`);
}
|
||||
|
||||
module.exports = { sequential, interleaved };
|
||||
380
lib/modules/@dougal/binary/encode.js
Normal file
380
lib/modules/@dougal/binary/encode.js
Normal file
@@ -0,0 +1,380 @@
|
||||
// Wire type codes: maps a TypedArray constructor name to the 4-bit code
// written into chunk prefaces. Code 6 is skipped (it would correspond to
// Float16, which JS has no native TypedArray for), so Float32Array takes 7.
const typeToCode = {
  Int8Array: 0,
  Uint8Array: 1,
  Int16Array: 2,
  Uint16Array: 3,
  Int32Array: 4,
  Uint32Array: 5,
  Float32Array: 7, // Float16 not natively supported in JS, use Float32
  Float64Array: 8,
  BigInt64Array: 9,
  BigUint64Array: 10
};

// Byte width of one element for each supported TypedArray type; used to
// advance the write offset after emitting a value and when pre-computing
// chunk sizes.
const typeToBytes = {
  Int8Array: 1,
  Uint8Array: 1,
  Int16Array: 2,
  Uint16Array: 2,
  Int32Array: 4,
  Uint32Array: 4,
  Float32Array: 4,
  Float64Array: 8,
  BigInt64Array: 8,
  BigUint64Array: 8
};
|
||||
|
||||
/**
 * Encode grid-indexed records into the packed "sequential" binary layout
 * (chunk type 0x11): within a chunk, each element's values are written as
 * one contiguous run (all increments of Δelem 1, then Δelem 2, …, then all
 * values of elem 1, …) rather than interleaved per record.
 *
 * Records are grouped by their "i" value; within a group they are sorted
 * by "j" and split into chunks wherever the "j" progression breaks its
 * constant step (Δj) or a chunk would exceed 65536 records (count is a
 * Uint16 on the wire). Chunks are packed into bundles capped at 0xFFFFFF
 * bytes of payload (24-bit length field).
 *
 * @param {Array<Object>} json - source records
 * @param {Function} iGetter - record => constant "i" index (e.g. sequence)
 * @param {Function} jGetter - record => linearly varying "j" index (e.g. point)
 * @param {Array<{key: Function, baseType: Function, incrType: Function}>} Δelems
 *        delta-encoded elements: first value written as baseType, successive
 *        increments as incrType
 * @param {Array<{key: Function, type: Function}>} elems - directly encoded elements
 * @param {number} udv - user-defined value byte copied into each chunk header
 * @returns {Uint8Array} concatenated bundles; empty Uint8Array for empty input
 * @throws {Error} on non-function getters, missing i/j/element values, or
 *         a chunk exceeding 65535 records
 */
function sequential(json, iGetter, jGetter, Δelems = [], elems = [], udv = 0) {
  if (!Array.isArray(json) || !json.length) return new Uint8Array(0);
  if (typeof iGetter !== 'function' || typeof jGetter !== 'function') throw new Error('i and j must be getter functions');
  Δelems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`Δelems[${idx}].key must be a getter function`);
  });
  elems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`elems[${idx}].key must be a getter function`);
  });

  // Group records by i value
  const groups = new Map();
  for (const record of json) {
    const iValue = iGetter(record);
    if (iValue == null) throw new Error('Missing i value from getter');
    if (!groups.has(iValue)) groups.set(iValue, []);
    groups.get(iValue).push(record);
  }

  const maxBundleSize = 0xFFFFFF; // Max bundle length (24 bits)
  const buffers = [];

  // Process each group (i value)
  for (const [iValue, records] of groups) {
    // Sort records by j to ensure consistent order
    records.sort((a, b) => jGetter(a) - jGetter(b));
    const jValues = records.map(jGetter);
    if (jValues.some(v => v == null)) throw new Error('Missing j value from getter');

    // Split records into chunks based on Δj continuity
    const chunks = [];
    let currentChunk = [records[0]];
    let currentJ0 = jValues[0];
    let currentΔj = records.length > 1 ? jValues[1] - jValues[0] : 0;

    for (let idx = 1; idx < records.length; idx++) {
      // chunkIndex = number of records already committed to earlier chunks,
      // so (idx - chunkIndex) is this record's position within the current chunk.
      const chunkIndex = chunks.reduce((sum, c) => sum + c.records.length, 0);
      const expectedJ = currentJ0 + (idx - chunkIndex) * currentΔj;
      // Break the chunk when j deviates from the linear progression or the
      // chunk would overflow the Uint16 count field.
      if (jValues[idx] !== expectedJ || idx - chunkIndex >= 65536) {
        chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
        currentChunk = [records[idx]];
        currentJ0 = jValues[idx];
        currentΔj = idx + 1 < records.length ? jValues[idx + 1] - jValues[idx] : 0;
      } else {
        currentChunk.push(records[idx]);
      }
    }
    if (currentChunk.length > 0) {
      chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
    }

    // Calculate total size for all chunks in this group by simulating offsets
    const chunkSizes = chunks.map(({ records: chunkRecords }) => {
      if (chunkRecords.length > 65535) throw new Error(`Chunk size exceeds 65535 for i=${iValue}`);
      let simulatedOffset = 0; // Relative to chunk start
      simulatedOffset += 12; // Header
      simulatedOffset += Δelems.length + elems.length; // Preface
      simulatedOffset += Δelems.reduce((sum, e) => sum + typeToBytes[e.baseType.name], 0); // Initial values
      while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after initial
      simulatedOffset += chunkRecords.length * (
        Δelems.reduce((sum, e) => sum + typeToBytes[e.incrType.name], 0) +
        elems.reduce((sum, e) => sum + typeToBytes[e.type.name], 0)
      ); // Record data
      while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after record
      return simulatedOffset;
    });
    const totalChunkSize = chunkSizes.reduce((sum, size) => sum + size, 0);

    // Start a new bundle if needed
    const lastBundle = buffers[buffers.length - 1];
    if (!lastBundle || lastBundle.offset + totalChunkSize > maxBundleSize) {
      // offset starts at 4 to reserve room for the 4-byte bundle header.
      buffers.push({ offset: 4, buffer: null, view: null });
    }

    // Initialize DataView for current bundle
    const currentBundle = buffers[buffers.length - 1];
    if (!currentBundle.buffer) {
      const requiredSize = totalChunkSize + 4;
      currentBundle.buffer = new ArrayBuffer(requiredSize);
      currentBundle.view = new DataView(currentBundle.buffer);
    }

    // Process each chunk
    for (const { records: chunkRecords, j0, Δj } of chunks) {
      // chunkSizes was pre-computed in chunk order; consume it in step.
      const chunkSize = chunkSizes.shift();

      // Ensure buffer is large enough
      // (happens when a later group is appended to an existing bundle)
      if (currentBundle.offset + chunkSize > currentBundle.buffer.byteLength) {
        const newSize = currentBundle.offset + chunkSize;
        const newBuffer = new ArrayBuffer(newSize);
        new Uint8Array(newBuffer).set(new Uint8Array(currentBundle.buffer));
        currentBundle.buffer = newBuffer;
        currentBundle.view = new DataView(newBuffer);
      }

      // Write chunk header (all multi-byte fields little-endian)
      let offset = currentBundle.offset;
      currentBundle.view.setUint8(offset++, 0x11); // Chunk type
      currentBundle.view.setUint8(offset++, udv); // udv
      currentBundle.view.setUint16(offset, chunkRecords.length, true); offset += 2; // count
      currentBundle.view.setUint16(offset, iValue, true); offset += 2; // i
      currentBundle.view.setUint16(offset, j0, true); offset += 2; // j0
      currentBundle.view.setInt16(offset, Δj, true); offset += 2; // Δj
      currentBundle.view.setUint8(offset++, Δelems.length); // Δelem_count
      currentBundle.view.setUint8(offset++, elems.length); // elem_count

      // Write chunk preface (element types)
      // Δ elements pack two 4-bit codes per byte: increment type in the
      // high nibble, base type in the low nibble.
      for (const elem of Δelems) {
        const baseCode = typeToCode[elem.baseType.name];
        const incrCode = typeToCode[elem.incrType.name];
        currentBundle.view.setUint8(offset++, (incrCode << 4) | baseCode);
      }
      for (const elem of elems) {
        currentBundle.view.setUint8(offset++, typeToCode[elem.type.name]);
      }

      // Write initial values for Δelems
      for (const elem of Δelems) {
        const value = elem.key(chunkRecords[0]);
        if (value == null) throw new Error('Missing Δelem value from getter');
        writeTypedValue(currentBundle.view, offset, value, elem.baseType);
        offset += typeToBytes[elem.baseType.name];
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Write record data (non-interleaved)
      // For each Δ element, a full run of increments; the first record's
      // increment is always 0 (its actual value was written above).
      for (const elem of Δelems) {
        let prev = elem.key(chunkRecords[0]);
        for (let idx = 0; idx < chunkRecords.length; idx++) {
          const value = idx === 0 ? 0 : elem.key(chunkRecords[idx]) - prev;
          writeTypedValue(currentBundle.view, offset, value, elem.incrType);
          offset += typeToBytes[elem.incrType.name];
          prev = elem.key(chunkRecords[idx]);
        }
      }
      for (const elem of elems) {
        for (const record of chunkRecords) {
          const value = elem.key(record);
          if (value == null) throw new Error('Missing elem value from getter');
          writeTypedValue(currentBundle.view, offset, value, elem.type);
          offset += typeToBytes[elem.type.name];
        }
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Update bundle offset
      currentBundle.offset = offset;
    }

    // Update bundle header
    // NOTE(review): this writes type 0x1C in the low byte and the 24-bit
    // payload length in the upper bytes (little-endian); the usage example
    // in index.js shows the length in the leading bytes instead — confirm
    // against the decoder's bundle-header parsing.
    currentBundle.view.setUint32(0, 0x1C | ((currentBundle.offset - 4) << 8), true);
  }

  // Combine buffers into final Uint8Array
  const finalLength = buffers.reduce((sum, b) => sum + b.offset, 0);
  const result = new Uint8Array(finalLength);
  let offset = 0;
  for (const { buffer, offset: bundleOffset } of buffers) {
    // Each bundle's buffer may be over-allocated; copy only the used prefix.
    result.set(new Uint8Array(buffer, 0, bundleOffset), offset);
    offset += bundleOffset;
  }

  return result;
}
|
||||
|
||||
|
||||
/**
 * Encode grid-indexed records into the packed "interleaved" binary layout
 * (chunk type 0x12): within a chunk, one record's values are written
 * together (Δelem 1 increment, Δelem 2 increment, …, elem 1, elem 2, …)
 * before moving to the next record.
 *
 * Records are grouped by their "i" value; within a group they are sorted
 * by "j" and split into chunks wherever the "j" progression breaks its
 * constant step (Δj) or a chunk would exceed 65536 records (count is a
 * Uint16 on the wire). Chunks are packed into bundles capped at 0xFFFFFF
 * bytes of payload (24-bit length field).
 *
 * @param {Array<Object>} json - source records
 * @param {Function} iGetter - record => constant "i" index (e.g. sequence)
 * @param {Function} jGetter - record => linearly varying "j" index (e.g. point)
 * @param {Array<{key: Function, baseType: Function, incrType: Function}>} Δelems
 *        delta-encoded elements: first value written as baseType, successive
 *        increments as incrType
 * @param {Array<{key: Function, type: Function}>} elems - directly encoded elements
 * @param {number} udv - user-defined value byte copied into each chunk header
 * @returns {Uint8Array} concatenated bundles; empty Uint8Array for empty input
 * @throws {Error} on non-function getters, missing i/j/element values, or
 *         a chunk exceeding 65535 records
 */
function interleaved(json, iGetter, jGetter, Δelems = [], elems = [], udv = 0) {
  if (!Array.isArray(json) || !json.length) return new Uint8Array(0);
  if (typeof iGetter !== 'function' || typeof jGetter !== 'function') throw new Error('i and j must be getter functions');
  Δelems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`Δelems[${idx}].key must be a getter function`);
  });
  elems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`elems[${idx}].key must be a getter function`);
  });

  // Group records by i value
  const groups = new Map();
  for (const record of json) {
    const iValue = iGetter(record);
    if (iValue == null) throw new Error('Missing i value from getter');
    if (!groups.has(iValue)) groups.set(iValue, []);
    groups.get(iValue).push(record);
  }

  const maxBundleSize = 0xFFFFFF; // Max bundle length (24 bits)
  const buffers = [];

  // Process each group (i value)
  for (const [iValue, records] of groups) {
    // Sort records by j to ensure consistent order
    records.sort((a, b) => jGetter(a) - jGetter(b));
    const jValues = records.map(jGetter);
    if (jValues.some(v => v == null)) throw new Error('Missing j value from getter');

    // Split records into chunks based on Δj continuity
    const chunks = [];
    let currentChunk = [records[0]];
    let currentJ0 = jValues[0];
    let currentΔj = records.length > 1 ? jValues[1] - jValues[0] : 0;

    for (let idx = 1; idx < records.length; idx++) {
      // chunkIndex = number of records already committed to earlier chunks,
      // so (idx - chunkIndex) is this record's position within the current chunk.
      const chunkIndex = chunks.reduce((sum, c) => sum + c.records.length, 0);
      const expectedJ = currentJ0 + (idx - chunkIndex) * currentΔj;
      // Break the chunk when j deviates from the linear progression or the
      // chunk would overflow the Uint16 count field.
      if (jValues[idx] !== expectedJ || idx - chunkIndex >= 65536) {
        chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
        currentChunk = [records[idx]];
        currentJ0 = jValues[idx];
        currentΔj = idx + 1 < records.length ? jValues[idx + 1] - jValues[idx] : 0;
      } else {
        currentChunk.push(records[idx]);
      }
    }
    if (currentChunk.length > 0) {
      chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
    }

    // Calculate total size for all chunks in this group by simulating offsets
    const chunkSizes = chunks.map(({ records: chunkRecords }) => {
      if (chunkRecords.length > 65535) throw new Error(`Chunk size exceeds 65535 for i=${iValue}`);
      let simulatedOffset = 0; // Relative to chunk start
      simulatedOffset += 12; // Header
      simulatedOffset += Δelems.length + elems.length; // Preface
      simulatedOffset += Δelems.reduce((sum, e) => sum + typeToBytes[e.baseType.name], 0); // Initial values
      while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after initial
      simulatedOffset += chunkRecords.length * (
        Δelems.reduce((sum, e) => sum + typeToBytes[e.incrType.name], 0) +
        elems.reduce((sum, e) => sum + typeToBytes[e.type.name], 0)
      ); // Interleaved record data
      while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after record
      return simulatedOffset;
    });
    const totalChunkSize = chunkSizes.reduce((sum, size) => sum + size, 0);

    // Start a new bundle if needed
    const lastBundle = buffers[buffers.length - 1];
    if (!lastBundle || lastBundle.offset + totalChunkSize > maxBundleSize) {
      // offset starts at 4 to reserve room for the 4-byte bundle header.
      buffers.push({ offset: 4, buffer: null, view: null });
    }

    // Initialize DataView for current bundle
    const currentBundle = buffers[buffers.length - 1];
    if (!currentBundle.buffer) {
      const requiredSize = totalChunkSize + 4;
      currentBundle.buffer = new ArrayBuffer(requiredSize);
      currentBundle.view = new DataView(currentBundle.buffer);
    }

    // Process each chunk
    for (const { records: chunkRecords, j0, Δj } of chunks) {
      // chunkSizes was pre-computed in chunk order; consume it in step.
      const chunkSize = chunkSizes.shift();

      // Ensure buffer is large enough
      // (happens when a later group is appended to an existing bundle)
      if (currentBundle.offset + chunkSize > currentBundle.buffer.byteLength) {
        const newSize = currentBundle.offset + chunkSize;
        const newBuffer = new ArrayBuffer(newSize);
        new Uint8Array(newBuffer).set(new Uint8Array(currentBundle.buffer));
        currentBundle.buffer = newBuffer;
        currentBundle.view = new DataView(newBuffer);
      }

      // Write chunk header (all multi-byte fields little-endian)
      let offset = currentBundle.offset;
      currentBundle.view.setUint8(offset++, 0x12); // Chunk type
      currentBundle.view.setUint8(offset++, udv); // udv
      currentBundle.view.setUint16(offset, chunkRecords.length, true); offset += 2; // count
      currentBundle.view.setUint16(offset, iValue, true); offset += 2; // i
      currentBundle.view.setUint16(offset, j0, true); offset += 2; // j0
      currentBundle.view.setInt16(offset, Δj, true); offset += 2; // Δj
      currentBundle.view.setUint8(offset++, Δelems.length); // Δelem_count
      currentBundle.view.setUint8(offset++, elems.length); // elem_count

      // Write chunk preface (element types)
      // Δ elements pack two 4-bit codes per byte: increment type in the
      // high nibble, base type in the low nibble.
      for (const elem of Δelems) {
        const baseCode = typeToCode[elem.baseType.name];
        const incrCode = typeToCode[elem.incrType.name];
        currentBundle.view.setUint8(offset++, (incrCode << 4) | baseCode);
      }
      for (const elem of elems) {
        currentBundle.view.setUint8(offset++, typeToCode[elem.type.name]);
      }

      // Write initial values for Δelems
      for (const elem of Δelems) {
        const value = elem.key(chunkRecords[0]);
        if (value == null) throw new Error('Missing Δelem value from getter');
        writeTypedValue(currentBundle.view, offset, value, elem.baseType);
        offset += typeToBytes[elem.baseType.name];
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Write interleaved record data
      // Each record contributes all of its Δ increments followed by all of
      // its direct values. The first record's increments are always 0 (its
      // actual values were written above).
      const prevValues = Δelems.map(elem => elem.key(chunkRecords[0]));
      for (let idx = 0; idx < chunkRecords.length; idx++) {
        // Write Δelems increments
        for (let i = 0; i < Δelems.length; i++) {
          const elem = Δelems[i];
          const value = idx === 0 ? 0 : elem.key(chunkRecords[idx]) - prevValues[i];
          writeTypedValue(currentBundle.view, offset, value, elem.incrType);
          offset += typeToBytes[elem.incrType.name];
          prevValues[i] = elem.key(chunkRecords[idx]);
        }
        // Write elems
        for (const elem of elems) {
          const value = elem.key(chunkRecords[idx]);
          if (value == null) throw new Error('Missing elem value from getter');
          writeTypedValue(currentBundle.view, offset, value, elem.type);
          offset += typeToBytes[elem.type.name];
        }
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Update bundle offset
      currentBundle.offset = offset;
    }

    // Update bundle header
    // NOTE(review): this writes type 0x1C in the low byte and the 24-bit
    // payload length in the upper bytes (little-endian); the usage example
    // in index.js shows the length in the leading bytes instead — confirm
    // against the decoder's bundle-header parsing.
    currentBundle.view.setUint32(0, 0x1C | ((currentBundle.offset - 4) << 8), true);
  }

  // Combine buffers into final Uint8Array
  const finalLength = buffers.reduce((sum, b) => sum + b.offset, 0);
  const result = new Uint8Array(finalLength);
  let offset = 0;
  for (const { buffer, offset: bundleOffset } of buffers) {
    // Each bundle's buffer may be over-allocated; copy only the used prefix.
    result.set(new Uint8Array(buffer, 0, bundleOffset), offset);
    offset += bundleOffset;
  }

  return result;
}
|
||||
|
||||
|
||||
/**
 * Write one value into `view` at `offset`, little-endian, using the
 * DataView setter that corresponds to the given TypedArray constructor.
 * For the 64-bit types, `value` is coerced with BigInt() first.
 *
 * @param {DataView} view - destination of the bytes
 * @param {number} offset - byte offset to write at
 * @param {number|bigint} value - value to encode
 * @param {Function} type - a TypedArray constructor (e.g. Int16Array)
 * @throws {Error} when `type` is not one of the supported constructors
 */
function writeTypedValue(view, offset, value, type) {
  if (type === Int8Array) return view.setInt8(offset, value);
  if (type === Uint8Array) return view.setUint8(offset, value);
  if (type === Int16Array) return view.setInt16(offset, value, true);
  if (type === Uint16Array) return view.setUint16(offset, value, true);
  if (type === Int32Array) return view.setInt32(offset, value, true);
  if (type === Uint32Array) return view.setUint32(offset, value, true);
  if (type === Float32Array) return view.setFloat32(offset, value, true);
  if (type === Float64Array) return view.setFloat64(offset, value, true);
  if (type === BigInt64Array) return view.setBigInt64(offset, BigInt(value), true);
  if (type === BigUint64Array) return view.setBigUint64(offset, BigInt(value), true);
  throw new Error(`Unsupported type: ${type.name}`);
}
|
||||
|
||||
module.exports = { sequential, interleaved };
|
||||
139
lib/modules/@dougal/binary/index.js
Normal file
139
lib/modules/@dougal/binary/index.js
Normal file
@@ -0,0 +1,139 @@
|
||||
|
||||
/** Binary encoder
|
||||
*
|
||||
* This module encodes scalar data from a grid-like source
|
||||
* into a packed binary format for bandwidth efficiency and
|
||||
* speed of access.
|
||||
*
|
||||
* Data are indexed by i & j values, with "i" being constant
|
||||
* (e.g., a sequence or line number) and "j" expected to change
|
||||
* by a constant, linear amount (e.g., point numbers). All data
|
||||
* from consecutive "j" values will be encoded as a single array
|
||||
* (or series of arrays if multiple values are encoded).
|
||||
* If there is a jump in the "j" progression, a new "chunk" will
|
||||
* be started with a new array (or series of arrays).
|
||||
*
|
||||
* Multiple values may be encoded per (i, j) pair, using any of
|
||||
* the types supported by JavaScript's TypedArray except for
|
||||
* Float16 and Uint8Clamped. Each variable can be encoded with
|
||||
* a different size.
|
||||
*
|
||||
* Values may be encoded directly or as deltas from an initial
|
||||
* value. The latter is particularly efficient when dealing with
|
||||
* monotonically incrementing data, such as timestamps.
|
||||
*
|
||||
* The conceptual packet format for sequentially encoded data
|
||||
* looks like this:
|
||||
*
|
||||
* <msg-type> <count: x> <i> <j0> <Δj>
|
||||
*
|
||||
* <Δelement_count: y>
|
||||
* <element_count: z>
|
||||
*
|
||||
* <Δelement_1_type_base> … <Δelement_y_type_base>
|
||||
* <Δelement_1_type_incr> … <Δelement_y_type_incr>
|
||||
* <elem_1_type> … <elem_z_type>
|
||||
*
|
||||
* <Δelement_1_first> … <Δelement_y_first>
|
||||
*
|
||||
* <Δelem_1_0> … <Δelem_1_x>
|
||||
* …
|
||||
* <Δelem_y_0> … <Δelem_y_x>
|
||||
* <elem_1_0> … <elem_1_x>
|
||||
* …
|
||||
* <elem_z_0> … <elem_z_x>
|
||||
*
|
||||
*
|
||||
* The conceptual packet format for interleaved encoded data
|
||||
* looks like this:
|
||||
*
|
||||
*
|
||||
* <msg-type> <count: x> <i> <j0> <Δj>
|
||||
*
|
||||
* <Δelement_count: y>
|
||||
* <element_count: z>
|
||||
*
|
||||
* <Δelement_1_type_base> … <Δelement_y_type_base>
|
||||
* <Δelement_1_type_incr> … <Δelement_y_type_incr>
|
||||
* <elem_1_type> … <elem_z_type>
|
||||
*
|
||||
* <Δelement_1_first> … <Δelement_y_first>
|
||||
*
|
||||
* <Δelem_1_0> <Δelem_2_0> … <Δelem_y_0> <elem_1_0> <elem_2_0> … <elem_z_0>
|
||||
* <Δelem_1_1> <Δelem_2_1> … <Δelem_y_1> <elem_1_1> <elem_2_1> … <elem_z_1>
|
||||
* …
|
||||
* <Δelem_1_x> <Δelem_2_x> … <Δelem_y_x> <elem_1_x> <elem_2_x> … <elem_z_x>
|
||||
*
|
||||
*
|
||||
* Usage example:
|
||||
*
|
||||
* json = [
|
||||
* {
|
||||
* sequence: 7,
|
||||
* sailline: 5354,
|
||||
* line: 5356,
|
||||
* point: 1068,
|
||||
* tstamp: 1695448704372,
|
||||
* objrefraw: 3,
|
||||
* objreffinal: 4
|
||||
* },
|
||||
* {
|
||||
* sequence: 7,
|
||||
* sailline: 5354,
|
||||
* line: 5352,
|
||||
* point: 1070,
|
||||
* tstamp: 1695448693612,
|
||||
* objrefraw: 2,
|
||||
* objreffinal: 3
|
||||
* },
|
||||
* {
|
||||
* sequence: 7,
|
||||
* sailline: 5354,
|
||||
* line: 5356,
|
||||
* point: 1072,
|
||||
* tstamp: 1695448684624,
|
||||
* objrefraw: 3,
|
||||
* objreffinal: 4
|
||||
* }
|
||||
* ];
|
||||
*
|
||||
* deltas = [
|
||||
* { key: el => el.tstamp, baseType: BigUint64Array, incrType: Int16Array }
|
||||
* ];
|
||||
*
|
||||
* elems = [
|
||||
* { key: el => el.objrefraw, type: Uint8Array },
|
||||
* { key: el => el.objreffinal, type: Uint8Array }
|
||||
* ];
|
||||
*
|
||||
* i = el => el.sequence;
|
||||
*
|
||||
* j = el => el.point;
|
||||
*
|
||||
* bundle = encode.sequential(json, i, j, deltas, elems);
|
||||
*
|
||||
* // bundle:
|
||||
*
|
||||
* Uint8Array(40) [
|
||||
* 36, 0, 0, 28, 17, 0, 3, 0, 7, 0,
|
||||
* 44, 4, 2, 0, 1, 2, 42, 1, 1, 116,
|
||||
* 37, 158, 192, 138, 1, 0, 0, 0, 0, 0,
|
||||
* 248, 213, 228, 220, 3, 2, 3, 4, 3, 4
|
||||
* ]
|
||||
*
|
||||
* decode.sequential(bundle);
|
||||
*
|
||||
* {
|
||||
* i: 7,
|
||||
* j: [ 1068, 1070, 1072 ],
|
||||
* 'Δelems': [ [ 1695448704372, 1695448693612, 1695448684624 ] ],
|
||||
* elems: [ [ 3, 2, 3 ], [ 4, 3, 4 ] ]
|
||||
* }
|
||||
*
|
||||
*/
|
||||
|
||||
// Public API: encoder and decoder functions under their own namespaces,
// plus everything exported by ./classes spread at the top level.
module.exports = {
  encode: {...require('./encode')},
  decode: {...require('./decode')},
  ...require('./classes')
};
|
||||
12
lib/modules/@dougal/binary/package.json
Normal file
12
lib/modules/@dougal/binary/package.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "@dougal/binary",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"description": ""
|
||||
}
|
||||
25
lib/modules/@dougal/concurrency/index.js
Normal file
25
lib/modules/@dougal/concurrency/index.js
Normal file
@@ -0,0 +1,25 @@
|
||||
/**
 * Limits the number of concurrently running async tasks.
 *
 * Tasks submitted beyond `maxConcurrent` wait in a FIFO queue; each
 * settling task (success or failure) wakes the longest-waiting one.
 */
class ConcurrencyLimiter {

  /**
   * @param {number} maxConcurrent - maximum number of tasks allowed to run at once
   */
  constructor(maxConcurrent) {
    this.maxConcurrent = maxConcurrent;
    this.active = 0;
    this.queue = [];
  }

  /**
   * Run `task` as soon as a concurrency slot is free.
   *
   * @param {Function} task - zero-argument function returning a value or Promise
   * @returns {Promise<*>} resolves/rejects with the task's own result
   */
  async enqueue(task) {
    // Re-check the limit after every wake-up. With a single `if`, a caller
    // arriving synchronously after a slot is freed — but before the queued
    // waiter's microtask resumes — would take the slot, and the waiter
    // would then also proceed, pushing `active` above `maxConcurrent`.
    while (this.active >= this.maxConcurrent) {
      await new Promise(resolve => this.queue.push(resolve));
    }
    this.active++;
    try {
      return await task();
    } finally {
      this.active--;
      if (this.queue.length > 0) {
        this.queue.shift()(); // wake the longest-waiting task
      }
    }
  }
}
|
||||
|
||||
module.exports = ConcurrencyLimiter;
|
||||
12
lib/modules/@dougal/concurrency/package.json
Normal file
12
lib/modules/@dougal/concurrency/package.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "@dougal/concurrency",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"description": ""
|
||||
}
|
||||
75
lib/modules/@dougal/organisations/Organisation.js
Normal file
75
lib/modules/@dougal/organisations/Organisation.js
Normal file
@@ -0,0 +1,75 @@
|
||||
|
||||
/**
 * The set of operations (read/write/edit plus arbitrary named flags)
 * granted within a single organisation.
 *
 * The constructor returns a Proxy: reads of unknown properties fall
 * through to the `other` bag of custom flags, and every property write
 * is coerced to a boolean before being stored.
 */
class Organisation {

  /**
   * @param {Object} [data] - initial flags; only read/write/edit are taken
   *        from `data` and each is coerced to a boolean
   * @returns {Proxy} a proxy wrapping this instance (see class docs)
   */
  constructor (data) {

    this.read = !!data?.read;
    this.write = !!data?.write;
    this.edit = !!data?.edit;

    // Arbitrary additional operation flags beyond read/write/edit.
    this.other = {};

    return new Proxy(this, {
      get (target, prop) {
        // Known own/prototype properties win; anything else comes from
        // the custom-flag bag (undefined when never set).
        if (prop in target) {
          return target[prop];
        }
        return target.other[prop];
      },

      set (target, prop, value) {
        // Every stored flag is a boolean, whatever the caller passed.
        // (The previous revision also computed the old value here, but
        // never used it — dropped as dead code.)
        const newValue = Boolean(value);

        if (["read", "write", "edit"].includes(prop)) {
          target[prop] = newValue;
        } else {
          target.other[prop] = newValue;
        }

        return true;
      }
    });
  }

  /**
   * Plain-object view of all flags; used automatically by JSON.stringify.
   * @returns {Object} read/write/edit plus any custom flags
   */
  toJSON () {
    return {
      read: this.read,
      write: this.write,
      edit: this.edit,
      ...this.other
    };
  }

  /**
   * JSON string of the flags; `replacer`/`space` pass straight through
   * to JSON.stringify.
   */
  toString (replacer, space) {
    return JSON.stringify(this.toJSON(), replacer, space);
  }

  /** Limit the operations to only those allowed by `other`
   *
   * Intersects only the built-in read/write/edit flags; custom flags
   * are not carried over into the result.
   *
   * @param {Organisation} other - the limiting set of operations
   * @returns {Organisation} a new instance with intersected flags
   */
  filter (other) {
    const filteredOrganisation = new Organisation();

    filteredOrganisation.read = this.read && other.read;
    filteredOrganisation.write = this.write && other.write;
    filteredOrganisation.edit = this.edit && other.edit;

    return filteredOrganisation;
  }

  /** Alias for filter(). */
  intersect (other) {
    return this.filter(other);
  }

}
|
||||
|
||||
|
||||
if (typeof module !== 'undefined' && module.exports) {
  module.exports = Organisation; // CJS export
}

// ESM export
// NOTE(review): under CommonJS, `exports` still references the original
// module.exports object after the reassignment above, so this `default`
// property is not visible to require(); presumably intended for other
// loaders/bundlers — verify against consumers.
if (typeof exports !== 'undefined' && !exports.default) {
  exports.default = Organisation; // ESM export
}
|
||||
225
lib/modules/@dougal/organisations/Organisations.js
Normal file
225
lib/modules/@dougal/organisations/Organisations.js
Normal file
@@ -0,0 +1,225 @@
|
||||
const Organisation = require('./Organisation');
|
||||
|
||||
class Organisations {
|
||||
|
||||
#values = {}
|
||||
|
||||
#overlord
|
||||
|
||||
static entries (orgs) {
|
||||
return orgs.names().map(name => [name, orgs.get(name)]);
|
||||
}
|
||||
|
||||
constructor (data, overlord) {
|
||||
if (data instanceof Organisations) {
|
||||
for (const [name, value] of Organisations.entries(data)) {
|
||||
this.set(name, new Organisation(value));
|
||||
}
|
||||
} else if (data instanceof Object) {
|
||||
for (const [name, value] of Object.entries(data)) {
|
||||
this.set(name, new Organisation(value));
|
||||
}
|
||||
} else if (data instanceof String) {
|
||||
this.set(data, new Organisation());
|
||||
} else if (typeof data !== "undefined") {
|
||||
throw new Error("Invalid constructor argument");
|
||||
}
|
||||
|
||||
if (overlord) {
|
||||
this.#overlord = overlord;
|
||||
}
|
||||
}
|
||||
|
||||
get values () {
|
||||
return this.#values;
|
||||
}
|
||||
|
||||
get length () {
|
||||
return this.names().length;
|
||||
}
|
||||
|
||||
get overlord () {
|
||||
return this.#overlord;
|
||||
}
|
||||
|
||||
set overlord (v) {
|
||||
this.#overlord = new Organisations(v);
|
||||
}
|
||||
|
||||
/** Get the operations for `name`
|
||||
*/
|
||||
get (name) {
|
||||
const key = Object.keys(this.values).find( k => k.toLowerCase() == name.toLowerCase() ) ?? name;
|
||||
return this.values[key];
|
||||
}
|
||||
|
||||
/** Set the operations for `name` to `value`
|
||||
*
|
||||
* If we have an overlord, ensure we cannot:
|
||||
*
|
||||
* 1. Add new organisations which the overlord
|
||||
* is not a member of
|
||||
* 2. Access operations that the overlord is not
|
||||
* allowed to access
|
||||
*/
|
||||
set (name, value) {
|
||||
name = String(name).trim();
|
||||
const key = Object.keys(this.values).find( k => k.toLowerCase() == name.toLowerCase() ) ?? name;
|
||||
const org = new Organisation(value);
|
||||
|
||||
if (this.overlord) {
|
||||
const parent = this.overlord.get(key) ?? this.overlord.get("*");
|
||||
if (parent) {
|
||||
this.values[key] = parent.filter(org);
|
||||
}
|
||||
} else {
|
||||
this.values[key] = new Organisation(value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Enable the operation `op` in all organisations
|
||||
*/
|
||||
enableOperation (op) {
|
||||
if (this.overlord) {
|
||||
Object.keys(this.#values)
|
||||
.filter( key => (this.overlord.get(key) ?? this.overlord.get("*"))?.[op] )
|
||||
.forEach( key => this.#values[key][op] = true );
|
||||
} else {
|
||||
Object.values(this.#values).forEach( org => org[op] = true );
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Disable the operation `op` in all organisations
|
||||
*/
|
||||
disableOperation (op) {
|
||||
Object.values(this.#values).forEach( org => org[op] = false );
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Create a new organisation object limited by the caller's rights
|
||||
*
|
||||
* The spawned Organisations instance will have the same organisations
|
||||
* and rights as the caller minus the applied `mask`. With the default
|
||||
* mask, the spawned object will inherit all rights except for `edit`
|
||||
* rights.
|
||||
*
|
||||
* The "*" organisation must be explicitly assigned. It is not inherited.
|
||||
*/
|
||||
spawn (mask = {read: true, write: true, edit: false}) {
|
||||
|
||||
const parent = new Organisations();
|
||||
const wildcard = this.get("*").edit; // If true, we can spawn everywhere
|
||||
|
||||
this.entries().forEach( ([k, v]) => {
|
||||
// if (k != "*") { // This organisation is not inherited
|
||||
if (v.edit || wildcard) { // We have the right to spawn in this organisation
|
||||
const o = new Organisation({
|
||||
read: v.read && mask.read,
|
||||
write: v.write && mask.write,
|
||||
edit: v.edit && mask.edit
|
||||
});
|
||||
parent.set(k, o);
|
||||
}
|
||||
// }
|
||||
});
|
||||
|
||||
return new Organisations({}, parent);
|
||||
}
|
||||
|
||||
remove (name) {
|
||||
const key = Object.keys(this.values).find( k => k.toLowerCase() == name.toLowerCase() ) ?? name;
|
||||
delete this.values[key];
|
||||
}
|
||||
|
||||
/** Return the list of organisation names
|
||||
*/
|
||||
names () {
|
||||
return Object.keys(this.values);
|
||||
}
|
||||
|
||||
/** Same as this.get(name)
|
||||
*/
|
||||
value (name) {
|
||||
return this.values[name];
|
||||
}
|
||||
|
||||
/** Same as Object.prototype.entries
|
||||
*/
|
||||
entries () {
|
||||
return this.names().map( name => [ name, this.value(name) ] );
|
||||
}
|
||||
|
||||
/** Return true if the named organisation is present
|
||||
*/
|
||||
has (name) {
|
||||
return Boolean(this.value(name));
|
||||
}
|
||||
|
||||
/** Return only those of our organisations
|
||||
* and operations present in `other`
|
||||
*/
|
||||
filter (other) {
|
||||
const filteredOrganisations = new Organisations();
|
||||
|
||||
const wildcard = other.value("*");
|
||||
|
||||
for (const [name, org] of this.entries()) {
|
||||
const ownOrg = other.value(name) ?? wildcard;
|
||||
if (ownOrg) {
|
||||
filteredOrganisations.set(name, org.filter(ownOrg))
|
||||
}
|
||||
}
|
||||
|
||||
return filteredOrganisations;
|
||||
}
|
||||
|
||||
/** Return only those organisations
|
||||
* that have access to the required
|
||||
* operation
|
||||
*/
|
||||
accessToOperation (op) {
|
||||
const filteredOrganisations = new Organisations();
|
||||
|
||||
for (const [name, org] of this.entries()) {
|
||||
if (org[op]) {
|
||||
filteredOrganisations.set(name, org);
|
||||
}
|
||||
}
|
||||
|
||||
return filteredOrganisations;
|
||||
}
|
||||
|
||||
toJSON () {
|
||||
const obj = {};
|
||||
for (const key in this.values) {
|
||||
obj[key] = this.values[key].toJSON();
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
||||
toString (replacer, space) {
|
||||
return JSON.stringify(this.toJSON(), replacer, space);
|
||||
}
|
||||
|
||||
*[Symbol.iterator] () {
|
||||
for (const [name, operations] of this.entries()) {
|
||||
yield {name, operations};
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
// Dual export: CommonJS consumers get the class directly, while
// transpiled/bundled consumers pick it up via `exports.default`.
if (typeof module !== 'undefined' && module.exports) {
  module.exports = Organisations; // CJS export
}

// ESM export
// NOTE(review): true ES modules have no `exports` binding; this branch
// only serves environments that provide one (transpilers/bundlers) — confirm.
if (typeof exports !== 'undefined' && !exports.default) {
  exports.default = Organisations; // ESM export
}
|
||||
5
lib/modules/@dougal/organisations/index.js
Normal file
5
lib/modules/@dougal/organisations/index.js
Normal file
@@ -0,0 +1,5 @@
|
||||
|
||||
// Package entry point: re-export the two classes of @dougal/organisations.
module.exports = {
  Organisation: require('./Organisation'),
  Organisations: require('./Organisations')
}
|
||||
12
lib/modules/@dougal/organisations/package.json
Normal file
12
lib/modules/@dougal/organisations/package.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "@dougal/organisations",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"description": ""
|
||||
}
|
||||
364
lib/modules/@dougal/user/User.js
Normal file
364
lib/modules/@dougal/user/User.js
Normal file
@@ -0,0 +1,364 @@
|
||||
const EventEmitter = require('events');
|
||||
const { Organisations } = require('@dougal/organisations');
|
||||
|
||||
/** Generate an RFC 4122 version 4 UUID string.
 *
 * Prefers the platform's cryptographically secure generator
 * (`crypto.randomUUID`, available in modern Node and browsers) and
 * falls back to the original Math.random()-based implementation,
 * which is NOT suitable for security-sensitive identifiers.
 *
 * @returns {string} A lowercase UUIDv4, e.g. "9b2c…-4…-a…".
 */
function randomUUID () {
  // Feature-detect rather than import, so this works unchanged in
  // browsers, bundlers and older Node versions.
  if (typeof globalThis !== 'undefined' && globalThis.crypto?.randomUUID) {
    return globalThis.crypto.randomUUID();
  }
  return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
    const r = Math.random() * 16 | 0;
    // 'y' nibble encodes the RFC 4122 variant: binary 10xx.
    const v = c === 'x' ? r : (r & 0x3 | 0x8);
    return v.toString(16);
  });
}
|
||||
|
||||
/** A user account.
 *
 * Field values live in the plain `values` object behind validating
 * getters/setters. Every mutation emits a `"changed"` event (via
 * EventEmitter) and refreshes the last-modified timestamp, which in
 * turn emits `"last_modified"`. Access rights are delegated to an
 * Organisations collection.
 */
class User extends EventEmitter {

  // Valid field names; the constructor assigns each of these from `data`
  // through the corresponding setter (where one exists).
  static fields = [ "ip", "host", "name", "email", "description", "colour", "active", "organisations", "meta" ]

  /** Return true if `str` is a well-formed UUIDv4 (case-insensitive). */
  static validUUID (str) {
    const uuidv4Rx = /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
    return uuidv4Rx.test(str);
  }

  /** Return true if `str` is a dotted-quad IPv4 address, optionally
   * followed by a /0–/32 CIDR suffix.
   */
  static validIPv4 (str) {
    const ipv4Rx = /^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\/([0-9]|[1-2][0-9]|3[0-2]))?$/;
    return ipv4Rx.test(str);
  }

  /** Return true if `str` looks like an IPv6 address, including
   * compressed ("::"), link-local (fe80:…%zone) and IPv4-mapped forms.
   */
  static validIPv6 (str) {
    const ipv6Rx = /^(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:){1,7}:|(?:[0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:){1,5}(?::[0-9a-fA-F]{1,4}){1,2}|(?:[0-9a-fA-F]{1,4}:){1,4}(?::[0-9a-fA-F]{1,4}){1,3}|(?:[0-9a-fA-F]{1,4}:){1,3}(?::[0-9a-fA-F]{1,4}){1,4}|(?:[0-9a-fA-F]{1,4}:){1,2}(?::[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:(?::[0-9a-fA-F]{1,4}){1,6}|:((?::[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(?::[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(?:ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?))|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|(2[0-4][0-9]|[01]?[0-9][0-9]?))))$/;
    return ipv6Rx.test(str);
  }

  /** Return true if `str` is a fully-qualified hostname: at most 253
   * characters, dot-separated labels of 1–63 characters that neither
   * start nor end with a hyphen, and an alphabetic TLD.
   */
  static validHostname (str) {
    const hostnameRx = /^(?=.{1,253}$)(?:(?!-)[A-Za-z0-9-]{1,63}(?<!-)\.)+[A-Za-z]{2,}$/;
    return hostnameRx.test(str);
  }

  // Store `v` under this.values[k] as a trimmed string (undefined is
  // stored as-is), then notify listeners and refresh the timestamp.
  #setString (k, v) {
    if (typeof v === "undefined") {
      this.values[k] = v;
    } else {
      this.values[k] = String(v).trim();
    }
    // NOTE(review): emits the raw `v`, not the trimmed stored value — confirm intended.
    this.emit("changed", k, v);
    this.#updateTimestamp();
  }

  // Set the last-modified timestamp to "now" (no argument) or to the
  // given date-like value, then emit "last_modified" with the ms value.
  #updateTimestamp (v) {
    if (typeof v === "undefined") {
      this.#timestamp = (new Date()).valueOf();
    } else {
      this.#timestamp = (new Date(v)).valueOf();
    }
    this.emit("last_modified", this.#timestamp);
  }

  // Create a new instance of `other`, where `other` is
  // an instance of User or of a derived class.
  // `new this.constructor()` preserves the subclass; values are
  // shallow-copied and the organisations collection is re-wrapped.
  // NOTE(review): `other` may also be a plain object (see spawn()),
  // in which case `other.values` is undefined and only the
  // organisations wrapper is produced — confirm intended.
  #clone (other = this) {
    const clone = new this.constructor();
    Object.assign(clone.values, other.values);
    clone.organisations = new Organisations(other.organisations);
    return clone;
  }

  // Backing store for all user fields
  values = {}

  // Last-modified time, in milliseconds since the epoch
  #timestamp

  constructor (data) {
    super();

    // Assign each known field from `data`; fields with setters are
    // validated on the way in. NOTE(review): "meta" has no setter or
    // getter, so it lands as a plain instance property — confirm.
    User.fields.forEach( f => this[f] = data?.[f] );
    this.values.id = data?.id ?? randomUUID();
    this.values.active = !!this.active;
    this.values.hash = data?.hash;
    this.values.password = data?.password;
    this.values.organisations = new Organisations(data?.organisations);
    this.#updateTimestamp(data?.last_modified);
  }

  /*
   * Getters
   */

  get id () { return this.values.id }

  get ip () { return this.values.ip }

  get host () { return this.values.host }

  get name () { return this.values.name }

  get email () { return this.values.email }

  get description () { return this.values.description }

  get colour () { return this.values.colour }

  get active () { return this.values.active }

  get organisations () { return this.values.organisations }

  get password () { return this.values.password }

  // Last-modified time as a Date object
  get timestamp () { return new Date(this.#timestamp) }

  /*
   * Setters — each one validates, emits "changed" and refreshes
   * the last-modified timestamp.
   */

  // Setting `undefined` generates a fresh UUID; anything else must
  // be a valid UUIDv4.
  set id (v) {
    if (typeof v === "undefined") {
      this.values.id = randomUUID();
    } else if (User.validUUID(v)) {
      this.values.id = v;
    } else {
      throw new Error("Invalid ID format (must be UUIDv4)");
    }
    this.emit("changed", "id", this.values.id);
    this.#updateTimestamp();
  }

  // Accepts IPv4 (optionally with CIDR suffix), IPv6, or undefined.
  set ip (v) {
    if (User.validIPv4(v) || User.validIPv6(v) || typeof v === "undefined") {
      this.values.ip = v;
    } else {
      throw new Error("Invalid IP address or subnet");
    }
    this.emit("changed", "ip", this.values.ip);
    this.#updateTimestamp();
  }

  // Accepts a fully-qualified hostname, or undefined.
  set host (v) {
    if (User.validHostname(v) || typeof v === "undefined") {
      this.values.host = v;
    } else {
      throw new Error("Invalid hostname");
    }
    this.emit("changed", "host", this.values.host);
    this.#updateTimestamp();
  }

  set name (v) {
    this.#setString("name", v);
  }

  set email (v) {
    // TODO: should validate the address format
    this.#setString("email", v);
  }

  set description (v) {
    this.#setString("description", v);
  }

  set colour (v) {
    this.#setString("colour", v);
  }

  // Coerced to a boolean.
  set active (v) {
    this.values.active = !!v;
    this.emit("changed", "active", this.values.active);
    this.#updateTimestamp();
  }

  // Always re-wrapped in a fresh Organisations instance.
  set organisations (v) {
    this.values.organisations = new Organisations(v);
    this.emit("changed", "organisations", this.values.organisations);
    this.#updateTimestamp();
  }

  // Stored verbatim. NOTE(review): the plaintext value is emitted in
  // the "changed" event and kept in `values` — confirm intended.
  set password (v) {
    this.values.password = v;
    this.emit("changed", "password", this.values.password);
    this.#updateTimestamp();
  }


  /*
   * Validation methods
   */

  // List of validation error codes; empty when the user is valid.
  get errors () {
    let err = [];

    if (!this.id) err.push("ERR_NO_ID");
    if (!this.name) err.push("ERR_NO_NAME");
    // NOTE(review): relies on Organisations exposing a `length` — confirm.
    if (!this.organisations.length) err.push("ERR_NO_ORG");

    return err;
  }

  get isValid () {
    return this.errors.length == 0;
  }

  /*
   * Filtering methods
   */

  // Return a copy of ourselves whose organisations/rights are reduced
  // to those also held by `other`.
  filter (other) {
    const filteredUser = this.#clone();
    filteredUser.organisations = this.organisations.filter(other.organisations);
    return filteredUser;
  }

  /** Return users that are visible to me.
   *
   * These are users with which we share at least one common organisation
   * with read, write or delete access.
   *
   * If we are wildcarded ("*"), we see everyone.
   *
   * If a peer is wildcarded, they can be seen by everyone.
   */
  peers (list) {
    if (this.organisations.value("*")) {
      return list;
    } else {
      return list.filter( user => this.canRead(user) );
    }
  }

  /** Return users that I can edit
   *
   * These users must belong to an organisation
   * over which I have edit rights.
   *
   * If we are edit wildcarded, we can edit everyone.
   */
  editablePeers (list) {
    const editableOrgs = this.organisations.accessToOperation("edit");
    if (editableOrgs.value("*")) {
      return list;
    } else {
      return list.filter( user => this.canEdit(user) );
    }
  }

  /*
   * General methods
   */

  /** Return `true` if we are `other`
   */
  is (other) {
    return this.id == other.id;
  }

  // Return true if we can perform `operation` on `other`. `other` may
  // be a User, an Organisations instance, an object with an
  // `organisations` property, or a plain rights object.
  canDo (operation, other) {
    // Our own "*" wildcard grants the operation everywhere.
    if (this.organisations.get('*')?.[operation])
      return true;

    if (other instanceof User) {
      return other.organisations.names().some(name => this.organisations.get(name)?.[operation]);
    } else if (other instanceof Organisations) {
      return other.accessToOperation(operation).names().some(name => this.organisations.get(name)?.[operation]);
    } else if (other?.organisations) {
      return this.canDo(operation, new Organisations(other.organisations));
    } else if (other instanceof Object) {
      return this.canDo(operation, new Organisations(other));
    }
    return false;
  }

  canRead (other) {
    return this.canDo("read", other);
  }

  canWrite (other) {
    return this.canDo("write", other);
  }

  canEdit (other) {
    return this.canDo("edit", other);
  }

  /** Perform an edit on another user
   *
   * Syntax: user.edit(other).to(another);
   *
   * Applies to `other` the changes described in `another`
   * that are permitted to `user`. The argument `another`
   * must be a plain object (not a `User` instance) with
   * only the properties that are to be changed.
   *
   * NOTE: Organisations are not merged, they are overwritten
   * and then filtered to ensure that the edited user does not
   * gain more privileges than those granted to the editing
   * user.
   *
   * Example:
   *
   *   // This causes user test77 to set user x23 to
   *   // inactive
   *   test77.edit(x23).to({active: false})
   */
  edit (other) {
    if (this.canEdit(other)) {
      return {
        to: (another) => {
          // Object.assign routes each property of `another` through
          // the setters of the clone, so validation still applies.
          const newUser = Object.assign(this.#clone(other), another);
          return newUser.filter(this);
        }
      }
    }
    // Do not fail or throw but return undefined
  }

  /** Create a new user similar to us except it doesn't have `edit` rights
   * by default
   *
   * NOTE(review): the `mask` parameter is accepted but never used in
   * the body — confirm whether it should be applied to the spawned
   * organisations.
   */
  spawn (init = {}, mask = {read: true, write: true, edit: false}) {
    const user = this.#clone(init);
    // Only organisations we can edit are inherited, minus the edit right.
    user.organisations = this.organisations.accessToOperation("edit").disableOperation("edit");
    user.organisations.overlord = this.organisations;
    return user;
  }



  /*
   * Conversion and presentation methods
   */

  // NOTE(review): includes `password` in the serialised output — confirm
  // this is not exposed to untrusted consumers.
  toJSON () {
    return {
      id: this.id,
      ip: this.ip,
      host: this.host,
      name: this.name,
      email: this.email,
      description: this.description,
      colour: this.colour,
      active: this.active,
      organisations: this.organisations.toJSON(),
      password: this.password
    }
  }

  toString (replacer, space) {
    return JSON.stringify(this.toJSON(), replacer, space);
  }

}
|
||||
|
||||
|
||||
// Dual export: CommonJS consumers get the class directly, while
// transpiled/bundled consumers pick it up via `exports.default`.
if (typeof module !== 'undefined' && module.exports) {
  module.exports = User; // CJS export
}

// ESM export
// NOTE(review): true ES modules have no `exports` binding; this branch
// only serves environments that provide one (transpilers/bundlers) — confirm.
if (typeof exports !== 'undefined' && !exports.default) {
  exports.default = User; // ESM export
}
|
||||
4
lib/modules/@dougal/user/index.js
Normal file
4
lib/modules/@dougal/user/index.js
Normal file
@@ -0,0 +1,4 @@
|
||||
|
||||
// Package entry point: re-export the User class of @dougal/user.
module.exports = {
  User: require('./User')
}
|
||||
15
lib/modules/@dougal/user/package.json
Normal file
15
lib/modules/@dougal/user/package.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "@dougal/user",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"dependencies": {
|
||||
"@dougal/organisations": "file:../organisations"
|
||||
}
|
||||
}
|
||||
@@ -3,6 +3,7 @@ module.exports = {
|
||||
'@vue/cli-plugin-babel/preset'
|
||||
],
|
||||
plugins: [
|
||||
'@babel/plugin-proposal-logical-assignment-operators'
|
||||
'@babel/plugin-proposal-logical-assignment-operators',
|
||||
'@babel/plugin-transform-private-methods'
|
||||
]
|
||||
}
|
||||
|
||||
27287
lib/www/client/source/package-lock.json
generated
27287
lib/www/client/source/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -3,34 +3,48 @@
|
||||
"version": "0.0.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"serve": "vue-cli-service serve",
|
||||
"serve": "vue-cli-service serve --host=0.0.0.0",
|
||||
"build": "vue-cli-service build"
|
||||
},
|
||||
"dependencies": {
|
||||
"@mdi/font": "^5.6.55",
|
||||
"@deck.gl/aggregation-layers": "^9.1.13",
|
||||
"@deck.gl/geo-layers": "^9.1.13",
|
||||
"@deck.gl/mesh-layers": "^9.1.14",
|
||||
"@dougal/binary": "file:../../../modules/@dougal/binary",
|
||||
"@dougal/concurrency": "file:../../../modules/@dougal/concurrency",
|
||||
"@dougal/organisations": "file:../../../modules/@dougal/organisations",
|
||||
"@dougal/user": "file:../../../modules/@dougal/user",
|
||||
"@loaders.gl/obj": "^4.3.4",
|
||||
"@mdi/font": "^7.2.96",
|
||||
"buffer": "^6.0.3",
|
||||
"core-js": "^3.6.5",
|
||||
"csv-parse": "^5.5.2",
|
||||
"d3": "^7.0.1",
|
||||
"jwt-decode": "^3.0.0",
|
||||
"leaflet": "^1.7.1",
|
||||
"leaflet-arrowheads": "^1.2.2",
|
||||
"leaflet-realtime": "^2.2.0",
|
||||
"leaflet.markercluster": "^1.4.1",
|
||||
"marked": "^2.0.3",
|
||||
"plotly.js-dist": "^2.5.0",
|
||||
"lodash.debounce": "^4.0.8",
|
||||
"marked": "^9.1.4",
|
||||
"path-browserify": "^1.0.1",
|
||||
"plotly.js-dist": "^2.27.0",
|
||||
"suncalc": "^1.8.0",
|
||||
"typeface-roboto": "0.0.75",
|
||||
"vue": "^2.6.12",
|
||||
"vue-debounce": "^2.6.0",
|
||||
"vue-router": "^3.5.1",
|
||||
"vuetify": "^2.5.0",
|
||||
"vuex": "^3.6.2"
|
||||
"vuex": "^3.6.2",
|
||||
"yaml": "^2.3.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/plugin-proposal-logical-assignment-operators": "^7.14.5",
|
||||
"@vue/cli-plugin-babel": "~4.4.0",
|
||||
"@vue/cli-plugin-router": "~4.4.0",
|
||||
"@vue/cli-plugin-vuex": "~4.4.0",
|
||||
"@vue/cli-service": "^4.5.13",
|
||||
"@babel/plugin-transform-private-methods": "^7.27.1",
|
||||
"@vue/cli-plugin-babel": "^5.0.8",
|
||||
"@vue/cli-plugin-router": "^5.0.8",
|
||||
"@vue/cli-plugin-vuex": "^5.0.8",
|
||||
"@vue/cli-service": "^5.0.8",
|
||||
"sass": "~1.32",
|
||||
"sass-loader": "^8.0.0",
|
||||
"stylus": "^0.54.8",
|
||||
|
||||
406982
lib/www/client/source/public/assets/boat0.obj
Normal file
406982
lib/www/client/source/public/assets/boat0.obj
Normal file
File diff suppressed because it is too large
Load Diff
@@ -9,7 +9,7 @@
|
||||
:color="snackColour"
|
||||
:timeout="6000"
|
||||
>
|
||||
{{ snackText }}
|
||||
<div v-html="snackText"></div>
|
||||
<template v-slot:action="{ attrs }">
|
||||
<v-btn
|
||||
text
|
||||
@@ -35,7 +35,7 @@
|
||||
</style>
|
||||
|
||||
<script>
|
||||
import { mapActions } from 'vuex';
|
||||
import { mapActions, mapGetters } from 'vuex';
|
||||
import DougalNavigation from './components/navigation';
|
||||
import DougalFooter from './components/footer';
|
||||
|
||||
@@ -52,8 +52,8 @@ export default {
|
||||
}),
|
||||
|
||||
computed: {
|
||||
snackText () { return this.$store.state.snack.snackText },
|
||||
snackColour () { return this.$store.state.snack.snackColour }
|
||||
snackText () { return this.$root.markdownInline(this.$store.state.snack.snackText) },
|
||||
snackColour () { return this.$store.state.snack.snackColour },
|
||||
},
|
||||
|
||||
watch: {
|
||||
@@ -75,17 +75,44 @@ export default {
|
||||
if (!newVal) {
|
||||
this.$store.commit('setSnackText', "");
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
methods: {
|
||||
...mapActions(["setCredentials"])
|
||||
|
||||
handleJWT (context, {payload}) {
|
||||
this.setCredentials({token: payload.token});
|
||||
},
|
||||
|
||||
handleProject (context, {payload}) {
|
||||
if (payload?.table == "public") {
|
||||
this.refreshProjects();
|
||||
}
|
||||
},
|
||||
|
||||
registerNotificationHandlers () {
|
||||
|
||||
this.$store.dispatch('registerHandler', {
|
||||
table: '.jwt',
|
||||
handler: this.handleJWT
|
||||
});
|
||||
|
||||
this.$store.dispatch('registerHandler', {
|
||||
table: 'project',
|
||||
handler: this.handleProject
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
...mapActions(["setCredentials", "refreshProjects"])
|
||||
},
|
||||
|
||||
mounted () {
|
||||
async mounted () {
|
||||
// Local Storage values are always strings
|
||||
this.$vuetify.theme.dark = localStorage.getItem("darkTheme") == "true";
|
||||
this.setCredentials()
|
||||
this.registerNotificationHandlers();
|
||||
await this.setCredentials();
|
||||
this.refreshProjects();
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
@@ -0,0 +1,86 @@
|
||||
<!--
  App-bar button + dialog for creating a new project from scratch.
  Only shown to users with admin access (AccessMixin). On a successful
  POST the browser is routed to the new project's configuration page.
-->
<template>
  <v-dialog
    v-model="dialogOpen"
    @input="(e) => $emit('input', e)"
    max-width="600"
  >
    <!-- Activator: visible only to admins -->
    <template v-slot:activator="{ on, attrs }">
      <v-btn v-if="adminaccess()"
        title="Create a new project from scratch. Generally, it's preferable to clone an existing project (right-click → ‘Clone’)"
        small
        outlined
        color="warning"
        v-bind="attrs"
        v-on="on"
      >
        <span>Create new project</span>
        <v-icon right small>mdi-file-document-plus-outline</v-icon>
      </v-btn>
    </template>

    <!-- Dialog body: reuses the project-settings name/id/geodetics form -->
    <dougal-project-settings-name-id-geodetics
      :value="newProjectDetails"
      @input="save"
      @close="dialogOpen = false"
    >
    </dougal-project-settings-name-id-geodetics>

  </v-dialog>
</template>

<script>
import { mapActions, mapGetters } from 'vuex';
import DougalProjectSettingsNameIdGeodetics from '@/components/project-settings/name-id-geodetics'
import AccessMixin from '@/mixins/access';

export default {
  name: 'DougalAppBarExtensionProjectList',

  components: {
    DougalProjectSettingsNameIdGeodetics
  },

  mixins: [
    AccessMixin
  ],

  data() {
    return {
      dialogOpen: false,
      // Blank form model handed to the settings component
      newProjectDetails: {
        name: null,
        id: null,
        epsg: null
      }
    };
  },

  methods: {
    // POST the new project; on HTTP 201 redirect to its settings page.
    async save (data) {
      this.dialogOpen = false;
      data.archived = true; // Make the project inactive to start with
      // NOTE(review): debug logging left in — consider removing.
      console.log("POST the new project data");
      console.log(data);

      const init = {
        method: "POST",
        body: data
      };
      const cb = (err, res) => {
        if (!err && res) {
          console.log(res);
          if (res.status == "201") {
            // Redirect to new project settings page
            const settingsUrl = `/projects/${data.id.toLowerCase()}/configuration`;
            this.$router.push(settingsUrl);

          }
        }
      };
      await this.api(["/project", init, cb]);
    },

    ...mapActions(["api"])
  }
}
</script>
|
||||
@@ -0,0 +1,63 @@
|
||||
<!--
  App-bar tab strip for a project. On normal pages it shows the project
  section tabs (plus a Settings tab for admins); on the configuration
  page it shows a reduced "Project settings / Go to project" strip.
-->
<template>
  <v-tabs :value="tab" show-arrows v-if="page != 'configuration'">
    <v-tab v-for="tab, index in tabs" :key="index" link :to="tabLink(tab.href)" v-text="tab.text"></v-tab>
    <!-- Settings tab, admins only -->
    <template v-if="adminaccess()">
      <v-spacer></v-spacer>
      <v-tab :to="tabLink('configuration')" class="orange--text darken-3" title="Edit project settings"><v-icon small left color="orange darken-3">mdi-cog-outline</v-icon> Settings</v-tab>
    </template>
  </v-tabs>
  <v-tabs optional :value="0" show-arrows align-with-title v-else>
    <v-tab>Project settings</v-tab>
    <v-spacer></v-spacer>
    <v-tab :to="tabLink('summary')">Go to project</v-tab>
  </v-tabs>
</template>

<script>
import { mapActions, mapGetters } from 'vuex';
import AccessMixin from '@/mixins/access';

export default {
  name: 'DougalAppBarExtensionProject',

  mixins: [
    AccessMixin
  ],

  data() {
    return {
      // Section tabs, in display order; `href` is the path segment.
      tabs: [
        { href: "summary", text: "Summary" },
        { href: "lines", text: "Lines" },
        { href: "plan", text: "Plan" },
        { href: "sequences", text: "Sequences" },
        { href: "calendar", text: "Calendar" },
        { href: "log", text: "Log" },
        { href: "qc", text: "QC" },
        { href: "graphs", text: "Graphs" },
        { href: "map", text: "Map" }
      ]
    };
  },

  computed: {

    // Current page segment, e.g. "lines" in /projects/<id>/lines
    page () {
      return this.$route.path.split(/\/+/)[3];
    },

    // Index of the active tab (-1 when the page is not a listed tab)
    tab () {
      return this.tabs.findIndex(t => t.href == this.page);
    },

  },

  methods: {

    // Build the route for a tab within the current project
    tabLink (href) {
      return `/projects/${this.$route.params.project}/${href}`;
    }

  }
}
</script>
|
||||
@@ -0,0 +1,82 @@
|
||||
<!--
  One row of the delimited-string decoder: shows the field's name as a
  coloured chip and lets the user pick which column of the delimited
  input it maps to. Emits `input` with an updated copy of `value`.
-->
<template>
  <v-row
    dense
    no-gutters
    align="center"
  >

    <v-col cols="1">
      <slot name="prepend"></slot>
    </v-col>

    <!-- Field name chip, coloured deterministically from the key -->
    <v-col cols="2">
      <v-chip outlined label small :color="colour || getHSLColourFor(key)">{{name}}</v-chip>
    </v-col>

    <!-- Column number picker; emits a copy of `value` with the new column -->
    <v-col cols="4">
      <v-text-field
        dense
        label="Column"
        type="number"
        min="0"
        clearable
        :value="value.column"
        @input="$emit('input', {...value, column: Number($event)})"
      >
        <template v-slot:append-outer>
          <dougal-field-content-dialog
            :readonly="readonly"
            :value="value"
            @input="$emit('input', $event)"
          ></dougal-field-content-dialog>
        </template>
      </v-text-field>
    </v-col>

    <v-col cols="1">
      <slot name="append"></slot>
    </v-col>

  </v-row>
</template>

<style scoped>
</style>

<script>
// NOTE(review): `parse` is imported but not used in this component — confirm
// whether the import can be removed.
import { parse } from 'csv-parse/sync'
import { getHSLColourFor } from '@/lib/hsl'
import DougalFieldContentDialog from '../fields/field-content-dialog'

export default {
  name: "DougalDelimitedStringDecoderField",

  components: {
    //DougalFixedStringDecoderField,
    DougalFieldContentDialog
  },

  props: {
    // Field descriptor, at least { column: Number } — TODO confirm shape
    value: Object,
    // Display name of the field
    name: String,
    // Chip colour override; falls back to getHSLColourFor(key)
    colour: String,
    readonly: Boolean,
  },

  data () {
    return {
    }
  },

  computed: {
  },

  watch: {
  },

  methods: {
    getHSLColourFor: getHSLColourFor.bind(this),
  },

}
</script>
|
||||
@@ -0,0 +1,366 @@
|
||||
<template>
|
||||
<v-card flat elevation="0">
|
||||
<v-card-title v-if="title">{{ title }}</v-card-title>
|
||||
<v-card-subtitle v-if="subtitle">{{ subtitle }}</v-card-subtitle>
|
||||
<v-card-text>
|
||||
<v-form>
|
||||
|
||||
<v-container>
|
||||
|
||||
<dougal-delimited-string-decoder-field v-for="(field, key) in fields" :key="key"
|
||||
:colour="getHSLColourFor(key)"
|
||||
:readonly="readonly"
|
||||
:name="key"
|
||||
:value="fields[key]"
|
||||
@input="$emit('update:fields', {...fields, [key]: $event})"
|
||||
>
|
||||
<template v-slot:append v-if="editableFieldList && !readonly">
|
||||
<v-btn
|
||||
class="ml-3"
|
||||
fab
|
||||
text
|
||||
small
|
||||
title="Remove this property"
|
||||
>
|
||||
<v-icon
|
||||
color="error"
|
||||
@click="removeField(key)"
|
||||
>mdi-minus</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
</dougal-delimited-string-decoder-field>
|
||||
<v-row dense no-gutters v-if="editableFieldList && !readonly">
|
||||
<v-col cols=6 offset=1>
|
||||
<v-text-field
|
||||
label="Add new field"
|
||||
hint="Enter the name of a new field"
|
||||
:error-messages="fieldNameErrors"
|
||||
v-model="fieldName"
|
||||
append-outer-icon="mdi-plus-circle"
|
||||
@keydown.enter.prevent="addField"
|
||||
>
|
||||
<template v-slot:append-outer>
|
||||
<v-icon
|
||||
color="primary"
|
||||
:disabled="fieldName && !!fieldNameErrors"
|
||||
@click="addField"
|
||||
>mdi-plus</v-icon>
|
||||
</template>
|
||||
</v-text-field>
|
||||
</v-col>
|
||||
</v-row>
|
||||
|
||||
<v-row>
|
||||
<v-col cols="6">
|
||||
<v-combobox
|
||||
label="Field delimiter"
|
||||
hint="How are the fields separated from each other?"
|
||||
:items="delimiters"
|
||||
v-model="delimiter_"
|
||||
></v-combobox>
|
||||
</v-col>
|
||||
</v-row>
|
||||
|
||||
<v-row>
|
||||
<v-col cols="6">
|
||||
<v-text-field
|
||||
class="ml-3"
|
||||
label="Skip lines"
|
||||
hint="This lets you to skip file headers if present"
|
||||
type="number"
|
||||
min="0"
|
||||
:value.number="numberedLines"
|
||||
@input="$emit('update:numbered-lines', Number($event))"
|
||||
></v-text-field>
|
||||
</v-col>
|
||||
<v-col cols="6">
|
||||
<v-checkbox
|
||||
v-ripple
|
||||
label="First non-skipped line are field names"
|
||||
:value="headerRow"
|
||||
@change="$emit('update:header-row', $event)"
|
||||
></v-checkbox>
|
||||
</v-col>
|
||||
</v-row>
|
||||
|
||||
<v-row>
|
||||
<v-col>
|
||||
<v-simple-table dense>
|
||||
<template v-slot:default>
|
||||
<colgroup v-if="showLineNumbers">
|
||||
<col class="line_no"/>
|
||||
</colgroup>
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="line_no">
|
||||
<v-simple-checkbox
|
||||
off-icon="mdi-format-list-numbered"
|
||||
title="Show line numbers"
|
||||
v-model="showLineNumbers"
|
||||
>
|
||||
</v-simple-checkbox>
|
||||
</th>
|
||||
<th v-for="(header, idx) in headers" :key="idx"
|
||||
:style="`color:${header.colour};`"
|
||||
>
|
||||
<v-select
|
||||
dense
|
||||
clearable
|
||||
:items="fieldsAvailableFor(idx)"
|
||||
:value="header.fieldName"
|
||||
@input="fieldSelected(idx, $event)"
|
||||
>
|
||||
</v-select>
|
||||
</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<th class="line_no">
|
||||
<small v-if="showLineNumbers && headers.length">Line no.</small>
|
||||
</th>
|
||||
<th v-for="(header, idx) in headers" :key="idx"
|
||||
:style="`color:${header.colour};`"
|
||||
>
|
||||
{{ header.text }}
|
||||
</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr v-for="(row, ridx) in rows" :key="ridx">
|
||||
<td class="line_no"">
|
||||
<small v-if="showLineNumbers">
|
||||
{{ ridx + (typeof numberedLines == "number" ? numberedLines : 0)+1 }}
|
||||
</small>
|
||||
</td>
|
||||
<td v-for="(cell, cidx) in row" :key="cidx"
|
||||
:style="`background-color:${cell.colour};`"
|
||||
>
|
||||
{{ cell.text }}
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</template>
|
||||
</v-simple-table>
|
||||
</v-col>
|
||||
</v-row>
|
||||
|
||||
</v-container>
|
||||
|
||||
|
||||
|
||||
</v-form>
|
||||
</v-card-text>
|
||||
<v-card-actions>
|
||||
</v-card-actions>
|
||||
</v-card>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
/*.v-data-table table tbody tr td*/
|
||||
th {
|
||||
border: 1px solid hsl(0, 0%, 33.3%);
|
||||
}
|
||||
|
||||
td {
|
||||
border-inline: 1px solid hsl(0, 0%, 33.3%);
|
||||
}
|
||||
|
||||
.line_no {
|
||||
text-align: right;
|
||||
width: 4ex;
|
||||
border: none !important;
|
||||
}
|
||||
</style>
|
||||
|
||||
<script>
|
||||
import { parse } from 'csv-parse/sync'
|
||||
import { getHSLColourFor } from '@/lib/hsl'
|
||||
import truncateText from '@/lib/truncate-text'
|
||||
import DougalDelimitedStringDecoderField from './delimited-string-decoder-field'
|
||||
|
||||
export default {
|
||||
name: "DougalDelimitedStringDecoder",
|
||||
|
||||
components: {
|
||||
DougalDelimitedStringDecoderField
|
||||
},
|
||||
|
||||
props: {
|
||||
text: String,
|
||||
fields: Object,
|
||||
delimiter: String,
|
||||
headerRow: { type: [ Boolean, Number ], default: false},
|
||||
numberedLines: [ Boolean, Number ],
|
||||
maxHeight: String,
|
||||
editableFieldList: { type: Boolean, default: true },
|
||||
readonly: Boolean,
|
||||
title: String,
|
||||
subtitle: String
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
//< The reason for not using this.text directly is that at some point
|
||||
//< we might extend this component to allow editing the sample text.
|
||||
text_: "",
|
||||
//< The name of a new field to add.
|
||||
fieldName: "",
|
||||
showLineNumbers: null,
|
||||
delimiters: [
|
||||
{ text: "Comma (,)", value: "," },
|
||||
{ text: "Tabulator (⇥)", value: "\x09" },
|
||||
{ text: "Semicolon (;)", value: ";" }
|
||||
]
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
/** The index of the last column.
|
||||
*
|
||||
* This will be the higher of the number of columns available
|
||||
* in the sample text or the highest column number defined in
|
||||
* this.fields.
|
||||
*
|
||||
* NOTE: May return NaN
|
||||
*/
|
||||
numberOfColumns () {
|
||||
const lastIndex = Object.values(this.fields)
|
||||
.reduce( (acc, cur) => Math.max(acc, cur.column), this.cells[0]?.length-1);
|
||||
return isNaN(lastIndex) ? 0 : (lastIndex + 1);
|
||||
},
|
||||
|
||||
cells () {
|
||||
return parse(this.text_, {delimiter: this.delimiter, trim: true});
|
||||
},
|
||||
|
||||
headers () {
|
||||
|
||||
const headerNames = typeof this.headerRow == "number"
|
||||
? this.cells[this.headerRow]
|
||||
: this.headerRow === true
|
||||
? this.cells[0]
|
||||
: Array.from(this.cells[0] ?? [], (_, ι) => `Column ${ι}`);
|
||||
|
||||
return headerNames?.map((c, ι) => {
|
||||
const fieldName = Object.keys(this.fields).find(i => this.fields[i].column == ι);
|
||||
const field = this.fields[fieldName] ?? {}
|
||||
const colour = this.headerRow === false
|
||||
? this.getHSLColourFor(ι*10)
|
||||
: this.getHSLColourFor(c);
|
||||
|
||||
return {
|
||||
text: c,
|
||||
colour: this.getHSLColourFor(c),
|
||||
fieldName,
|
||||
field
|
||||
} ?? {}
|
||||
}) ?? [];
|
||||
},
|
||||
|
||||
rows () {
|
||||
// NOTE It doesn't matter if headerRow is boolean, it works just the same.
|
||||
return [...this.cells].slice(this.headerRow).map(r =>
|
||||
r.map( (c, ι) => ({
|
||||
text: truncateText(c),
|
||||
colour: this.headers.length
|
||||
? this.getHSLColourFor(this.headers[ι]?.text, 0.2)
|
||||
: this.getHSLColourFor(ι*10, 0.2)
|
||||
})));
|
||||
},
|
||||
|
||||
fieldNameErrors () {
|
||||
return Object.keys(this.fields).includes(this.fieldName)
|
||||
? "A field with this name already exists"
|
||||
: null;
|
||||
},
|
||||
|
||||
delimiter_: {
|
||||
get () {
|
||||
return this.delimiters.find(i => i.value == this.delimiter) ?? this.delimiter;
|
||||
},
|
||||
|
||||
set (v) {
|
||||
this.$emit("update:delimiter", typeof v == "object" ? v.value : v);
|
||||
}
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
watch: {
|
||||
|
||||
text () {
|
||||
if (this.text != this.text_) {
|
||||
this.reset();
|
||||
}
|
||||
},
|
||||
|
||||
numberedLines (cur, prev) {
|
||||
if (cur != prev) {
|
||||
this.showLineNumbers = typeof cur == "number" || cur;
|
||||
}
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
|
||||
|
||||
fieldsAvailableFor (idx) {
|
||||
return Object.keys(this.fields).filter( i =>
|
||||
this.fields[i].column === idx || this.fields[i].column === null) ?? [];
|
||||
},
|
||||
|
||||
fieldSelected (col, key) {
|
||||
|
||||
const fields = {};
|
||||
for (const k in this.fields) {
|
||||
const field = {...this.fields[k]};
|
||||
if (k === key) {
|
||||
field.column = col
|
||||
} else {
|
||||
if (field.column === col) {
|
||||
field.column = null;
|
||||
}
|
||||
}
|
||||
fields[k] = field;
|
||||
}
|
||||
|
||||
this.$emit("update:fields", fields);
|
||||
|
||||
},
|
||||
|
||||
addField () {
|
||||
if (!this.fieldNameErrors) {
|
||||
this.$emit("update:fields", {
|
||||
...this.fields,
|
||||
[this.fieldName]: { column: null }
|
||||
});
|
||||
this.fieldName = "";
|
||||
}
|
||||
},
|
||||
|
||||
removeField (key) {
|
||||
const fields = {...this.fields};
|
||||
delete fields[key];
|
||||
this.$emit("update:fields", fields);
|
||||
},
|
||||
|
||||
getHSLColourFor: getHSLColourFor.bind(this),
|
||||
|
||||
numberLine (number, line) {
|
||||
return `<span class="line-number">${number}</span>${line}`;
|
||||
},
|
||||
|
||||
reset () {
|
||||
this.text_ = this.text.replaceAll("\r", "");
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.reset();
|
||||
}
|
||||
|
||||
}
|
||||
</script>
|
||||
@@ -0,0 +1,140 @@
|
||||
<template>
|
||||
<v-row dense no-gutters>
|
||||
|
||||
<v-col cols="1">
|
||||
<slot name="prepend"></slot>
|
||||
</v-col>
|
||||
|
||||
<v-col cols="2">
|
||||
<v-chip outlined label small :color="colour">{{name}}</v-chip>
|
||||
</v-col>
|
||||
|
||||
<v-col cols="2">
|
||||
<v-text-field
|
||||
class="ml-3"
|
||||
dense
|
||||
label="From"
|
||||
type="number"
|
||||
min="0"
|
||||
v-model.number="value.offset"
|
||||
:readonly="readonly"
|
||||
></v-text-field>
|
||||
</v-col>
|
||||
|
||||
<v-col cols="2">
|
||||
<v-text-field
|
||||
class="ml-3"
|
||||
dense
|
||||
label="Length"
|
||||
type="number"
|
||||
min="0"
|
||||
v-model.number="value.length"
|
||||
:readonly="readonly"
|
||||
></v-text-field>
|
||||
</v-col>
|
||||
|
||||
<v-col cols="2">
|
||||
<dougal-field-content-dialog
|
||||
:readonly="readonly"
|
||||
:value="value"
|
||||
@input="$emit('input', $event)"
|
||||
></dougal-field-content-dialog>
|
||||
|
||||
</v-col>
|
||||
|
||||
<v-col cols="1">
|
||||
<slot name="append"></slot>
|
||||
</v-col>
|
||||
|
||||
</v-row>
|
||||
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
.input {
|
||||
flex: 1 1 auto;
|
||||
line-height: 20px;
|
||||
padding: 8px 0 8px;
|
||||
min-height: 32px;
|
||||
max-height: 32px;
|
||||
max-width: 100%;
|
||||
min-width: 0px;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.input >>> .chunk {
|
||||
padding-inline: 1px;
|
||||
border: 1px solid;
|
||||
}
|
||||
|
||||
.input >>> .chunk-empty {
|
||||
padding-inline: 1px;
|
||||
}
|
||||
|
||||
.input >>> .chunk-overlap {
|
||||
padding-inline: 1px;
|
||||
border: 1px solid grey;
|
||||
color: grey;
|
||||
}
|
||||
</style>
|
||||
|
||||
<script>
|
||||
import DougalFieldContentDialog from '../fields/field-content-dialog'
|
||||
|
||||
export default {
|
||||
name: "DougalFixedStringDecoderField",
|
||||
|
||||
components: {
|
||||
DougalFieldContentDialog
|
||||
},
|
||||
|
||||
props: {
|
||||
value: Object,
|
||||
name: String,
|
||||
colour: String,
|
||||
readonly: Boolean,
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
name_: "",
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
},
|
||||
|
||||
watch: {
|
||||
|
||||
name () {
|
||||
if (this.name != this.name_) {
|
||||
this.name_ = this.name;
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
addField () {
|
||||
if (!this.fieldNameErrors) {
|
||||
this.$emit("update:fields", {
|
||||
...this.fields,
|
||||
[this.fieldName]: { offset: 0, length: 0 }
|
||||
});
|
||||
this.fieldName = "";
|
||||
}
|
||||
},
|
||||
|
||||
reset () {
|
||||
this.text_ = this.text;
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.reset();
|
||||
}
|
||||
|
||||
}
|
||||
</script>
|
||||
@@ -0,0 +1,486 @@
|
||||
<template>
|
||||
<v-card flat elevation="0">
|
||||
<v-card-title v-if="title">{{ title }}</v-card-title>
|
||||
<v-card-subtitle v-if="subtitle">{{ subtitle }}</v-card-subtitle>
|
||||
<v-card-text>
|
||||
<v-form>
|
||||
<div v-if="isMultiline"
|
||||
class="multiline mb-5"
|
||||
:style="multilineElementStyle"
|
||||
v-html="html"
|
||||
>
|
||||
</div>
|
||||
<v-input v-else
|
||||
class="v-text-field"
|
||||
:hint="hint"
|
||||
persistent-hint
|
||||
v-model="text_"
|
||||
>
|
||||
<label
|
||||
class="v-label"
|
||||
:class="[ $vuetify.theme.isDark && 'theme--dark', text_ && text_.length && 'v-label--active' ]"
|
||||
style="left: 0px; right: auto; position: absolute;"
|
||||
>{{ label }}</label>
|
||||
<div class="input"
|
||||
:class="isMultiline ? 'multiline' : ''"
|
||||
v-html="html"
|
||||
>
|
||||
</div>
|
||||
</v-input>
|
||||
|
||||
<v-container>
|
||||
|
||||
<!-- Variable fields -->
|
||||
|
||||
<v-row no-gutters class="mb-2">
|
||||
<h4>Variable fields</h4>
|
||||
</v-row>
|
||||
|
||||
<dougal-fixed-string-decoder-field v-for="(field, key) in fields" :key="key"
|
||||
v-model="fields[key]"
|
||||
:name="key"
|
||||
:colour="getHSLColourFor(key)"
|
||||
:readonly="readonly"
|
||||
>
|
||||
<template v-slot:append v-if="editableFieldList && !readonly">
|
||||
<v-btn
|
||||
class="ml-3"
|
||||
fab
|
||||
text
|
||||
small
|
||||
title="Remove this property"
|
||||
>
|
||||
<v-icon
|
||||
color="error"
|
||||
@click="removeField(key)"
|
||||
>mdi-minus</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
</dougal-fixed-string-decoder-field>
|
||||
|
||||
<v-row dense no-gutters v-if="editableFieldList && !readonly">
|
||||
<v-col cols="3">
|
||||
<v-text-field
|
||||
label="Add new field"
|
||||
hint="Enter the name of a new field"
|
||||
:error-messages="fieldNameErrors"
|
||||
v-model="fieldName"
|
||||
append-outer-icon="mdi-plus-circle"
|
||||
@keydown.enter.prevent="addField"
|
||||
>
|
||||
<template v-slot:append-outer>
|
||||
<v-icon
|
||||
color="primary"
|
||||
:disabled="fieldName && !!fieldNameErrors"
|
||||
@click="addField"
|
||||
>mdi-plus</v-icon>
|
||||
</template>
|
||||
</v-text-field>
|
||||
</v-col>
|
||||
</v-row>
|
||||
|
||||
<!-- Fixed text strings -->
|
||||
|
||||
<v-row no-gutters class="mt-2 mb-2">
|
||||
<h4>Fixed strings</h4>
|
||||
</v-row>
|
||||
|
||||
<dougal-fixed-string-text v-for="(item, idx) in fixed" :key="idx"
|
||||
v-model="fixed[idx]"
|
||||
:colour="getHSLColourFor(item.text+item.offset)"
|
||||
:readonly="readonly"
|
||||
>
|
||||
<template v-slot:append v-if="editableFieldList && !readonly">
|
||||
<v-btn
|
||||
class="ml-3"
|
||||
fab
|
||||
text
|
||||
small
|
||||
title="Remove this property"
|
||||
>
|
||||
<v-icon
|
||||
color="error"
|
||||
@click="removeFixed(idx)"
|
||||
>mdi-minus</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
</dougal-fixed-string-text>
|
||||
|
||||
<v-row dense no-gutters v-if="editableFieldList && !readonly">
|
||||
<v-col cols="3">
|
||||
<v-text-field
|
||||
label="Add fixed text"
|
||||
hint="Enter text"
|
||||
:error-messages="fieldNameErrors"
|
||||
v-model="fixedName"
|
||||
@keydown.enter.prevent="addFixed"
|
||||
>
|
||||
</v-text-field>
|
||||
</v-col>
|
||||
<v-col cols="3">
|
||||
<v-text-field
|
||||
class="ml-3"
|
||||
label="From position"
|
||||
hint="Enter offset"
|
||||
type="number"
|
||||
min="0"
|
||||
v-model.number="fixedOffset"
|
||||
:readonly="readonly"
|
||||
append-outer-icon="mdi-plus-circle"
|
||||
>
|
||||
<template v-slot:append-outer>
|
||||
<v-icon
|
||||
color="primary"
|
||||
:disabled="!fixedName"
|
||||
@click="addFixed"
|
||||
>mdi-plus</v-icon>
|
||||
</template>
|
||||
</v-text-field>
|
||||
</v-col>
|
||||
</v-row>
|
||||
|
||||
</v-container>
|
||||
|
||||
|
||||
|
||||
</v-form>
|
||||
</v-card-text>
|
||||
<v-card-actions>
|
||||
</v-card-actions>
|
||||
</v-card>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
.input {
|
||||
flex: 1 1 auto;
|
||||
line-height: 20px;
|
||||
padding: 8px 0 8px;
|
||||
min-height: 32px;
|
||||
max-height: 32px;
|
||||
max-width: 100%;
|
||||
min-width: 0px;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.multiline {
|
||||
font-family: mono;
|
||||
white-space: pre;
|
||||
overflow-x: auto;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
.multiline >>> .line-number {
|
||||
display: inline-block;
|
||||
font-size: 75%;
|
||||
width: 5ex;
|
||||
margin-inline-end: 1ex;
|
||||
text-align: right;
|
||||
border: none;
|
||||
position: relative;
|
||||
top: -1px;
|
||||
}
|
||||
|
||||
.input, .multiline >>> .chunk-field {
|
||||
padding-inline: 1px;
|
||||
border: 1px solid;
|
||||
}
|
||||
|
||||
.input, .multiline >>> .chunk-fixed {
|
||||
padding-inline: 1px;
|
||||
border: 1px dashed;
|
||||
}
|
||||
|
||||
.input, .multiline >>> .chunk-empty {
|
||||
padding-inline: 1px;
|
||||
}
|
||||
|
||||
.input, .multiline >>> .chunk-overlap {
|
||||
padding-inline: 1px;
|
||||
border: 1px solid grey;
|
||||
color: grey;
|
||||
}
|
||||
|
||||
.input, .multiline >>> .chunk-mismatch {
|
||||
padding-inline: 1px;
|
||||
border: 2px solid red !important;
|
||||
}
|
||||
</style>
|
||||
|
||||
<script>
|
||||
import { getHSLColourFor } from '@/lib/hsl'
|
||||
import DougalFixedStringDecoderField from './fixed-string-decoder-field'
|
||||
import DougalFixedStringText from './fixed-string-text'
|
||||
|
||||
export default {
|
||||
name: "DougalFixedStringDecoder",
|
||||
|
||||
components: {
|
||||
DougalFixedStringDecoderField,
|
||||
DougalFixedStringText
|
||||
},
|
||||
|
||||
mixins: [
|
||||
{
|
||||
methods: {
|
||||
getHSLColourFor
|
||||
}
|
||||
}
|
||||
],
|
||||
|
||||
props: {
|
||||
text: { type: String, default: "" },
|
||||
fixed: { type: Array, default: () => [] },
|
||||
fields: { type: Object, default: () => ({}) },
|
||||
multiline: Boolean,
|
||||
numberedLines: [ Boolean, Number ],
|
||||
maxHeight: String,
|
||||
editableFieldList: { type: Boolean, default: true },
|
||||
readonly: Boolean,
|
||||
title: String,
|
||||
subtitle: String,
|
||||
label: String,
|
||||
hint: String,
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
//< The reason for not using this.text directly is that at some point
|
||||
//< we might extend this component to allow editing the sample text.
|
||||
text_: "",
|
||||
//< The value of a fixed string that should be always present at a specific position
|
||||
fixedName: "",
|
||||
fixedOffset: 0,
|
||||
//< The name of a new field to add.
|
||||
fieldName: ""
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
/** Whether to treat the sample text as multiline.
|
||||
*/
|
||||
isMultiline () {
|
||||
return this.multiline === true || this.text.includes("\n");
|
||||
},
|
||||
|
||||
/* Return the fields as an array sorted by offset
|
||||
*/
|
||||
parts () {
|
||||
// return Object.entries(this.fields).sort( (a, b) => a[1].offset - b[1].offset );
|
||||
return [
|
||||
...Object.entries(this.fields),
|
||||
...this.fixed.map(i => [ i.text + i.offset, {...i, length: i.text?.length} ])
|
||||
].sort( (a, b) => {
|
||||
const offset_a = a.offset ?? a[1].offset;
|
||||
const offset_b = b.offset ?? b[1].offset;
|
||||
return a - b;
|
||||
})
|
||||
},
|
||||
|
||||
/* Transform this.parts into {start, end} intervals.
|
||||
*/
|
||||
chunks () {
|
||||
const chunks = [];
|
||||
const chunk_num = 0;
|
||||
for (const [name, part] of this.parts) {
|
||||
const chunk = {};
|
||||
chunk.start = part.offset;
|
||||
chunk.end = part.offset + part.length - 1;
|
||||
//chunk.text = this.text_.slice(chunk.start, chunk.end);
|
||||
chunk.colour = this.getHSLColourFor(name)
|
||||
chunk.class = part.text ? "fixed" : "field";
|
||||
chunk.text = part.text;
|
||||
|
||||
chunks.push(chunk);
|
||||
}
|
||||
|
||||
return chunks;
|
||||
},
|
||||
|
||||
multilineElementStyle () {
|
||||
if (this.maxHeight) {
|
||||
return `max-height: ${this.maxHeight};`;
|
||||
}
|
||||
return "";
|
||||
},
|
||||
|
||||
/** Return a colourised HTML version of this.text.
|
||||
*/
|
||||
html () {
|
||||
if (!this.text_) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.isMultiline) {
|
||||
if (typeof this.numberedLines == "number" || this.numberedLines) {
|
||||
const offset = typeof this.numberedLines == "number" ? Math.abs(this.numberedLines) : 0;
|
||||
return this.text_.split("\n").map( (line, idx) =>
|
||||
this.numberLine(offset+idx, this.renderTextLine(line))).join("<br/>");
|
||||
} else {
|
||||
return this.text_.split("\n").map(this.renderTextLine).join("<br/>");
|
||||
}
|
||||
} else {
|
||||
return this.renderTextLine(this.text_);
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
fieldNameErrors () {
|
||||
return this.parts.find( i => i[0] == this.fieldName )
|
||||
? "A field with this name already exists"
|
||||
: null;
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
watch: {
|
||||
|
||||
text () {
|
||||
if (this.text != this.text_) {
|
||||
this.reset();
|
||||
}
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
addFixed () {
|
||||
if (this.fixedName) {
|
||||
const fixed = [
|
||||
...this.fixed,
|
||||
{ text: this.fixedName, offset: this.fixedOffset }
|
||||
];
|
||||
fixed.sort( (a, b) => a.offset - b.offset );
|
||||
this.fixedName = null;
|
||||
this.fixedOffset = 0;
|
||||
this.$emit("update:fixed", fixed);
|
||||
}
|
||||
},
|
||||
|
||||
addField () {
|
||||
if (!this.fieldNameErrors) {
|
||||
this.$emit("update:fields", {
|
||||
...this.fields,
|
||||
[this.fieldName]: { offset: 0, length: 0 }
|
||||
});
|
||||
this.fieldName = "";
|
||||
}
|
||||
},
|
||||
|
||||
// NOTE Not used
|
||||
updateField (field, key, value) {
|
||||
const fields = {
|
||||
...this.fields,
|
||||
[field]: {
|
||||
...this.fields[field],
|
||||
[key]: value
|
||||
}
|
||||
};
|
||||
this.$emit("update:fields", fields);
|
||||
},
|
||||
|
||||
removeField (key) {
|
||||
const fields = {...this.fields};
|
||||
delete fields[key];
|
||||
this.$emit("update:fields", fields);
|
||||
},
|
||||
|
||||
removeFixed (idx) {
|
||||
const fixed = [...this.fixed];
|
||||
fixed.splice(idx, 1);
|
||||
//fixed.sort( (a, b) => a.offset - b.offset );
|
||||
this.$emit("update:fixed", fixed);
|
||||
},
|
||||
|
||||
/** Return an HSL colour as a function of an input value
|
||||
* `str`.
|
||||
*/
|
||||
xgetHSLColourFor () {
|
||||
console.log("WILL BE DEFINED ON MOUNT");
|
||||
},
|
||||
|
||||
/** Return a `<span>` opening tag.
|
||||
*/
|
||||
style (name, colour) {
|
||||
return colour
|
||||
? `<span class="${name}" style="color:${colour};border-color:${colour}">`
|
||||
: `<span class="${name}">`;
|
||||
},
|
||||
|
||||
/** Return an array of the intervals that intersect `pos`.
|
||||
* May be empty.
|
||||
*/
|
||||
chunksFor (pos) {
|
||||
return this.chunks.filter( chunk =>
|
||||
pos >= chunk.start &&
|
||||
pos <= chunk.end
|
||||
)
|
||||
},
|
||||
|
||||
/*
|
||||
* Algorithm:
|
||||
*
|
||||
* Go through every character of one line of text and determine in which
|
||||
* part(s) it falls in, if any. Collect adjacent same parts into <span/>
|
||||
* elements.
|
||||
*/
|
||||
renderTextLine (text) {
|
||||
const parts = [];
|
||||
|
||||
let prevStyle;
|
||||
|
||||
for (const pos in text) {
|
||||
const chunks = this.chunksFor(pos);
|
||||
const isEmpty = chunks.length == 0;
|
||||
const isOverlap = chunks.length > 1;
|
||||
const isMismatch = chunks[0]?.text &&
|
||||
(text.substring(chunks[0].start, chunks[0].end+1) != chunks[0].text)
|
||||
|
||||
const style = isEmpty
|
||||
? this.style("chunk-empty")
|
||||
: isMismatch
|
||||
? this.style("chunk-mismatch", chunks[0].colour)
|
||||
: isOverlap
|
||||
? this.style("chunk-overlap")
|
||||
: this.style("chunk-"+chunks[0].class, chunks[0].colour);
|
||||
|
||||
if (style != prevStyle) {
|
||||
if (prevStyle) {
|
||||
parts.push("</span>");
|
||||
}
|
||||
parts.push(style);
|
||||
}
|
||||
parts.push(text[pos]);
|
||||
prevStyle = style;
|
||||
}
|
||||
|
||||
if (parts.length) {
|
||||
parts.push("</span>");
|
||||
}
|
||||
|
||||
return parts.join("");
|
||||
},
|
||||
|
||||
numberLine (number, line) {
|
||||
return `<span class="line-number">${number}</span>${line}`;
|
||||
},
|
||||
|
||||
setText (v) {
|
||||
//console.log(v);
|
||||
this.text_ = v;
|
||||
},
|
||||
|
||||
reset () {
|
||||
this.text_ = this?.text.replaceAll("\r", "");
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.reset();
|
||||
}
|
||||
|
||||
}
|
||||
</script>
|
||||
@@ -0,0 +1,122 @@
|
||||
<template>
|
||||
<v-row dense no-gutters>
|
||||
|
||||
<v-col cols="1">
|
||||
<slot name="prepend"></slot>
|
||||
</v-col>
|
||||
|
||||
<v-col cols="2">
|
||||
<v-chip outlined label small :color="colour" style="border: 1px dashed">{{value.text}}</v-chip>
|
||||
</v-col>
|
||||
|
||||
<v-col cols="2">
|
||||
<v-text-field
|
||||
class="ml-3"
|
||||
dense
|
||||
label="From"
|
||||
type="number"
|
||||
min="0"
|
||||
v-model.number="value.offset"
|
||||
:readonly="readonly"
|
||||
></v-text-field>
|
||||
</v-col>
|
||||
|
||||
<v-col cols="2">
|
||||
</v-col>
|
||||
|
||||
<v-col cols="2">
|
||||
</v-col>
|
||||
|
||||
<v-col cols="1">
|
||||
<slot name="append"></slot>
|
||||
</v-col>
|
||||
|
||||
</v-row>
|
||||
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
.input {
|
||||
flex: 1 1 auto;
|
||||
line-height: 20px;
|
||||
padding: 8px 0 8px;
|
||||
min-height: 32px;
|
||||
max-height: 32px;
|
||||
max-width: 100%;
|
||||
min-width: 0px;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.input >>> .chunk {
|
||||
padding-inline: 1px;
|
||||
border: 1px solid;
|
||||
}
|
||||
|
||||
.input >>> .chunk-empty {
|
||||
padding-inline: 1px;
|
||||
}
|
||||
|
||||
.input >>> .chunk-overlap {
|
||||
padding-inline: 1px;
|
||||
border: 1px solid grey;
|
||||
color: grey;
|
||||
}
|
||||
</style>
|
||||
|
||||
<script>
|
||||
|
||||
export default {
|
||||
name: "DougalFixedStringText",
|
||||
|
||||
components: {
|
||||
},
|
||||
|
||||
props: {
|
||||
value: Object,
|
||||
colour: String,
|
||||
readonly: Boolean,
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
name_: "",
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
},
|
||||
|
||||
watch: {
|
||||
|
||||
name () {
|
||||
if (this.name != this.name_) {
|
||||
this.name_ = this.name;
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
addField () {
|
||||
if (!this.fieldNameErrors) {
|
||||
this.$emit("update:fields", {
|
||||
...this.fields,
|
||||
[this.fieldName]: { offset: 0, length: 0 }
|
||||
});
|
||||
this.fieldName = "";
|
||||
}
|
||||
},
|
||||
|
||||
reset () {
|
||||
this.text_ = this.text;
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.reset();
|
||||
}
|
||||
|
||||
}
|
||||
</script>
|
||||
@@ -0,0 +1,301 @@
|
||||
<template>
|
||||
<v-card flat elevation="0">
|
||||
<v-card-title v-if="title">{{ title }}</v-card-title>
|
||||
<v-card-subtitle v-if="subtitle">{{ subtitle }}</v-card-subtitle>
|
||||
<v-card-text>
|
||||
|
||||
<v-tabs v-model="viewTab">
|
||||
<v-tab>Text</v-tab>
|
||||
<v-tab>Parsed</v-tab>
|
||||
</v-tabs>
|
||||
|
||||
<v-tabs-items v-model="viewTab">
|
||||
<v-tab-item>
|
||||
<v-simple-table dense class="text">
|
||||
<template v-slot:default>
|
||||
<colgroup v-if="showLineNumbers">
|
||||
<col class="line_no"/>
|
||||
</colgroup>
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="line_no">
|
||||
<v-simple-checkbox
|
||||
v-ripple
|
||||
off-icon="mdi-format-list-numbered"
|
||||
title="Show line numbers"
|
||||
v-model="showLineNumbers"
|
||||
>
|
||||
</v-simple-checkbox>
|
||||
</th>
|
||||
<th v-for="(header, idx) in headers" :key="idx"
|
||||
:style="`color:${header.colour};`"
|
||||
>
|
||||
{{ header.text }}
|
||||
</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr v-for="(row, ridx) in rows" :key="ridx">
|
||||
<td class="line_no">
|
||||
<small v-if="showLineNumbers">
|
||||
{{ ridx + (typeof numberedLines == "number" ? numberedLines : 0)+1 }}
|
||||
</small>
|
||||
</td>
|
||||
<td v-for="(cell, cidx) in row" :key="cidx"
|
||||
:style="`background-color:${cell.colour};`"
|
||||
>
|
||||
{{ cell.text }}
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</template>
|
||||
</v-simple-table>
|
||||
</v-tab-item>
|
||||
|
||||
<v-tab-item>
|
||||
<!-- Parsed view -->
|
||||
<v-simple-table dense class="parsed">
|
||||
<template v-slot:default>
|
||||
<thead>
|
||||
<tr>
|
||||
<th
|
||||
title="The line along which the vessel will nominally sail"
|
||||
>Sail line</th>
|
||||
<th
|
||||
title="Whether the line will be acquired in the incrementing or decrementing shot points direction"
|
||||
>Direction</th>
|
||||
<th
|
||||
title="Whether the line is planned to be acquired. Some lines may be in the preplot but not intended to be shot in a particular campaign"
|
||||
>Acquire?</th>
|
||||
<th
|
||||
title="The source lines that will be shot from this vessel line. Typically there is one source line per source array."
|
||||
>Source lines</th>
|
||||
<th
|
||||
title="Any general remarks concerning this sail line (supports Markdown)"
|
||||
>Remarks</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr v-for="(line, line_no) in saillines" :key="line_no">
|
||||
<td>{{ line_no }}</td>
|
||||
<td v-if="line.incr" title="Incrementing">▲</td>
|
||||
<td v-else title="Decrementing">▼</td>
|
||||
<td v-if="line.ntba" title="Not to be acquired" class="ko">✘</td>
|
||||
<td v-else title="Line acquisition planned" class="ok">✔</td>
|
||||
<td v-html="line.source_line.join('<br/>')"></td>
|
||||
<td v-if="line['meta.colour']"
|
||||
:style="`background-color:${line['meta.colour']};`"
|
||||
v-html="$options.filters.markdown(line.remarks)"></td>
|
||||
<td v-else
|
||||
v-html="$options.filters.markdown(line.remarks)"></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</template>
|
||||
</v-simple-table>
|
||||
</v-tab-item>
|
||||
</v-tabs-items>
|
||||
|
||||
</v-card-text>
|
||||
<v-card-actions>
|
||||
</v-card-actions>
|
||||
</v-card>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
/*.v-data-table table tbody tr td*/
|
||||
.text th {
|
||||
border: 1px solid hsl(0, 0%, 33.3%);
|
||||
}
|
||||
|
||||
.text td {
|
||||
border-inline: 1px solid hsl(0, 0%, 33.3%);
|
||||
}
|
||||
|
||||
.parsed td {
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
.line_no {
|
||||
text-align: right;
|
||||
width: 4ex;
|
||||
border: none !important;
|
||||
}
|
||||
|
||||
.ok {
|
||||
color: green;
|
||||
}
|
||||
|
||||
.ko {
|
||||
color: red;
|
||||
}
|
||||
</style>
|
||||
|
||||
<script>
|
||||
import { parse } from 'csv-parse/sync'
|
||||
import { getHSLColourFor } from '@/lib/hsl'
|
||||
import truncateText from '@/lib/truncate-text'
|
||||
|
||||
export default {
|
||||
name: "DougalSaillinesStringDecoder",
|
||||
|
||||
components: {
|
||||
},
|
||||
|
||||
props: {
|
||||
text: String,
|
||||
//fields: Object,
|
||||
//delimiter: String,
|
||||
headerRow: { type: [ Boolean, Number ], default: false},
|
||||
numberedLines: [ Boolean, Number ],
|
||||
maxHeight: String,
|
||||
editableFieldList: { type: Boolean, default: true },
|
||||
readonly: Boolean,
|
||||
title: String,
|
||||
subtitle: String
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
delimiter: ",",
|
||||
showLineNumbers: null,
|
||||
text_: "",
|
||||
viewTab: null
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
cells () {
|
||||
return parse(this.text_, {delimiter: this.delimiter, trim: true});
|
||||
},
|
||||
|
||||
headers () {
|
||||
return this.cells[0]?.map(cell => ({
|
||||
text: cell,
|
||||
colour: this.getHSLColourFor(cell),
|
||||
backgroundColour: this.getHSLColourFor(cell, 0.2),
|
||||
})) ?? [];
|
||||
},
|
||||
|
||||
rows () {
|
||||
return [...this.cells].slice(1).map(r =>
|
||||
r.map( (c, ι) => ({
|
||||
text: truncateText(c),
|
||||
colour: this.headers[ι]?.backgroundColour
|
||||
})));
|
||||
},
|
||||
|
||||
/*
|
||||
* A saillines object looks like:
|
||||
*
|
||||
* {
|
||||
* [sail_line]: {
|
||||
* incr: true, // or false
|
||||
* ntba: true, // or false
|
||||
* remarks: "",
|
||||
* source_line: [ 1000, 1001, …],
|
||||
* "meta.colour": ""
|
||||
* },
|
||||
* …
|
||||
* }
|
||||
*/
|
||||
saillines () {
|
||||
// Return an array of the column numbers
|
||||
// corresponding to `key`.
|
||||
// This file accepts duplicate column numbers,
|
||||
// notably for `source_line`.
|
||||
const key_indices = (key) =>
|
||||
this.headers.reduce( (acc, cur, ι) => {
|
||||
if (cur.text == key) {
|
||||
acc.push(ι)
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
|
||||
// Properties of the sailline object
|
||||
const keys = [ "incr", "ntba", "remarks", "source_line", "meta.colour" ];
|
||||
|
||||
function to_bool (v, missing=false) {
|
||||
return (v === undefined || v === null)
|
||||
? missing // Missing value meaning
|
||||
: /^t(rue)|^[1-9-]+$/i.test(String(v).trim())
|
||||
}
|
||||
|
||||
// To transform the input text into the required format for each field
|
||||
const transformer = (key) => {
|
||||
const transformers = {
|
||||
incr: (v) => to_bool(v, true),
|
||||
ntba: (v) => to_bool(v, false),
|
||||
remarks: (v) => (v === undefined || v === null) ? "" : String,
|
||||
source_line: Number,
|
||||
};
|
||||
return transformers[key] ?? String;
|
||||
};
|
||||
|
||||
// This is the saillines object
|
||||
const lines = {};
|
||||
|
||||
// The column numbers for each property
|
||||
const columns = keys.map( k => [ k, key_indices(k) ] );
|
||||
|
||||
// The column number for the sail_line property, which
|
||||
// we use as a key.
|
||||
const sail_line_idx = key_indices("sail_line")[0];
|
||||
|
||||
// Transform each line in the input file into a
|
||||
// sailline object (just for display purposes,
|
||||
// this is not exactly how the server will do it).
|
||||
for (const row of this.rows) {
|
||||
const sail_line = row[sail_line_idx]?.text;
|
||||
const values = columns.map(i => [
|
||||
i[0],
|
||||
i[0] == "source_line"
|
||||
? i[1].map(idx => transformer(i[0])(row[idx]?.text))
|
||||
: transformer(i[0])(row[i[1][0]]?.text)
|
||||
]);
|
||||
|
||||
lines[sail_line] = Object.fromEntries(values);
|
||||
}
|
||||
|
||||
return lines;
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
watch: {
|
||||
|
||||
text () {
|
||||
if (this.text != this.text_) {
|
||||
this.reset();
|
||||
}
|
||||
},
|
||||
|
||||
numberedLines (cur, prev) {
|
||||
if (cur != prev) {
|
||||
this.showLineNumbers = typeof cur == "number" || cur;
|
||||
}
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
|
||||
getHSLColourFor: getHSLColourFor.bind(this),
|
||||
|
||||
numberLine (number, line) {
|
||||
return `<span class="line-number">${number}</span>${line}`;
|
||||
},
|
||||
|
||||
reset () {
|
||||
this.text_ = this.text.replaceAll("\r", "");
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.reset();
|
||||
}
|
||||
|
||||
}
|
||||
</script>
|
||||
@@ -0,0 +1,269 @@
|
||||
<template>
|
||||
<v-row dense no-gutters>
|
||||
|
||||
<v-col>
|
||||
<slot name="prepend"></slot>
|
||||
</v-col>
|
||||
|
||||
<v-col cols="2">
|
||||
<v-chip v-if="value.item && !readonly"
|
||||
outlined
|
||||
label
|
||||
small
|
||||
:color="colour"
|
||||
:title="description"
|
||||
>{{name}}</v-chip>
|
||||
<v-select v-else-if="items.length && !readonly"
|
||||
label="Item"
|
||||
:items=items
|
||||
v-model="value.item"
|
||||
dense
|
||||
title="Select an item to use as a field"
|
||||
></v-select>
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<v-select v-if="type == 'boolean'"
|
||||
label="Condition"
|
||||
:items="[true, false]"
|
||||
v-model="value.when"
|
||||
dense
|
||||
title="Use this configuration only when the value of this item matches the selected state. This allows the user to configure different values for true and false conditions."
|
||||
></v-select>
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<v-text-field v-if="type == 'boolean' || type == 'text'"
|
||||
class="ml-3"
|
||||
dense
|
||||
label="Value"
|
||||
v-model="value.value"
|
||||
title="This literal text will be inserted at the designated position"
|
||||
></v-text-field>
|
||||
<v-menu v-else-if="type == 'number'"
|
||||
max-width="600"
|
||||
:close-on-content-click="false"
|
||||
offset-y
|
||||
>
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
<v-chip
|
||||
class="ml-3"
|
||||
small
|
||||
:light="$vuetify.theme.isDark"
|
||||
:dark="!$vuetify.theme.isDark"
|
||||
:color="value.scale_offset != null || value.scale_multiplier != null ? 'primary' : ''"
|
||||
:title="`Number scaling${ value.scale_offset != null ? ('\nOffset: ' + value.scale_offset) : '' }${ value.scale_multiplier != null ? ('\nMultiplier: ' + value.scale_multiplier) : ''}`"
|
||||
v-bind="attrs"
|
||||
v-on="on"
|
||||
>
|
||||
<v-icon small>mdi-ruler</v-icon>
|
||||
</v-chip>
|
||||
</template>
|
||||
|
||||
<v-card rounded outlined>
|
||||
<v-card-text>
|
||||
<v-row dense no-gutters>
|
||||
<v-text-field
|
||||
type="number"
|
||||
dense
|
||||
clearable
|
||||
label="Offset"
|
||||
title="Offset the value by this amount (after scaling)"
|
||||
v-model.number="value.scale_offset"
|
||||
></v-text-field>
|
||||
</v-row>
|
||||
<v-row dense no-gutters>
|
||||
<v-text-field
|
||||
type="number"
|
||||
dense
|
||||
clearable
|
||||
label="Scale"
|
||||
title="Mutiply the value by this amount (before scaling)"
|
||||
v-model.number="value.scale_multiplier"
|
||||
></v-text-field>
|
||||
</v-row>
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</v-menu>
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<v-text-field
|
||||
class="ml-3"
|
||||
dense
|
||||
label="From"
|
||||
type="number"
|
||||
min="0"
|
||||
v-model.number="value.offset"
|
||||
:readonly="readonly"
|
||||
></v-text-field>
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<v-text-field
|
||||
class="ml-3"
|
||||
dense
|
||||
label="Length"
|
||||
type="number"
|
||||
min="0"
|
||||
v-model.number="value.length"
|
||||
:readonly="readonly"
|
||||
></v-text-field>
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<v-menu v-if="value.length > 1"
|
||||
max-width="600"
|
||||
:close-on-content-click="false"
|
||||
offset-y
|
||||
:disabled="!(value.length>1)"
|
||||
>
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
<v-chip
|
||||
class="ml-3"
|
||||
small
|
||||
:light="$vuetify.theme.isDark"
|
||||
:dark="!$vuetify.theme.isDark"
|
||||
title="Text alignment"
|
||||
v-bind="attrs"
|
||||
v-on="on"
|
||||
:disabled="!(value.length>1)"
|
||||
>
|
||||
<v-icon small v-if="value.pad_side=='right'">mdi-format-align-left</v-icon>
|
||||
<v-icon small v-else-if="value.pad_side=='left'">mdi-format-align-right</v-icon>
|
||||
<v-icon small v-else>mdi-format-align-justify</v-icon>
|
||||
</v-chip>
|
||||
</template>
|
||||
|
||||
<v-card rounded outlined>
|
||||
<v-card-text>
|
||||
<v-row dense no-gutters>
|
||||
<v-select
|
||||
label="Alignment"
|
||||
clearable
|
||||
:items='[{text:"Left", value:"right"}, {text:"Right", value:"left"}]'
|
||||
v-model="value.pad_side"
|
||||
></v-select>
|
||||
</v-row>
|
||||
<v-row dense no-gutters v-if="value.pad_side">
|
||||
<v-text-field
|
||||
dense
|
||||
label="Pad character"
|
||||
title="Fill the width of the field on the opposite side by padding with this character"
|
||||
v-model="value.pad_string"
|
||||
></v-text-field>
|
||||
</v-row>
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</v-menu>
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<slot name="append"></slot>
|
||||
</v-col>
|
||||
|
||||
</v-row>
|
||||
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
.input {
|
||||
flex: 1 1 auto;
|
||||
line-height: 20px;
|
||||
padding: 8px 0 8px;
|
||||
min-height: 32px;
|
||||
max-height: 32px;
|
||||
max-width: 100%;
|
||||
min-width: 0px;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.input >>> .chunk {
|
||||
padding-inline: 1px;
|
||||
border: 1px solid;
|
||||
}
|
||||
|
||||
.input >>> .chunk-empty {
|
||||
padding-inline: 1px;
|
||||
}
|
||||
|
||||
.input >>> .chunk-overlap {
|
||||
padding-inline: 1px;
|
||||
border: 1px solid grey;
|
||||
color: grey;
|
||||
}
|
||||
</style>
|
||||
|
||||
<script>
|
||||
export default {
|
||||
name: "DougalFixedStringEncoderField",
|
||||
|
||||
components: {
|
||||
},
|
||||
|
||||
props: {
|
||||
value: Object,
|
||||
properties: Object,
|
||||
colour: String,
|
||||
readonly: Boolean,
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
}
|
||||
},
|
||||
|
||||
watch: {
|
||||
"value.value": function (value, old) {
|
||||
if (value != null && String(value).length > this.value.length) {
|
||||
this.value.length = String(value).length;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
field: {
|
||||
get () {
|
||||
return this.value;
|
||||
},
|
||||
set (v) {
|
||||
console.log("input", v);
|
||||
this.$emit("input", v);
|
||||
}
|
||||
},
|
||||
|
||||
item () {
|
||||
return this.properties?.[this.value?.item] ?? {};
|
||||
},
|
||||
|
||||
items () {
|
||||
return Object.entries(this.properties).map(i => ({text: i[1].summary ?? i[0], value: i[0]}))
|
||||
},
|
||||
|
||||
name () {
|
||||
// TODO Use properties[item].summary or similar
|
||||
return this.item?.summary ?? this.value.item ?? "???";
|
||||
},
|
||||
|
||||
type () {
|
||||
return this.item?.type ?? typeof this.value?.item ?? "undefined";
|
||||
},
|
||||
|
||||
description () {
|
||||
return this.item?.description;
|
||||
}
|
||||
},
|
||||
|
||||
methods: {
|
||||
reset () {
|
||||
}
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.reset();
|
||||
}
|
||||
|
||||
}
|
||||
</script>
|
||||
@@ -0,0 +1,351 @@
|
||||
<template>
|
||||
<v-input
|
||||
class="v-text-field"
|
||||
:hint="hint"
|
||||
persistent-hint
|
||||
:value="text"
|
||||
>
|
||||
<label
|
||||
class="v-label"
|
||||
:class="[ $vuetify.theme.isDark && 'theme--dark', text && text.length && 'v-label--active' ]"
|
||||
style="left: 0px; right: auto; position: absolute;"
|
||||
>{{ label }}</label>
|
||||
<div class="input" slot="default"
|
||||
v-html="html"
|
||||
>
|
||||
</div>
|
||||
<template slot="append">
|
||||
<v-menu
|
||||
scrollable
|
||||
offset-y
|
||||
:close-on-content-click="false"
|
||||
>
|
||||
|
||||
<template v-slot:activator="{on, attrs}">
|
||||
<v-btn
|
||||
icon
|
||||
v-bind="attrs"
|
||||
v-on="on"
|
||||
>
|
||||
<v-icon title="Configure sample values">mdi-list-box-outline</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
|
||||
<v-card>
|
||||
<v-card-title>Sample values</v-card-title>
|
||||
<v-card-subtitle>Enter sample values to test your configuration</v-card-subtitle>
|
||||
|
||||
<v-divider></v-divider>
|
||||
|
||||
<v-card-text>
|
||||
|
||||
<v-container>
|
||||
<v-row v-for="(prop, key) in properties" :key="key">
|
||||
<template v-if="prop.type == 'boolean'">
|
||||
<v-col cols="6" align-self="center">
|
||||
<v-chip
|
||||
outlined
|
||||
label
|
||||
small
|
||||
:color="getHSLColourFor(key)"
|
||||
:title="prop.description"
|
||||
>{{prop.summary || key}}</v-chip>
|
||||
</v-col>
|
||||
<v-col cols="6" align-self="center">
|
||||
<v-simple-checkbox v-model="values[key]"></v-simple-checkbox>
|
||||
</v-col>
|
||||
</template>
|
||||
<template v-else-if="key != 'text'">
|
||||
<v-col cols="6" align-self="center">
|
||||
<v-chip
|
||||
outlined
|
||||
label
|
||||
small
|
||||
:color="getHSLColourFor(key)"
|
||||
:title="prop.description"
|
||||
>{{prop.summary || key}}</v-chip>
|
||||
</v-col>
|
||||
<v-col cols="6" align-self="center">
|
||||
<v-text-field v-if="prop.type == 'number'"
|
||||
:type="prop.type"
|
||||
:label="prop.summary || key"
|
||||
:hint="prop.description"
|
||||
v-model.number="values[key]"
|
||||
></v-text-field>
|
||||
<v-text-field v-else
|
||||
:type="prop.type"
|
||||
:label="prop.summary || key"
|
||||
:hint="prop.description"
|
||||
v-model="values[key]"
|
||||
></v-text-field>
|
||||
</v-col>
|
||||
</template>
|
||||
</v-row>
|
||||
</v-container>
|
||||
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
|
||||
</v-menu>
|
||||
</template>
|
||||
<v-icon slot="prepend">mdi-list</v-icon>
|
||||
</v-input>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
.input {
|
||||
flex: 1 1 auto;
|
||||
line-height: 20px;
|
||||
padding: 8px 0 8px;
|
||||
min-height: 32px;
|
||||
max-height: 32px;
|
||||
max-width: 100%;
|
||||
min-width: 0px;
|
||||
width: 100%;
|
||||
white-space-collapse: preserve;
|
||||
}
|
||||
|
||||
.multiline {
|
||||
font-family: mono;
|
||||
white-space: pre;
|
||||
overflow-x: auto;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
.multiline >>> .line-number {
|
||||
display: inline-block;
|
||||
font-size: 75%;
|
||||
width: 5ex;
|
||||
margin-inline-end: 1ex;
|
||||
text-align: right;
|
||||
border: none;
|
||||
position: relative;
|
||||
top: -1px;
|
||||
}
|
||||
|
||||
.input, .multiline >>> .chunk-field {
|
||||
padding-inline: 1px;
|
||||
border: 1px solid;
|
||||
}
|
||||
|
||||
.input, .multiline >>> .chunk-fixed {
|
||||
padding-inline: 1px;
|
||||
border: 1px dashed;
|
||||
}
|
||||
|
||||
.input, .multiline >>> .chunk-empty {
|
||||
padding-inline: 1px;
|
||||
}
|
||||
|
||||
.input, .multiline >>> .chunk-overlap {
|
||||
padding-inline: 1px;
|
||||
border: 1px solid grey;
|
||||
color: grey;
|
||||
}
|
||||
|
||||
.input >>> .chunk-mismatch {
|
||||
padding-inline: 1px;
|
||||
border: 2px solid red !important;
|
||||
}
|
||||
|
||||
</style>
|
||||
|
||||
<script>
|
||||
import { getHSLColourFor } from '@/lib/hsl'
|
||||
|
||||
export default {
|
||||
name: "DougalFixedStringEncoderSample",
|
||||
|
||||
components: {
|
||||
},
|
||||
|
||||
mixins: [
|
||||
{
|
||||
methods: {
|
||||
getHSLColourFor
|
||||
}
|
||||
}
|
||||
],
|
||||
|
||||
props: {
|
||||
properties: { type: Object, default: () => ({}) },
|
||||
fields: { type: Array, default: () => [] },
|
||||
values: { type: Object, default: () => ({}) },
|
||||
readonly: Boolean,
|
||||
label: String,
|
||||
hint: String,
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
chunks () {
|
||||
const properties = this.properties;
|
||||
const fields = this.fields;
|
||||
const values = this.values;
|
||||
const str = "";
|
||||
const chunks = [];
|
||||
|
||||
for (const field of fields) {
|
||||
const value = this.fieldValue(properties, field, values);
|
||||
|
||||
if (value != null) {
|
||||
const chunk = {
|
||||
start: field.offset,
|
||||
end: field.offset + field.length - 1,
|
||||
colour: this.getHSLColourFor(field.item),
|
||||
class: field.item == "text" ? "fixed" : "field",
|
||||
text: value
|
||||
}
|
||||
chunks.push(chunk);
|
||||
}
|
||||
}
|
||||
|
||||
return chunks;
|
||||
},
|
||||
|
||||
text () {
|
||||
return this.sample(this.properties, this.fields, this.values);
|
||||
},
|
||||
|
||||
html () {
|
||||
return this.renderTextLine(this.text);
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
watch: {
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
/**
 * Compute the string a field contributes to the sample line.
 *
 * - "text" items always render their configured literal `field.value`.
 * - boolean-typed items render `field.value` only when the sample value
 *   strictly equals the configured trigger `field.when`.
 * - all other items render the raw sample value for the item.
 *
 * Numeric values are scaled (multiplier first, then offset) and
 * optionally rounded, then the result is stringified and padded to the
 * field width. Returns undefined when the field contributes nothing.
 */
fieldValue (properties, field, values) {
  let value;

  if (field.item == "text") {
    value = field.value;
  } else if (properties[field.item]?.type == "boolean") {
    // Strict equality: only an exact match of the trigger value fires.
    if (values[field.item] === field.when) {
      value = field.value;
    }
  } else {
    value = values[field.item];
  }

  if (value != null) {

    if (properties[field.item]?.type == "number") {
      // Scaling order matters: multiplier is applied before offset.
      if (field.scale_multiplier != null) {
        value *= field.scale_multiplier;
      }
      if (field.scale_offset != null) {
        value += field.scale_offset;
      }

      if (field.format == "integer") {
        value = Math.round(value);
      }
    }

    value = String(value);
    // Pad to the field width; spaces when no pad character configured.
    if (field.pad_side == "left") {
      value = value.padStart(field.length, field.pad_string ?? " ");
    } else if (field.pad_side == "right") {
      value = value.padEnd(field.length, field.pad_string ?? " ");
    }

    return value;
  }
  // Implicitly returns undefined when no applicable value exists.
},
|
||||
|
||||
sample (properties, fields, values, str = "") {
|
||||
|
||||
const length = fields.reduce( (acc, cur) => (cur.offset + cur.length) > acc ? (cur.offset + cur.length) : acc, str.length )
|
||||
|
||||
str = str.padEnd(length);
|
||||
|
||||
for (const field of fields) {
|
||||
//console.log("FIELD", field);
|
||||
const value = this.fieldValue(properties, field, values);
|
||||
if (value != null) {
|
||||
str = str.slice(0, field.offset) + value + str.slice(field.offset + field.length);
|
||||
}
|
||||
}
|
||||
|
||||
return str;
|
||||
},
|
||||
|
||||
/** Return a `<span>` opening tag.
|
||||
*/
|
||||
style (name, colour) {
|
||||
return colour
|
||||
? `<span class="${name}" style="color:${colour};border-color:${colour}">`
|
||||
: `<span class="${name}">`;
|
||||
},
|
||||
|
||||
/** Return an array of the intervals that intersect `pos`.
|
||||
* May be empty.
|
||||
*/
|
||||
chunksFor (pos) {
|
||||
return this.chunks.filter( chunk =>
|
||||
pos >= chunk.start &&
|
||||
pos <= chunk.end
|
||||
)
|
||||
},
|
||||
|
||||
/*
|
||||
* Algorithm:
|
||||
*
|
||||
* Go through every character of one line of text and determine in which
|
||||
* part(s) it falls in, if any. Collect adjacent same parts into <span/>
|
||||
* elements.
|
||||
*/
|
||||
renderTextLine (text) {
|
||||
const parts = [];
|
||||
|
||||
let prevStyle;
|
||||
|
||||
for (const pos in text) {
|
||||
const chunks = this.chunksFor(pos);
|
||||
const isEmpty = chunks.length == 0;
|
||||
const isOverlap = chunks.length > 1;
|
||||
const isMismatch = chunks[0]?.text &&
|
||||
(text.substring(chunks[0].start, chunks[0].end+1) != chunks[0].text);
|
||||
|
||||
const style = isEmpty
|
||||
? this.style("chunk-empty")
|
||||
: isMismatch
|
||||
? this.style("chunk-mismatch", chunks[0].colour)
|
||||
: isOverlap
|
||||
? this.style("chunk-overlap")
|
||||
: this.style("chunk-"+chunks[0].class, chunks[0].colour);
|
||||
|
||||
if (style != prevStyle) {
|
||||
if (prevStyle) {
|
||||
parts.push("</span>");
|
||||
}
|
||||
parts.push(style);
|
||||
}
|
||||
parts.push(text[pos]);
|
||||
prevStyle = style;
|
||||
}
|
||||
|
||||
if (parts.length) {
|
||||
parts.push("</span>");
|
||||
}
|
||||
|
||||
return parts.join("");
|
||||
},
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
}
|
||||
|
||||
}
|
||||
</script>
|
||||
@@ -0,0 +1,307 @@
|
||||
<template>
|
||||
<v-card flat elevation="0">
|
||||
<v-card-title v-if="title">{{ title }}</v-card-title>
|
||||
<v-card-subtitle v-if="subtitle">{{ subtitle }}</v-card-subtitle>
|
||||
<v-card-text>
|
||||
<v-form>
|
||||
|
||||
<!-- Sample text -->
|
||||
|
||||
<dougal-fixed-string-encoder-sample
|
||||
:label="label"
|
||||
:hint="hint"
|
||||
:properties="properties"
|
||||
:fields="fields"
|
||||
:values.sync="values"
|
||||
></dougal-fixed-string-encoder-sample>
|
||||
|
||||
<!-- Fields -->
|
||||
|
||||
<v-container>
|
||||
|
||||
<v-row no-gutters class="mb-2">
|
||||
<h4>Fields</h4>
|
||||
</v-row>
|
||||
|
||||
<dougal-fixed-string-encoder-field v-for="(field, key) in fields" :key="key"
|
||||
v-model="fields[key]"
|
||||
:properties="properties"
|
||||
:colour="getHSLColourFor(field.item)"
|
||||
:readonly="readonly"
|
||||
>
|
||||
<template v-slot:append v-if="editableFieldList && !readonly">
|
||||
<v-btn
|
||||
class="ml-3"
|
||||
fab
|
||||
text
|
||||
small
|
||||
title="Remove this field"
|
||||
>
|
||||
<v-icon
|
||||
color="error"
|
||||
@click="removeField(key)"
|
||||
>mdi-minus</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
</dougal-fixed-string-encoder-field>
|
||||
|
||||
<v-row no-gutters class="mb-2" v-if="editableFieldList && !readonly">
|
||||
<h4>Add new field</h4>
|
||||
</v-row>
|
||||
|
||||
<dougal-fixed-string-encoder-field v-if="editableFieldList && !readonly"
|
||||
v-model="newField"
|
||||
:properties="properties"
|
||||
:colour="getHSLColourFor(newField.item)"
|
||||
>
|
||||
<template v-slot:prepend>
|
||||
<v-btn v-if="isFieldDirty(newField)"
|
||||
top
|
||||
text
|
||||
small
|
||||
title="Reset"
|
||||
>
|
||||
<v-icon
|
||||
color="warning"
|
||||
@click="resetField(newField)"
|
||||
>mdi-backspace-reverse-outline</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
<template v-slot:append>
|
||||
<v-btn
|
||||
class="ml-3"
|
||||
fab
|
||||
text
|
||||
small
|
||||
title="Add field"
|
||||
:disabled="isFieldValid(newField) !== true"
|
||||
>
|
||||
<v-icon
|
||||
color="primary"
|
||||
@click="addField(newField)"
|
||||
>mdi-plus</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
</dougal-fixed-string-encoder-field>
|
||||
|
||||
</v-container>
|
||||
|
||||
|
||||
|
||||
</v-form>
|
||||
</v-card-text>
|
||||
<v-card-actions>
|
||||
</v-card-actions>
|
||||
</v-card>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
.input {
|
||||
flex: 1 1 auto;
|
||||
line-height: 20px;
|
||||
padding: 8px 0 8px;
|
||||
min-height: 32px;
|
||||
max-height: 32px;
|
||||
max-width: 100%;
|
||||
min-width: 0px;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.input, .multiline >>> .chunk-field {
|
||||
padding-inline: 1px;
|
||||
border: 1px solid;
|
||||
}
|
||||
|
||||
.input, .multiline >>> .chunk-fixed {
|
||||
padding-inline: 1px;
|
||||
border: 1px dashed;
|
||||
}
|
||||
|
||||
.input, .multiline >>> .chunk-empty {
|
||||
padding-inline: 1px;
|
||||
}
|
||||
|
||||
.input, .multiline >>> .chunk-overlap {
|
||||
padding-inline: 1px;
|
||||
border: 1px solid grey;
|
||||
color: grey;
|
||||
}
|
||||
|
||||
.input >>> .chunk-mismatch {
|
||||
padding-inline: 1px;
|
||||
border: 2px solid red !important;
|
||||
}
|
||||
</style>
|
||||
|
||||
<script>
|
||||
import { getHSLColourFor } from '@/lib/hsl'
|
||||
import DougalFixedStringEncoderField from './fixed-string-encoder-field'
|
||||
import DougalFixedStringEncoderSample from './fixed-string-encoder-sample'
|
||||
|
||||
export default {
|
||||
name: "DougalFixedStringEncoder",
|
||||
|
||||
components: {
|
||||
DougalFixedStringEncoderField,
|
||||
DougalFixedStringEncoderSample
|
||||
},
|
||||
|
||||
mixins: [
|
||||
{
|
||||
methods: {
|
||||
getHSLColourFor
|
||||
}
|
||||
}
|
||||
],
|
||||
|
||||
props: {
|
||||
properties: { type: Object },
|
||||
fields: { type: Array },
|
||||
values: { type: Object },
|
||||
editableFieldList: { type: Boolean, default: true },
|
||||
readonly: Boolean,
|
||||
title: String,
|
||||
subtitle: String,
|
||||
label: String,
|
||||
hint: String,
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
//< The reason for not using this.text directly is that at some point
|
||||
//< we might extend this component to allow editing the sample text.
|
||||
text_: "",
|
||||
//< The value of a fixed string that should be always present at a specific position
|
||||
fixedName: "",
|
||||
fixedOffset: 0,
|
||||
//< The name of a new field to add.
|
||||
fieldName: "",
|
||||
newField: {
|
||||
item: null,
|
||||
when: null,
|
||||
offset: null,
|
||||
length: null,
|
||||
value: null,
|
||||
pad_side: null,
|
||||
pad_string: null
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
chunks () {
|
||||
const properties = this.properties;
|
||||
const fields = this.fields;
|
||||
const values = this.values;
|
||||
const str = "";
|
||||
const chunks = [];
|
||||
|
||||
for (const field of fields) {
|
||||
|
||||
//console.log("FIELD", structuredClone(field));
|
||||
//console.log("VALUES DATA", values[field.item]);
|
||||
let value;
|
||||
|
||||
if (field.item == "text") {
|
||||
value = field.value;
|
||||
} else if (properties[field.item]?.type == "boolean") {
|
||||
if (values[field.item] === field.when) {
|
||||
value = field.value;
|
||||
}
|
||||
} else {
|
||||
value = values[field.item];
|
||||
}
|
||||
|
||||
if (value != null) {
|
||||
|
||||
value = String(value);
|
||||
if (field.pad_side == "left") {
|
||||
value = value.padStart(field.length, field.pad_string);
|
||||
} else {
|
||||
value = value.padEnd(field.length, field.pad_string);
|
||||
}
|
||||
|
||||
const chunk = {
|
||||
start: field.offset,
|
||||
end: field.offset + field.length - 1,
|
||||
colour: this.getHSLColourFor(field.item),
|
||||
class: field.item == "text" ? "fixed" : "field",
|
||||
text: value
|
||||
}
|
||||
|
||||
//console.log("CHUNK", chunk);
|
||||
chunks.push(chunk);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return chunks;
|
||||
},
|
||||
|
||||
html () {
|
||||
return this.renderTextLine(this.sample(this.properties, this.fields, this.values));
|
||||
//return this.sample(this.properties, this.fields, this.values);
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
watch: {
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
isFieldDirty (field) {
|
||||
return Object.entries(field).reduce( (acc, cur) => cur[1] === null ? acc : true, false );
|
||||
},
|
||||
|
||||
/**
 * Validate a field definition before it is added to the list.
 *
 * Returns `true` when valid, otherwise a human-readable error string.
 * The error strings are truthy, so callers MUST compare the result
 * against `true` (as the template's `isFieldValid(newField) !== true`
 * does) rather than testing truthiness.
 */
isFieldValid (field) {
  if (!field.item) return "Missing item";
  if (typeof field.offset !== "number" || field.offset < 0) return "Missing offset";
  if (typeof field.length !== "number" || field.length < 1) return "Missing length";
  if (!this.properties[field.item]) return "Unrecognised property";
  // NOTE(review): assumes `properties` holds entries with type "text" /
  // "boolean" for literal-valued items — confirm the schema uses these
  // type names (elsewhere "text" items are special-cased by key, which
  // would fail the "Unrecognised property" check above).
  if (this.properties[field.item].type == "text" && !field.value?.length) return "Missing value";
  if (this.properties[field.item].type == "boolean" && !field.value?.length) return "Missing value (boolean)";
  if(!!field.pad_side && !field.pad_string) return "Missing pad string";

  return true;
},
|
||||
|
||||
resetField (field) {
|
||||
field.item = null;
|
||||
field.when = null;
|
||||
field.offset = null;
|
||||
field.length = null;
|
||||
field.value = null;
|
||||
field.pad_side = null;
|
||||
field.pad_string = null;
|
||||
|
||||
return field;
|
||||
},
|
||||
|
||||
addField (field) {
|
||||
if (this.isFieldValid(field)) {
|
||||
const fields = structuredClone(this.fields);
|
||||
fields.push({...field});
|
||||
this.resetField(field);
|
||||
console.log("update:fields", fields);
|
||||
this.$emit("update:fields", fields);
|
||||
}
|
||||
},
|
||||
|
||||
removeField (key) {
|
||||
console.log("REMOVE", "update:fields", key, this.fields);
|
||||
const fields = structuredClone(this.fields);
|
||||
fields.splice(key, 1);
|
||||
this.$emit("update:fields", fields);
|
||||
},
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
}
|
||||
|
||||
}
|
||||
</script>
|
||||
@@ -44,7 +44,7 @@
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
<v-text-field
|
||||
v-model="tsDate"
|
||||
:disabled="!!(sequence || point || entrySequence || entryPoint)"
|
||||
:disabled="!!(entrySequence || entryPoint)"
|
||||
label="Date"
|
||||
suffix="UTC"
|
||||
prepend-icon="mdi-calendar"
|
||||
@@ -64,7 +64,7 @@
|
||||
<v-col>
|
||||
<v-text-field
|
||||
v-model="tsTime"
|
||||
:disabled="!!(sequence || point || entrySequence || entryPoint)"
|
||||
:disabled="!!(entrySequence || entryPoint)"
|
||||
label="Time"
|
||||
suffix="UTC"
|
||||
prepend-icon="mdi-clock-outline"
|
||||
@@ -123,29 +123,11 @@
|
||||
|
||||
<v-row dense>
|
||||
<v-col cols="12">
|
||||
<v-combobox
|
||||
ref="remarks"
|
||||
v-model="entryRemarks"
|
||||
:disabled="loading"
|
||||
:search-input.sync="entryRemarksInput"
|
||||
:items="remarksAvailable"
|
||||
:filter="searchRemarks"
|
||||
item-text="text"
|
||||
return-object
|
||||
label="Remarks"
|
||||
hint="Placeholders: @DMS@, @DEG@, @EN@, @WD@, @BSP@, @CMG@, …"
|
||||
prepend-icon="mdi-text-box-outline"
|
||||
append-outer-icon="mdi-magnify"
|
||||
@click:append-outer="(e) => remarksMenu = e"
|
||||
></v-combobox>
|
||||
|
||||
<dougal-context-menu
|
||||
:value="remarksMenu"
|
||||
@input="handleRemarksMenu"
|
||||
:items="presetRemarks"
|
||||
absolute
|
||||
></dougal-context-menu>
|
||||
|
||||
<dougal-event-select
|
||||
v-bind.sync="entryRemarks"
|
||||
:preset-remarks="presetRemarks"
|
||||
@update:labels="(v) => this.entryLabels = v"
|
||||
></dougal-event-select>
|
||||
</v-col>
|
||||
</v-row>
|
||||
|
||||
@@ -256,6 +238,15 @@
|
||||
>
|
||||
Cancel
|
||||
</v-btn>
|
||||
<v-btn v-if="!id && (entrySequence || entryPoint)"
|
||||
color="info"
|
||||
text
|
||||
title="Enter an event by time"
|
||||
@click="timed"
|
||||
>
|
||||
<v-icon left small>mdi-clock-outline</v-icon>
|
||||
Timed
|
||||
</v-btn>
|
||||
<v-spacer></v-spacer>
|
||||
<v-btn
|
||||
:disabled="!canSave"
|
||||
@@ -281,6 +272,7 @@
|
||||
<script>
|
||||
import { mapActions } from 'vuex';
|
||||
import DougalContextMenu from '@/components/context-menu';
|
||||
import DougalEventSelect from '@/components/event-select';
|
||||
|
||||
function stringSort (a, b) {
|
||||
return a == b
|
||||
@@ -299,6 +291,7 @@ function flattenRemarks(items, keywords=[], labels=[]) {
|
||||
if (!item.items) {
|
||||
result.push({
|
||||
text: item.text,
|
||||
properties: item.properties,
|
||||
labels: labels.concat(item.labels??[]),
|
||||
keywords
|
||||
})
|
||||
@@ -333,7 +326,8 @@ export default {
|
||||
name: 'DougalEventEdit',
|
||||
|
||||
components: {
|
||||
DougalContextMenu
|
||||
DougalContextMenu,
|
||||
DougalEventSelect
|
||||
},
|
||||
|
||||
props: {
|
||||
@@ -345,6 +339,7 @@ export default {
|
||||
sequence: { type: Number },
|
||||
point: { type: Number },
|
||||
remarks: { type: String },
|
||||
meta: { type: Object },
|
||||
labels: { type: Array, default: () => [] },
|
||||
latitude: { type: Number },
|
||||
longitude: { type: Number },
|
||||
@@ -362,18 +357,11 @@ export default {
|
||||
entrySequence: null,
|
||||
entryPoint: null,
|
||||
entryRemarks: null,
|
||||
entryRemarksInput: null,
|
||||
entryLatitude: null,
|
||||
entryLongitude: null
|
||||
}),
|
||||
|
||||
computed: {
|
||||
remarksAvailable () {
|
||||
return this.entryRemarksInput == this.entryRemarks?.text ||
|
||||
this.entryRemarksInput == this.entryRemarks
|
||||
? []
|
||||
: flattenRemarks(this.presetRemarks);
|
||||
},
|
||||
|
||||
allSelected () {
|
||||
return this.entryLabels.length === this.items.length
|
||||
@@ -385,11 +373,6 @@ export default {
|
||||
return true;
|
||||
}
|
||||
|
||||
// The user is editing the remarks
|
||||
if (this.entryRemarksText != this.entryRemarksInput) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Selected label set distinct from input labels
|
||||
if (distinctSets(this.selectedLabels, this.entryLabels, (i) => i.text)) {
|
||||
return true;
|
||||
@@ -493,11 +476,8 @@ export default {
|
||||
|
||||
this.entrySequence = this.sequence;
|
||||
this.entryPoint = this.point;
|
||||
this.entryRemarks = this.remarks;
|
||||
this.entryLabels = [...(this.labels??[])];
|
||||
|
||||
// Focus remarks field
|
||||
this.$nextTick(() => this.$refs.remarks.focus());
|
||||
this.makeEntryRemarks();
|
||||
}
|
||||
},
|
||||
|
||||
@@ -568,22 +548,13 @@ export default {
|
||||
};
|
||||
},
|
||||
|
||||
searchRemarks (item, queryText, itemText) {
|
||||
const needle = queryText.toLowerCase();
|
||||
const text = item.text.toLowerCase();
|
||||
const keywords = item.keywords.map(i => i.toLowerCase());
|
||||
const labels = item.labels.map(i => i.toLowerCase());
|
||||
return text.includes(needle) ||
|
||||
keywords.some(i => i.includes(needle)) ||
|
||||
labels.some(i => i.includes(needle));
|
||||
},
|
||||
|
||||
handleRemarksMenu (event) {
|
||||
if (typeof event == 'boolean') {
|
||||
this.remarksMenu = event;
|
||||
} else {
|
||||
this.entryRemarks = event;
|
||||
this.remarksMenu = false;
|
||||
makeEntryRemarks () {
|
||||
this.entryRemarks = {
|
||||
template: null,
|
||||
schema: {},
|
||||
values: [],
|
||||
...this.meta?.structured_values,
|
||||
text: this.remarks
|
||||
}
|
||||
},
|
||||
|
||||
@@ -632,6 +603,14 @@ export default {
|
||||
}
|
||||
},
|
||||
|
||||
timed () {
|
||||
const tstamp = (new Date()).toISOString();
|
||||
this.entrySequence = null;
|
||||
this.entryPoint = null;
|
||||
this.tsDate = tstamp.substr(0, 10);
|
||||
this.tsTime = tstamp.substr(11, 8);
|
||||
},
|
||||
|
||||
close () {
|
||||
this.entryLabels = this.selectedLabels.map(this.labelToItem)
|
||||
this.$emit("input", false);
|
||||
@@ -640,14 +619,24 @@ export default {
|
||||
save () {
|
||||
// In case the focus goes directly from the remarks field
|
||||
// to the Save button.
|
||||
if (this.entryRemarksInput != this.entryRemarksText) {
|
||||
this.entryRemarks = this.entryRemarksInput;
|
||||
|
||||
let meta;
|
||||
|
||||
if (this.entryRemarks.values?.length) {
|
||||
meta = {
|
||||
structured_values: {
|
||||
template: this.entryRemarks.template,
|
||||
schema: this.entryRemarks.schema,
|
||||
values: this.entryRemarks.values
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
const data = {
|
||||
id: this.id,
|
||||
remarks: this.entryRemarksText,
|
||||
labels: this.entryLabels
|
||||
labels: this.entryLabels,
|
||||
meta
|
||||
};
|
||||
|
||||
/* NOTE This is the purist way.
|
||||
|
||||
142
lib/www/client/source/src/components/event-properties.vue
Normal file
142
lib/www/client/source/src/components/event-properties.vue
Normal file
@@ -0,0 +1,142 @@
|
||||
<template>
|
||||
<v-card flat>
|
||||
<v-card-subtitle v-text="text">
|
||||
</v-card-subtitle>
|
||||
<v-card-text style="max-height:350px;overflow:scroll;">
|
||||
<v-form>
|
||||
<template v-for="key in fieldKeys">
|
||||
<template v-if="schema[key].enum">
|
||||
<v-select v-if="schema[key].type == 'number'" :key="key"
|
||||
v-model.number="fieldValues[key]"
|
||||
:items="schema[key].enum"
|
||||
:label="schema[key].title"
|
||||
:hint="schema[key].description"
|
||||
@input="updateFieldValue(key, Number($event))"
|
||||
></v-select>
|
||||
<v-select v-else :key="key"
|
||||
v-model="fieldValues[key]"
|
||||
:items="schema[key].enum"
|
||||
:label="schema[key].title"
|
||||
:hint="schema[key].description"
|
||||
@input="updateFieldValue(key, $event)"
|
||||
></v-select>
|
||||
</template>
|
||||
<template v-else>
|
||||
<v-text-field v-if="schema[key].type == 'number'" :key="key"
|
||||
v-model.number="fieldValues[key]"
|
||||
type="number"
|
||||
:min="schema[key].minimum"
|
||||
:max="schema[key].maximum"
|
||||
:step="schema[key].multiplier"
|
||||
:label="schema[key].title"
|
||||
:hint="schema[key].description"
|
||||
@input="updateFieldValue(key, Number($event))"
|
||||
>
|
||||
</v-text-field>
|
||||
<v-text-field v-else-if="schema[key].type == 'string'" :key="key"
|
||||
v-model="fieldValues[key]"
|
||||
:label="schema[key].title"
|
||||
:hint="schema[key].description"
|
||||
@input="updateFieldValue(key, $event)"
|
||||
>
|
||||
</v-text-field>
|
||||
<v-checkbox v-else-if="schema[key].type == 'boolean'" :key="key"
|
||||
v-model="fieldValues[key]"
|
||||
:label="schema[key].title"
|
||||
:hint="schema[key].description"
|
||||
@change="updateFieldValue(key, $event)"
|
||||
>
|
||||
</v-checkbox>
|
||||
<v-text-field v-else :key="key"
|
||||
v-model="fieldValues[key]"
|
||||
:label="schema[key].title"
|
||||
:hint="schema[key].description"
|
||||
@input="updateFieldValue(key, $event)"
|
||||
>
|
||||
</v-text-field>
|
||||
</template>
|
||||
</template>
|
||||
</v-form>
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex';
|
||||
|
||||
export default {
|
||||
name: "DougalEventPropertiesEdit",
|
||||
|
||||
components: {
|
||||
},
|
||||
|
||||
props: {
|
||||
value: String,
|
||||
template: String,
|
||||
schema: Object,
|
||||
values: Array
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
fieldKeys () {
|
||||
return Object.entries(this.schema).sort((a, b) => a[1].title > b[1].title ? 1 : -1).map(i => i[0]);
|
||||
},
|
||||
|
||||
fieldValues () {
|
||||
const keys = Object.keys(this.schema ?? this.values);
|
||||
return Object.fromEntries(
|
||||
keys.map( (k, idx) =>
|
||||
[ k, this.values?.[idx] ?? this.schema[k].default ]));
|
||||
},
|
||||
|
||||
/*
|
||||
fields () {
|
||||
// TODO Remove this and rename fields → schema
|
||||
return this.schema;
|
||||
},
|
||||
*/
|
||||
|
||||
text () {
|
||||
if (this.template) {
|
||||
const rx = /{{([a-z_][a-z0-9_]*)}}/ig;
|
||||
return this.template.replace(rx, (match, p1) => this.fieldValues[p1] ?? "(n/a)");
|
||||
}
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
watch: {
|
||||
|
||||
values () {
|
||||
this.$emit("input", this.text);
|
||||
},
|
||||
|
||||
template () {
|
||||
this.$emit("input", this.text);
|
||||
},
|
||||
|
||||
schema () {
|
||||
this.$emit("input", this.text);
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
methods: {
|
||||
// Propagate a single field edit upward as a complete values array
// (the parent binds `values` with .sync).
// NOTE(review): relies on Object.values() preserving the key order of
// `fieldValues` so that array positions line up with the schema keys —
// confirm `values` indices and schema key order always correspond.
updateFieldValue(key, ev) {
  const values = {...this.fieldValues};
  values[key] = ev;
  this.$emit("update:values", Object.values(values));
}
|
||||
},
|
||||
|
||||
mount () {
|
||||
}
|
||||
}
|
||||
|
||||
</script>
|
||||
163
lib/www/client/source/src/components/event-select.vue
Normal file
163
lib/www/client/source/src/components/event-select.vue
Normal file
@@ -0,0 +1,163 @@
|
||||
<template>
|
||||
<div>
|
||||
<v-combobox
|
||||
ref="remarks"
|
||||
:value="text"
|
||||
@input="handleComboBox"
|
||||
:search-input.sync="entryRemarksInput"
|
||||
:items="remarksAvailable"
|
||||
:filter="searchRemarks"
|
||||
item-text="text"
|
||||
return-object
|
||||
label="Remarks"
|
||||
hint="Placeholders: @DMS@, @DEG@, @EN@, @WD@, @BSP@, @CMG@, …"
|
||||
prepend-icon="mdi-text-box-outline"
|
||||
append-outer-icon="mdi-magnify"
|
||||
@click:append-outer="(e) => remarksMenu = e"
|
||||
></v-combobox>
|
||||
|
||||
<dougal-context-menu
|
||||
:value="remarksMenu"
|
||||
@input="handleRemarksMenu"
|
||||
:items="presetRemarks"
|
||||
absolute
|
||||
></dougal-context-menu>
|
||||
|
||||
<v-expansion-panels v-if="haveProperties"
|
||||
class="px-8"
|
||||
:value="0"
|
||||
>
|
||||
<v-expansion-panel>
|
||||
<v-expansion-panel-header>Properties</v-expansion-panel-header>
|
||||
<v-expansion-panel-content>
|
||||
<dougal-event-properties-edit
|
||||
:value="text"
|
||||
@input="$emit('update:text', $event)"
|
||||
:template="template"
|
||||
:schema="schema"
|
||||
:values="values"
|
||||
@update:values="$emit('update:values', $event)"
|
||||
>
|
||||
</dougal-event-properties-edit>
|
||||
</v-expansion-panel-content>
|
||||
</v-expansion-panel>
|
||||
</v-expansion-panels>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex';
|
||||
import DougalContextMenu from '@/components/context-menu';
|
||||
import DougalEventPropertiesEdit from '@/components/event-properties';
|
||||
|
||||
export default {
|
||||
name: "DougalEventSelect",
|
||||
|
||||
components: {
|
||||
DougalContextMenu,
|
||||
DougalEventPropertiesEdit
|
||||
},
|
||||
|
||||
props: {
|
||||
text: String,
|
||||
template: String,
|
||||
schema: Object,
|
||||
values: Array,
|
||||
presetRemarks: Array
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
entryRemarksInput: null,
|
||||
remarksMenu: false,
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
remarksAvailable () {
|
||||
return this.entryRemarksInput == this.text
|
||||
? []
|
||||
: this.flattenRemarks(this.presetRemarks);
|
||||
},
|
||||
|
||||
haveProperties () {
|
||||
for (const key in this.schema) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
watch: {
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
|
||||
flattenRemarks (items, keywords=[], labels=[]) {
|
||||
const result = [];
|
||||
|
||||
if (items) {
|
||||
for (const item of items) {
|
||||
if (!item.items) {
|
||||
result.push({
|
||||
text: item.text,
|
||||
properties: item.properties,
|
||||
labels: labels.concat(item.labels??[]),
|
||||
keywords
|
||||
})
|
||||
} else {
|
||||
const k = [...keywords, item.text];
|
||||
const l = [...labels, ...(item.labels??[])];
|
||||
result.push(...this.flattenRemarks(item.items, k, l))
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
},
|
||||
|
||||
searchRemarks (item, queryText, itemText) {
|
||||
const needle = queryText.toLowerCase();
|
||||
const text = item.text.toLowerCase();
|
||||
const keywords = item.keywords.map(i => i.toLowerCase());
|
||||
const labels = item.labels.map(i => i.toLowerCase());
|
||||
return text.includes(needle) ||
|
||||
keywords.some(i => i.includes(needle)) ||
|
||||
labels.some(i => i.includes(needle));
|
||||
},
|
||||
|
||||
handleComboBox (event) {
|
||||
if (typeof event == "object") {
|
||||
this.$emit("update:text", event.text);
|
||||
this.$emit("update:template", event.template ?? event.text);
|
||||
this.$emit("update:schema", event.properties);
|
||||
this.$emit("update:labels", event.labels);
|
||||
} else {
|
||||
this.$emit("update:text", event);
|
||||
this.$emit("update:template", null);
|
||||
this.$emit("update:properties", null);
|
||||
this.$emit("update:labels", []);
|
||||
}
|
||||
},
|
||||
|
||||
handleRemarksMenu (event) {
|
||||
if (typeof event == 'boolean') {
|
||||
this.remarksMenu = event;
|
||||
} else {
|
||||
this.$emit("update:text", event.text);
|
||||
this.$emit("update:template", event.template ?? event.text);
|
||||
this.$emit("update:schema", event.properties);
|
||||
this.$emit("update:labels", event.labels);
|
||||
this.remarksMenu = false;
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
mount () {
|
||||
// Focus remarks field
|
||||
this.$nextTick(() => this.$refs.remarks.focus());
|
||||
}
|
||||
}
|
||||
|
||||
</script>
|
||||
@@ -0,0 +1,109 @@
|
||||
<template>
|
||||
<v-dialog
|
||||
max-width="600"
|
||||
:close-on-content-click="false"
|
||||
offset-y
|
||||
>
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
<v-chip
|
||||
class="ml-3"
|
||||
small
|
||||
:light="$vuetify.theme.isDark"
|
||||
:dark="!$vuetify.theme.isDark"
|
||||
:title="getFriendlyTypeName(value.type)"
|
||||
:color="getHSLColourFor(value.type||'str', .4, .5)"
|
||||
v-bind="attrs"
|
||||
v-on="on"
|
||||
>
|
||||
<v-icon small>{{ getTypeIcon(value.type||'str') }}</v-icon>
|
||||
<v-icon small v-if="value.enum"
|
||||
:title="'Values: '+Object.entries(value.enum).map(i => `${i[0]}=${i[1]}`).join('; ')+'\nDefault: '+value.default"
|
||||
>mdi-format-list-group</v-icon>
|
||||
</v-chip>
|
||||
</template>
|
||||
|
||||
<dougal-field-content
|
||||
:readonly="readonly"
|
||||
:value="value"
|
||||
@input="$emit('input', $event)"
|
||||
></dougal-field-content>
|
||||
|
||||
</v-dialog>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import DougalFieldContent from './field-content'
|
||||
|
||||
export default {
|
||||
|
||||
name: "DougalFieldContentDialog",
|
||||
|
||||
components: {
|
||||
DougalFieldContent
|
||||
},
|
||||
|
||||
props: {
|
||||
value: Object,
|
||||
readonly: Boolean
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
};
|
||||
},
|
||||
|
||||
computed: {
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
getFriendlyTypeName (type) {
|
||||
switch (type) {
|
||||
case "str":
|
||||
return "Text";
|
||||
case "int":
|
||||
return "Integer";
|
||||
case "float":
|
||||
return "Float";
|
||||
case "bool":
|
||||
return "Boolean";
|
||||
default:
|
||||
return type ?? "Text (default)";
|
||||
}
|
||||
},
|
||||
|
||||
getTypeIcon (type) {
|
||||
switch (type) {
|
||||
case "str":
|
||||
return "mdi-format-text-variant";
|
||||
case "int":
|
||||
return "mdi-numeric";
|
||||
case "float":
|
||||
return "mdi-decimal";
|
||||
case "bool":
|
||||
return "mdi-format-list-checks";
|
||||
default:
|
||||
return "mdi-format-text";
|
||||
}
|
||||
},
|
||||
|
||||
getHSLColourFor (str, saturation = 1, lightness = 0.25, offset = 0) {
|
||||
|
||||
function getHash (v) {
|
||||
return [...v].reduce( (acc, cur) => String(cur).charCodeAt(0) + ((acc << 5) - acc), 0 );
|
||||
}
|
||||
|
||||
const h = (getHash(str) + offset) % 360;
|
||||
const s = saturation * 100;
|
||||
const l = this.$vuetify.theme.isDark
|
||||
? (1-lightness) * 100
|
||||
: lightness * 100;
|
||||
|
||||
return `hsl(${h},${s}%,${l}%)`;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
</script>
|
||||
242
lib/www/client/source/src/components/fields/field-content.vue
Normal file
242
lib/www/client/source/src/components/fields/field-content.vue
Normal file
@@ -0,0 +1,242 @@
|
||||
<template>
|
||||
<v-card flat elevation="0">
|
||||
<v-card-subtitle>Item options</v-card-subtitle>
|
||||
<v-card-text>
|
||||
<v-select
|
||||
label="Value type"
|
||||
v-model="type"
|
||||
:items="types"
|
||||
value="int"
|
||||
:readonly="readonly"
|
||||
></v-select>
|
||||
|
||||
<v-checkbox
|
||||
label="Enumerated values"
|
||||
v-model="enumerated"
|
||||
:readonly="readonly"
|
||||
></v-checkbox>
|
||||
</v-card-text>
|
||||
|
||||
<template v-if="enumerated">
|
||||
<v-card-subtitle>Valid options</v-card-subtitle>
|
||||
<v-card-text>
|
||||
<v-list dense>
|
||||
<v-list-item v-for="(out, key) in value.enum" :key=key
|
||||
>
|
||||
<v-list-item-content class="mr-1">
|
||||
<v-text-field
|
||||
dense
|
||||
hide-details="auto"
|
||||
v-model="key"
|
||||
:readonly="readonly"
|
||||
></v-text-field>
|
||||
</v-list-item-content>
|
||||
<v-list-item-content class="ml-1">
|
||||
<v-select v-if="type == 'bool'"
|
||||
dense
|
||||
hide-details="auto"
|
||||
:items="[ true, false ]"
|
||||
v-model="value.enum[key]"
|
||||
:readonly="readonly"
|
||||
></v-select>
|
||||
<v-text-field v-else
|
||||
dense
|
||||
hide-details="auto"
|
||||
v-model="value.enum[key]"
|
||||
:readonly="readonly"
|
||||
></v-text-field>
|
||||
</v-list-item-content>
|
||||
<v-list-item-action>
|
||||
<v-icon
|
||||
small
|
||||
color="error"
|
||||
:disabled="readonly"
|
||||
@click="removeEnum(key)"
|
||||
>mdi-minus-circle</v-icon>
|
||||
</v-list-item-action>
|
||||
</v-list-item>
|
||||
|
||||
<v-list-item v-if="!readonly"
|
||||
>
|
||||
<v-list-item-content class="mr-1">
|
||||
<v-text-field
|
||||
dense
|
||||
hide-details="auto"
|
||||
label="New input value"
|
||||
v-model="newEnumKey"
|
||||
></v-text-field>
|
||||
</v-list-item-content>
|
||||
<v-list-item-content class="ml-1">
|
||||
<v-select v-if="type == 'bool'"
|
||||
dense
|
||||
hide-details="auto"
|
||||
label="New output value"
|
||||
:items="[ true, false ]"
|
||||
v-model="newEnumValue"
|
||||
></v-select>
|
||||
<v-text-field v-else
|
||||
dense
|
||||
hide-details="auto"
|
||||
label="New output value"
|
||||
v-model="newEnumValue"
|
||||
></v-text-field>
|
||||
</v-list-item-content>
|
||||
<v-list-item-action>
|
||||
<v-icon
|
||||
small
|
||||
color="primary"
|
||||
:disabled="!isNewEnumValid"
|
||||
@click="addEnum"
|
||||
>mdi-plus-circle</v-icon>
|
||||
</v-list-item-action>
|
||||
</v-list-item>
|
||||
|
||||
<v-list-item>
|
||||
<v-list-item-content>
|
||||
<v-select v-if="type == 'bool'"
|
||||
dense
|
||||
hide-details="auto"
|
||||
label="Default value"
|
||||
hint="Value to use if none matches"
|
||||
:items="[ true, false ]"
|
||||
v-model="defaultValue"
|
||||
:readonly="readonly"
|
||||
></v-select>
|
||||
<v-text-field v-else
|
||||
label="Default value"
|
||||
hint="Value to use if none matches"
|
||||
persistent-hint
|
||||
v-model="defaultValue"
|
||||
:readonly="readonly"
|
||||
></v-text-field>
|
||||
</v-list-item-content>
|
||||
<v-list-item-action>
|
||||
<v-icon
|
||||
small
|
||||
color="secondary"
|
||||
:disabled="readonly"
|
||||
@click="defaultValue = null"
|
||||
>mdi-backspace</v-icon>
|
||||
</v-list-item-action>
|
||||
</v-list-item>
|
||||
|
||||
</v-list>
|
||||
|
||||
</v-card-text>
|
||||
</template>
|
||||
</v-card>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
|
||||
export default {
|
||||
name: "DougalFieldContent",
|
||||
|
||||
props: {
|
||||
value: Object,
|
||||
readonly: Boolean
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
newEnumKey: null,
|
||||
newEnumValue: null,
|
||||
types: [
|
||||
{ text: "Text", value: "str" },
|
||||
{ text: "Integer", value: "int" },
|
||||
{ text: "Float", value: "float" },
|
||||
{ text: "Boolean", value: "bool" },
|
||||
]
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
type: {
|
||||
get () {
|
||||
return this.value?.type ?? "str";
|
||||
},
|
||||
|
||||
set (v) {
|
||||
this.$emit("input", {
|
||||
...this.value,
|
||||
type: v
|
||||
})
|
||||
}
|
||||
},
|
||||
|
||||
enumerated: {
|
||||
get () {
|
||||
return typeof this.value?.enum === "object";
|
||||
},
|
||||
|
||||
set (v) {
|
||||
if (v) {
|
||||
this.$emit("input", {
|
||||
enum: {},
|
||||
...this.value
|
||||
})
|
||||
} else {
|
||||
const obj = {...this.value};
|
||||
delete obj.enum;
|
||||
this.$emit("input", obj)
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
defaultValue: {
|
||||
|
||||
get () {
|
||||
return this.value?.default;
|
||||
},
|
||||
|
||||
set (v) {
|
||||
this.$emit("input", {
|
||||
...this.value,
|
||||
"default": v
|
||||
});
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
isNewEnumValid () {
  // The pending enum entry may be added when: a key has been typed,
  // that key is not already present in the enum map, and an output
  // value exists. Booleans count as a value even when false; other
  // falsy values (empty string, null) do not.
  return !!(this.newEnumKey &&
    !Object.keys(this.value.enum).includes(this.newEnumKey) &&
    (typeof this.newEnumValue == "boolean" || this.newEnumValue));
}
|
||||
|
||||
},
|
||||
|
||||
watch: {
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
addEnum () {
|
||||
this.$emit("input", {
|
||||
...this.value,
|
||||
enum: {
|
||||
...this.value.enum,
|
||||
[this.newEnumKey]: this.newEnumValue
|
||||
}
|
||||
});
|
||||
this.newEnumKey = null;
|
||||
this.newEnumValue = null;
|
||||
},
|
||||
|
||||
removeEnum (key) {
|
||||
const obj = {...this.value.enum};
|
||||
delete obj[key];
|
||||
this.$emit("input", {
|
||||
...this.value,
|
||||
enum: obj
|
||||
});
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
}
|
||||
|
||||
}
|
||||
</script>
|
||||
@@ -0,0 +1,83 @@
|
||||
<template>
|
||||
<v-dialog
|
||||
max-width="600"
|
||||
v-model="open"
|
||||
>
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
<v-icon
|
||||
v-bind="attrs"
|
||||
v-on="on"
|
||||
:title="title"
|
||||
>mdi-folder-network-outline</v-icon>
|
||||
</template>
|
||||
<v-card>
|
||||
<v-card-title>File picker</v-card-title>
|
||||
<v-divider></v-divider>
|
||||
<v-card-text>
|
||||
<dougal-file-browser
|
||||
v-model="selected"
|
||||
:mimetypes="mimetypes"
|
||||
:root="root"
|
||||
ref="browser"
|
||||
>
|
||||
</dougal-file-browser>
|
||||
</v-card-text>
|
||||
<v-card-actions>
|
||||
<v-btn text @click="save" :disabled="!selected">
|
||||
<v-icon small flat color="primary" class="mr-2">mdi-content-save</v-icon>
|
||||
Ok
|
||||
</v-btn>
|
||||
<v-spacer></v-spacer>
|
||||
<v-btn text @click="refresh">
|
||||
<v-icon small flat class="mr-2">mdi-reload</v-icon>
|
||||
Refresh
|
||||
</v-btn>
|
||||
<v-spacer></v-spacer>
|
||||
<v-btn text @click="close">
|
||||
<v-icon small flat color="red" class="mr-2">mdi-close</v-icon>
|
||||
Cancel
|
||||
</v-btn>
|
||||
</v-card-actions>
|
||||
</v-card>
|
||||
</v-dialog>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import DougalFileBrowser from './file-browser';
|
||||
|
||||
export default {
|
||||
name: "DougalFileBrowserDialog",
|
||||
|
||||
components: { DougalFileBrowser },
|
||||
|
||||
props: [ "path", "mimetypes", "root", "title" ],
|
||||
|
||||
data () {
|
||||
return {
|
||||
open: false,
|
||||
selected: ""
|
||||
}
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
refresh () {
|
||||
this.$refs.browser.refresh();
|
||||
},
|
||||
|
||||
close () {
|
||||
this.open = false;
|
||||
},
|
||||
|
||||
save () {
|
||||
this.$emit('input', this.selected);
|
||||
this.close();
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.selected = this.path;
|
||||
}
|
||||
}
|
||||
</script>
|
||||
@@ -0,0 +1,150 @@
|
||||
<template>
|
||||
<v-layout fill-height style="overflow-y:auto;max-height:400px;">
|
||||
<v-progress-circular v-if="loading && !items.length"></v-progress-circular>
|
||||
<v-treeview v-else
|
||||
activatable
|
||||
:active.sync="active"
|
||||
:items="items"
|
||||
item-key="path"
|
||||
item-name="basename"
|
||||
:load-children="readdir"
|
||||
@update:active="activeChanged"
|
||||
style="min-width:100%"
|
||||
>
|
||||
<template v-slot:label="{item}">
|
||||
<div style="cursor:pointer;">
|
||||
{{ item.basename }}
|
||||
</div>
|
||||
</template>
|
||||
</v-treeview>
|
||||
</v-layout>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex';
|
||||
|
||||
// Depth-first search of a file tree for the entry whose `path` equals
// `needle`. Returns the matching entry, or undefined when none matches.
function find(haystack, needle) {
  for (const entry of haystack) {
    if (entry.path == needle) {
      return entry;
    }
    // Recurse into directories (entries carrying a `children` array).
    const hit = entry.children ? find(entry.children, needle) : undefined;
    if (hit) {
      return hit;
    }
  }
}
|
||||
|
||||
export default {
|
||||
name: "DougalFileBrowser",
|
||||
|
||||
props: [ "value", "mimetypes", "root" ],
|
||||
|
||||
data () {
|
||||
return {
|
||||
loading: false,
|
||||
items: [],
|
||||
active: [],
|
||||
selected: null,
|
||||
path: "",
|
||||
};
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
dirsAreSelectable () {
|
||||
return !this.mimetypes ||
|
||||
this.mimetypes == "inode/directory" ||
|
||||
(Array.isArray(this.mimetypes) && this.mimetypes.includes("inode/directory"));
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
watch: {
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
activeChanged (active) {
|
||||
const candidate = find(this.items, active[0]);
|
||||
if (!this.dirsAreSelectable && this.isDirectory(candidate)) {
|
||||
this.selected = null;
|
||||
} else {
|
||||
this.selected = candidate;
|
||||
}
|
||||
this.$emit("input", this.selected?.path);
|
||||
},
|
||||
|
||||
isDirectory (item) {
  // Truthy when the entry exists and its Content-Type marks it as a
  // directory. Note: returns the raw `item && …` result, so callers
  // get a falsy value (not strictly `false`) for a missing item.
  return item && item["Content-Type"] == "inode/directory";
},
|
||||
|
||||
filterMimetypes (item) {
|
||||
if (!this.mimetypes) {
|
||||
return true;
|
||||
} else if (Array.isArray(this.mimetypes)) {
|
||||
return item["Content-Type"] == "inode/directory" ||
|
||||
this.mimetypes.includes(item["Content-Type"].split(";")[0]) ||
|
||||
this.filterGlob(item);
|
||||
} else {
|
||||
return item["Content-Type"] == "inode/directory" ||
|
||||
this.mimetypes == item["Content-Type"].split(";")[0];
|
||||
}
|
||||
return false;
|
||||
},
|
||||
|
||||
filterGlob (item) {
|
||||
const globs = (Array.isArray(this.mimetypes)
|
||||
? this.mimetypes
|
||||
: [ this.mimetypes ])
|
||||
.filter(i => /^\*\..+$/.test(i));
|
||||
|
||||
for (const glob of globs) {
|
||||
const ext = (glob.match(/^\*\.(.+)$/)||[])[1];
|
||||
if (item.path.toLowerCase().endsWith(ext.toLowerCase())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
},
|
||||
|
||||
// List a directory via the API and graft the result into the tree.
// Called with a tree node by v-treeview's :load-children (lazy child
// loading), or with no argument to (re)load the root listing.
async readdir (item) {
  this.loading = true;
  // No node given: fall back to the configured root, then the initial
  // path, then the filesystem root.
  const url = `/files/${item? item.path : (this.root || this.path || "")}`;
  const list = await this.api([url]);
  this.loading = false;
  // Decorate entries in place: directories get an empty `children`
  // array (which is what makes v-treeview treat them as expandable),
  // and id/name aliases are added for the treeview. Note the arrow
  // parameter shadows the outer `item` inside this callback.
  const items = list?.map(item => {
    if (item["Content-Type"] == "inode/directory") {
      item.children = [];
    }
    item.id = item.path;
    item.name = item.basename;
    return item;
  }).filter(this.filterMimetypes);
  // Attach under the expanded node, or replace the root listing.
  if (item) {
    item.children = items;
  } else {
    this.items = items;
  }
},
|
||||
|
||||
// Clear the tree, then reload the root listing on the next tick.
// ($nextTick invokes readdir with no argument, i.e. the root.)
async refresh () {
  this.items = []
  this.$nextTick(this.readdir);
},
|
||||
|
||||
...mapActions(["api"])
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
if (this.value) {
|
||||
this.path = this.value;
|
||||
}
|
||||
this.readdir();
|
||||
}
|
||||
}
|
||||
|
||||
</script>
|
||||
@@ -9,8 +9,17 @@
|
||||
|
||||
<v-spacer></v-spacer>
|
||||
|
||||
<v-icon v-if="serverConnected" class="mr-6" small title="Connected to server">mdi-lan-connect</v-icon>
|
||||
<v-icon v-else class="mr-6" small color="red" title="Server connection lost (we'll reconnect automatically when the server comes back)">mdi-lan-disconnect</v-icon>
|
||||
<template v-if="isFrontendRemote">
|
||||
<template v-if="serverConnected">
|
||||
<v-icon v-if="isGatewayReliable" class="mr-6" title="Connected to server via gateway">mdi-cloud-outline</v-icon>
|
||||
<v-icon v-else class="mr-6" color="orange" title="Gateway connection is unreliable. Expect outages.">mdi-cloud-off</v-icon>
|
||||
</template>
|
||||
<v-icon v-else class="mr-6" color="red" :title="`Server connection lost: the gateway cannot reach the remote server.\nWe will reconnect automatically when the link with the remote server is restored.`">mdi-cloud-off</v-icon>
|
||||
</template>
|
||||
<template v-else>
|
||||
<v-icon v-if="serverConnected" class="mr-6" small title="Connected to server">mdi-lan-connect</v-icon>
|
||||
<v-icon v-else class="mr-6" small color="red" :title="`Server connection lost.\nWe will reconnect automatically when the server comes back.`">mdi-lan-disconnect</v-icon>
|
||||
</template>
|
||||
|
||||
<dougal-notifications-control class="mr-6"></dougal-notifications-control>
|
||||
|
||||
@@ -29,7 +38,7 @@
|
||||
<style>
|
||||
@font-face {
|
||||
font-family: "Bank Gothic Medium";
|
||||
src: local("Bank Gothic Medium"), url("/fonts/bank-gothic-medium.woff");
|
||||
src: local("Bank Gothic Medium"), url("/public/fonts/bank-gothic-medium.woff");
|
||||
}
|
||||
|
||||
.brand {
|
||||
@@ -51,13 +60,39 @@ export default {
|
||||
DougalNotificationsControl
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
lastGatewayErrorTimestamp: 0,
|
||||
gatewayErrorSilencePeriod: 60000,
|
||||
}
|
||||
},
|
||||
|
||||
computed: {
|
||||
year () {
|
||||
const date = new Date();
|
||||
return date.getUTCFullYear();
|
||||
},
|
||||
|
||||
...mapState({serverConnected: state => state.notify.serverConnected})
|
||||
...mapState({
|
||||
serverConnected: state => state.notify.serverConnected,
|
||||
isFrontendRemote: state => state.api.serverInfo?.["remote-frontend"] ?? false,
|
||||
isGatewayReliable: state => state.api.isGatewayReliable
|
||||
})
|
||||
},
|
||||
|
||||
watch: {
|
||||
|
||||
isGatewayReliable (val) {
|
||||
if (val === false) {
|
||||
const elapsed = Date.now() - this.lastGatewayErrorTimestamp;
|
||||
const lastGatewayErrorTimestamp = Date.now();
|
||||
if (elapsed > this.gatewayErrorSilencePeriod) {
|
||||
this.$root.showSnack("Gateway error", "warning");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
};
|
||||
</script>
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
<template>
|
||||
<v-card style="min-height:400px;">
|
||||
<v-card style="min-height:400px;" outlined>
|
||||
<v-card-title class="headline">
|
||||
Array inline / crossline error
|
||||
<v-spacer></v-spacer>
|
||||
<!--
|
||||
<v-switch v-model="scatterplot" label="Scatterplot"></v-switch>
|
||||
<v-switch class="ml-4" v-model="histogram" label="Histogram"></v-switch>
|
||||
-->
|
||||
</v-card-title>
|
||||
|
||||
<v-container fluid fill-height>
|
||||
@@ -35,7 +37,6 @@
|
||||
<style scoped>
|
||||
|
||||
.graph-container {
|
||||
background-color: red;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
@@ -58,8 +59,8 @@ export default {
|
||||
graph: [],
|
||||
busy: false,
|
||||
resizeObserver: null,
|
||||
scatterplot: false,
|
||||
histogram: false
|
||||
scatterplot: true,
|
||||
histogram: true
|
||||
};
|
||||
},
|
||||
|
||||
@@ -95,6 +96,10 @@ export default {
|
||||
scatterplot () {
|
||||
this.plot();
|
||||
this.$emit("update:settings", {[`${this.$options.name}.scatterplot`]: this.scatterplot});
|
||||
},
|
||||
|
||||
"$vuetify.theme.isDark" () {
|
||||
this.plot();
|
||||
}
|
||||
},
|
||||
|
||||
@@ -175,6 +180,11 @@ export default {
|
||||
title: "Shotpoint",
|
||||
anchor: "x1"
|
||||
},
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)",
|
||||
meta: this.data.meta
|
||||
};
|
||||
|
||||
@@ -233,6 +243,11 @@ export default {
|
||||
xaxis: {
|
||||
title: "Crossline (m)"
|
||||
},
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)",
|
||||
meta: this.data.meta
|
||||
};
|
||||
|
||||
@@ -306,6 +321,11 @@ export default {
|
||||
domain: [ 0.55, 1 ],
|
||||
anchor: 'x2'
|
||||
},
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)",
|
||||
meta: this.data.meta
|
||||
};
|
||||
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
<template>
|
||||
<v-card style="min-height:400px;">
|
||||
<v-card style="min-height:400px;" outlined>
|
||||
<v-card-title class="headline">
|
||||
Gun depth
|
||||
<v-spacer></v-spacer>
|
||||
<!--
|
||||
<v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
|
||||
<v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
|
||||
-->
|
||||
</v-card-title>
|
||||
|
||||
<v-container fluid fill-height>
|
||||
@@ -59,7 +61,7 @@ export default {
|
||||
busy: false,
|
||||
resizeObserver: null,
|
||||
shotpoint: true,
|
||||
violinplot: false
|
||||
violinplot: true
|
||||
};
|
||||
},
|
||||
|
||||
@@ -98,6 +100,10 @@ export default {
|
||||
this.plotViolin();
|
||||
}
|
||||
this.$emit("update:settings", {[`${this.$options.name}.violinplot`]: this.violinplot});
|
||||
},
|
||||
|
||||
"$vuetify.theme.isDark" () {
|
||||
this.plot();
|
||||
}
|
||||
|
||||
},
|
||||
@@ -196,6 +202,11 @@ export default {
|
||||
title: "Shotpoint",
|
||||
showspikes: true
|
||||
},
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)",
|
||||
meta: this.data.meta
|
||||
};
|
||||
|
||||
@@ -232,6 +243,11 @@ export default {
|
||||
title: "Gun number",
|
||||
type: 'category'
|
||||
},
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)",
|
||||
meta: {
|
||||
point
|
||||
}
|
||||
@@ -305,6 +321,11 @@ export default {
|
||||
xaxis: {
|
||||
title: "Gun number"
|
||||
},
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)",
|
||||
meta: this.data.meta
|
||||
};
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<template>
|
||||
<v-card style="min-height:400px;">
|
||||
<v-card style="min-height:400px;" outlined>
|
||||
<v-card-title class="headline">
|
||||
Gun details
|
||||
</v-card-title>
|
||||
@@ -76,6 +76,10 @@ export default {
|
||||
if (this.violinplot) {
|
||||
this.plotViolin();
|
||||
}
|
||||
},
|
||||
|
||||
"$vuetify.theme.isDark" () {
|
||||
this.plot();
|
||||
}
|
||||
|
||||
},
|
||||
@@ -332,6 +336,11 @@ export default {
|
||||
title: "Shotpoint",
|
||||
showspikes: true
|
||||
},
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)",
|
||||
|
||||
meta: this.data.meta
|
||||
};
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
<template>
|
||||
<v-card style="min-height:400px;">
|
||||
<v-card style="min-height:400px;" outlined>
|
||||
<v-card-title class="headline">
|
||||
Gun pressures
|
||||
<v-spacer></v-spacer>
|
||||
<!--
|
||||
<v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
|
||||
<v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
|
||||
-->
|
||||
</v-card-title>
|
||||
|
||||
<v-container fluid fill-height>
|
||||
@@ -59,7 +61,7 @@ export default {
|
||||
busy: false,
|
||||
resizeObserver: null,
|
||||
shotpoint: true,
|
||||
violinplot: false
|
||||
violinplot: true
|
||||
};
|
||||
},
|
||||
|
||||
@@ -98,6 +100,10 @@ export default {
|
||||
this.plotViolin();
|
||||
}
|
||||
this.$emit("update:settings", {[`${this.$options.name}.violinplot`]: this.violinplot});
|
||||
},
|
||||
|
||||
"$vuetify.theme.isDark" () {
|
||||
this.plot();
|
||||
}
|
||||
|
||||
},
|
||||
@@ -123,7 +129,7 @@ export default {
|
||||
const gunPressuresSorted = gunPressures.map(s => d3a.sort(s));
|
||||
const gunVolumes = guns.map(s => s.map(g => g[12]));
|
||||
const gunPressureWeights = gunVolumes.map( (s, sidx) => s.map( v => v/meta[sidx].volume ));
|
||||
const gunsWeightedAvgPressure = gunPressures.map( (s, sidx) =>
|
||||
const gunsWeightedAvgPressure = gunPressures.map( (s, sidx) =>
|
||||
d3a.sum(s.map( (pressure, gidx) => pressure * gunPressureWeights[sidx][gidx] )) / d3a.sum(gunPressureWeights[sidx])
|
||||
);
|
||||
|
||||
@@ -210,6 +216,11 @@ export default {
|
||||
title: "Shotpoint",
|
||||
showspikes: true
|
||||
},
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)",
|
||||
meta: this.data.meta
|
||||
};
|
||||
|
||||
@@ -249,6 +260,11 @@ export default {
|
||||
title: "Gun number",
|
||||
type: 'category'
|
||||
},
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)",
|
||||
meta: {
|
||||
point
|
||||
}
|
||||
@@ -322,6 +338,11 @@ export default {
|
||||
xaxis: {
|
||||
title: "Gun number"
|
||||
},
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)",
|
||||
meta: this.data.meta
|
||||
};
|
||||
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
<template>
|
||||
<v-card style="min-height:400px;">
|
||||
<v-card style="min-height:400px;" outlined>
|
||||
<v-card-title class="headline">
|
||||
Gun timing
|
||||
<v-spacer></v-spacer>
|
||||
<!--
|
||||
<v-switch v-model="shotpoint" label="Shotpoint"></v-switch>
|
||||
<v-switch class="ml-4" v-model="violinplot" label="Violin plot"></v-switch>
|
||||
-->
|
||||
</v-card-title>
|
||||
|
||||
<v-container fluid fill-height>
|
||||
@@ -59,7 +61,7 @@ export default {
|
||||
busy: false,
|
||||
resizeObserver: null,
|
||||
shotpoint: true,
|
||||
violinplot: false
|
||||
violinplot: true
|
||||
};
|
||||
},
|
||||
|
||||
@@ -98,6 +100,10 @@ export default {
|
||||
this.plotViolin();
|
||||
}
|
||||
this.$emit("update:settings", {[`${this.$options.name}.violinplot`]: this.violinplot});
|
||||
},
|
||||
|
||||
"$vuetify.theme.isDark" () {
|
||||
this.plot();
|
||||
}
|
||||
|
||||
},
|
||||
@@ -196,6 +202,11 @@ export default {
|
||||
title: "Shotpoint",
|
||||
showspikes: true
|
||||
},
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)",
|
||||
meta: this.data.meta
|
||||
};
|
||||
|
||||
@@ -232,6 +243,11 @@ export default {
|
||||
title: "Gun number",
|
||||
type: 'category'
|
||||
},
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)",
|
||||
meta: {
|
||||
point
|
||||
}
|
||||
@@ -305,6 +321,11 @@ export default {
|
||||
xaxis: {
|
||||
title: "Gun number"
|
||||
},
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)",
|
||||
meta: this.data.meta
|
||||
};
|
||||
|
||||
|
||||
@@ -127,7 +127,7 @@ export default {
|
||||
},
|
||||
|
||||
computed: {
|
||||
...mapGetters(['user', 'writeaccess', 'loading', 'serverEvent'])
|
||||
...mapGetters(['user', 'loading', 'serverEvent'])
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
@@ -0,0 +1,299 @@
|
||||
<template>
|
||||
<div ref="graph"
|
||||
class="graph-container"
|
||||
></div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
.graph-container {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
</style>
|
||||
|
||||
<script>
|
||||
import Plotly from 'plotly.js-dist';
|
||||
import unpack from '@/lib/unpack.js';
|
||||
|
||||
export default {
|
||||
name: "DougalGraphProjectSequenceInlineCrossline",
|
||||
|
||||
props: {
|
||||
items: Array,
|
||||
gunDataFormat: { type: String, default: "smsrc" },
|
||||
facet: { type: String, default: "scatter" }
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
plotted: false,
|
||||
resizeObserver: null
|
||||
};
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
config () {
|
||||
switch (this.facet) {
|
||||
case "scatter":
|
||||
default:
|
||||
return {
|
||||
editable: false,
|
||||
displaylogo: false
|
||||
};
|
||||
}
|
||||
},
|
||||
|
||||
layout () {
|
||||
const base = {
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
}
|
||||
};
|
||||
|
||||
switch (this.facet) {
|
||||
case "scatter":
|
||||
return {
|
||||
...base,
|
||||
autocolorscale: true,
|
||||
title: {text: `Preplot deviation <span style="font-size:smaller;">(x̅: %{data[0].meta.avg_x} ±%{data[0].meta.std_x} m; y̅: %{data[0].meta.avg_y} ±%{data[0].meta.std_y} m)</span>`},
|
||||
xaxis: {
|
||||
title: "Crossline (m)"
|
||||
},
|
||||
yaxis: {
|
||||
title: "Inline (m)"
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)"
|
||||
};
|
||||
|
||||
case "crossline":
|
||||
return {
|
||||
...base,
|
||||
autocolorscale: true,
|
||||
title: {text: `Crossline deviation <span style="font-size:smaller;">(x̅: %{data[0].meta.avg_x} ±%{data[0].meta.std_x} m)</span>`},
|
||||
xaxis: {
|
||||
title: "Shotpoint"
|
||||
},
|
||||
yaxis: {
|
||||
title: "Crossline (m)"
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)"
|
||||
};
|
||||
|
||||
case "2dhist":
|
||||
return {
|
||||
...base,
|
||||
showlegend: true,
|
||||
title: {text: `Preplot deviation <span style="font-size:smaller;">(x̅: %{data[0].meta.avg_x} ±%{data[0].meta.std_x} m; y̅: %{data[0].meta.avg_y} ±%{data[0].meta.std_y} m)</span>`},
|
||||
xaxis: {
|
||||
title: "Crossline (m)",
|
||||
showgrid: true,
|
||||
zeroline: true
|
||||
},
|
||||
yaxis: {
|
||||
title: "Inline (m)",
|
||||
showgrid: true,
|
||||
zeroline: true
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)"
|
||||
};
|
||||
|
||||
case "c-o":
|
||||
return {
|
||||
...base,
|
||||
showlegend: true,
|
||||
title: {
|
||||
text: this.data[0]?.x?.length
|
||||
? `Final vs raw <span style="font-size:smaller;">(x̅: %{data[0].meta.avg_x} ±%{data[0].meta.std_x} m; y̅: %{data[0].meta.avg_y} ±%{data[0].meta.std_y} m)</span>`
|
||||
: `Final vs raw: no data`
|
||||
},
|
||||
xaxis: {
|
||||
title: "Crossline (m)",
|
||||
showgrid: true,
|
||||
zeroline: true
|
||||
},
|
||||
yaxis: {
|
||||
title: "Inline (m)",
|
||||
showgrid: true,
|
||||
zeroline: true
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)"
|
||||
};
|
||||
}
|
||||
},
|
||||
|
||||
data () {
|
||||
if (!this.items?.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
let x, y, avg_x, avg_y, std_x, std_y;
|
||||
|
||||
const items = this.items.sort( (a, b) => a.point - b.point );
|
||||
const meta = unpack(items, "meta");
|
||||
const src_number = unpack(unpack(unpack(meta, "raw"), this.gunDataFormat), "src_number");
|
||||
|
||||
if (this.facet == "c-o") {
|
||||
const _items = items.filter(i => i.errorfinal && i.errorraw);
|
||||
const εf = unpack(unpack(_items, "errorfinal"), "coordinates");
|
||||
const εr = unpack(unpack(_items, "errorraw"), "coordinates");
|
||||
|
||||
x = εf.map( (f, idx) => f[0] - εr[idx][0] )
|
||||
y = εf.map( (f, idx) => f[1] - εr[idx][1] )
|
||||
|
||||
} else {
|
||||
const coords = unpack(unpack(items, ((row) => row?.errorfinal ? row.errorfinal : row.errorraw)), "coordinates");
|
||||
|
||||
x = unpack(coords, 0);
|
||||
y = unpack(coords, 1);
|
||||
|
||||
|
||||
}
|
||||
|
||||
// No chance of overflow
|
||||
avg_x = (x.reduce((acc, cur) => acc + cur, 0) / x.length).toFixed(2);
|
||||
avg_y = (y.reduce((acc, cur) => acc + cur, 0) / y.length).toFixed(2);
|
||||
std_x = Math.sqrt(x.reduce((acc, cur) => (cur-avg_x)**2 + acc, 0) / x.length).toFixed(2);
|
||||
std_y = Math.sqrt(y.reduce((acc, cur) => (cur-avg_y)**2 + acc, 0) / y.length).toFixed(2);
|
||||
|
||||
if (this.facet == "scatter") {
|
||||
|
||||
const data = [{
|
||||
type: "scatter",
|
||||
mode: "markers",
|
||||
x,
|
||||
y,
|
||||
meta: { avg_x, avg_y, std_x, std_y},
|
||||
transforms: [{
|
||||
type: "groupby",
|
||||
groups: src_number,
|
||||
styles: [
|
||||
{target: 1, value: {line: {color: "green"}}},
|
||||
{target: 2, value: {line: {color: "red"}}},
|
||||
{target: 3, value: {line: {color: "blue"}}}
|
||||
]
|
||||
}],
|
||||
}];
|
||||
|
||||
return data;
|
||||
|
||||
} else if (this.facet == "crossline") {
|
||||
|
||||
const s = unpack(items, "point");
|
||||
|
||||
const data = [{
|
||||
type: "scatter",
|
||||
x: s,
|
||||
y: x,
|
||||
meta: { avg_x, avg_y, std_x, std_y},
|
||||
_transforms: [{
|
||||
type: "groupby",
|
||||
groups: src_number,
|
||||
styles: [
|
||||
{target: 1, value: {line: {color: "green"}}},
|
||||
{target: 2, value: {line: {color: "red"}}},
|
||||
{target: 3, value: {line: {color: "blue"}}}
|
||||
]
|
||||
}],
|
||||
}];
|
||||
|
||||
return data;
|
||||
|
||||
} else if (this.facet == "2dhist" || this.facet == "c-o") {
|
||||
|
||||
const bottomValue = this.$vuetify.theme.isDark
|
||||
? ['0.0', 'rgba(0,0,0,0)']
|
||||
: ['0.0', 'rgb(165,0,38)'];
|
||||
const topValue = this.$vuetify.theme.isDark
|
||||
? ['1.0', 'rgb(49,54,149)']
|
||||
: ['1.0', 'rgba(0,0,0,0)'];
|
||||
|
||||
const colourscale = this.facet == "c-o"
|
||||
? [bottomValue, [0.1, 'rgb(0,0,0)'], [0.9, 'rgb(255,255,255)'], topValue]
|
||||
: [
|
||||
bottomValue,
|
||||
['0.111111111111', 'rgb(215,48,39)'],
|
||||
['0.222222222222', 'rgb(244,109,67)'],
|
||||
['0.333333333333', 'rgb(253,174,97)'],
|
||||
['0.444444444444', 'rgb(254,224,144)'],
|
||||
['0.555555555556', 'rgb(224,243,248)'],
|
||||
['0.666666666667', 'rgb(171,217,233)'],
|
||||
['0.777777777778', 'rgb(116,173,209)'],
|
||||
['0.888888888889', 'rgb(69,117,180)'],
|
||||
topValue
|
||||
];
|
||||
|
||||
const data = [{
|
||||
type: "histogram2dcontour",
|
||||
ncontours: 20,
|
||||
colorscale: colourscale,
|
||||
showscale: false,
|
||||
reversescale: !this.$vuetify.theme.isDark,
|
||||
contours: {
|
||||
coloring: this.facet == "c-o" ? "fill" : "heatmap",
|
||||
},
|
||||
x,
|
||||
y,
|
||||
meta: { avg_x, avg_y, std_x, std_y}
|
||||
}];
|
||||
|
||||
return data;
|
||||
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
watch: {
|
||||
items (cur, prev) {
|
||||
if (cur != prev) {
|
||||
this.plot();
|
||||
}
|
||||
},
|
||||
|
||||
"$vuetify.theme.isDark" () {
|
||||
this.plot();
|
||||
}
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
plot () {
|
||||
if (this.items?.length) {
|
||||
Plotly.newPlot(this.$refs.graph, this.data, this.layout, this.config);
|
||||
this.plotted = true;
|
||||
} else {
|
||||
Plotly.purge(this.$refs.graph);
|
||||
this.plotted = false;
|
||||
}
|
||||
},
|
||||
|
||||
replot () {
|
||||
if (this.plotted) {
|
||||
const ref = this.$refs.graph;
|
||||
Plotly.relayout(ref, {
|
||||
width: ref.clientWidth,
|
||||
height: ref.clientHeight
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.resizeObserver = new ResizeObserver(this.replot)
|
||||
this.resizeObserver.observe(this.$refs.graph);
|
||||
},
|
||||
|
||||
beforeDestroy () {
|
||||
if (this.resizeObserver) {
|
||||
this.resizeObserver.unobserve(this.$refs.graph);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
</script>
|
||||
@@ -0,0 +1,205 @@
|
||||
<template>
|
||||
<div ref="graph"
|
||||
class="graph-container"
|
||||
></div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
.graph-container {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
</style>
|
||||
|
||||
<script>
|
||||
import Plotly from 'plotly.js-dist';
|
||||
import unpack from '@/lib/unpack.js';
|
||||
|
||||
export default {
|
||||
name: "DougalGraphProjectSequenceShotpointTiming",
|
||||
|
||||
props: {
|
||||
items: Array,
|
||||
gunDataFormat: { type: String, default: "smsrc" },
|
||||
facet: { type: String, default: "bars" }
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
plotted: false,
|
||||
resizeObserver: null
|
||||
};
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
config () {
|
||||
return {
|
||||
editable: false,
|
||||
displaylogo: false
|
||||
};
|
||||
},
|
||||
|
||||
layout () {
|
||||
return {
|
||||
font: {
|
||||
color: this.$vuetify.theme.isDark ? "#fff" : undefined
|
||||
},
|
||||
title: {text: "Shotpoint timing %{data[0].meta.subtitle}"},
|
||||
xaxis: {
|
||||
title: "Shotpoint"
|
||||
},
|
||||
yaxis: {
|
||||
title: "Time (s)"
|
||||
},
|
||||
plot_bgcolor:"rgba(0,0,0,0)",
|
||||
paper_bgcolor:"rgba(0,0,0,0)"
|
||||
};
|
||||
},
|
||||
|
||||
data () {
|
||||
|
||||
const items = this.items.map(i => {
|
||||
return {
|
||||
point: i.point,
|
||||
tstamp: new Date(i.tstamp)
|
||||
}
|
||||
}).sort( (a, b) => a.tstamp - b.tstamp );
|
||||
const x = [...unpack(items, "point")];
|
||||
const y = items.map( (i, idx, ary) => (ary[idx+1]?.tstamp - i.tstamp)/1000 );
|
||||
const src_number = unpack(this.items, ["meta", "raw", this.gunDataFormat, "src_number"]);
|
||||
|
||||
// We're dealing with intervals not points
|
||||
x.pop(); y.pop(); src_number.pop();
|
||||
|
||||
const meta = {};
|
||||
|
||||
const stats = this.stats(x, y, src_number);
|
||||
|
||||
// We need to do the subtitle here rather than in layout as layout knows nothing
|
||||
// about the number of arrays
|
||||
|
||||
if (stats.src_ids.length == 1) {
|
||||
meta.subtitle = `<span style="font-size:smaller;">(μ = ${stats.avg.all.toFixed(2)} ±${stats.std.all.toFixed(2)} s)</span>`;
|
||||
} else {
|
||||
meta.subtitle = `<span style="font-size:smaller;">(μ = ${stats.avg.all.toFixed(2)} ±${stats.std.all.toFixed(2)} s)</span>`;
|
||||
const per_source = [];
|
||||
for (const key in stats.avg) {
|
||||
if (key == "all") continue;
|
||||
const s = `μ<sub>${key}</sub> = ${stats.avg[key].toFixed(2)} ±${stats.std[key].toFixed(2)} s`;
|
||||
per_source.push(s);
|
||||
}
|
||||
meta.subtitle += `<br><span style="font-size:smaller;">` + per_source.join("; ") + "</span>";
|
||||
}
|
||||
|
||||
|
||||
const trace0 = {
|
||||
type: "bar",
|
||||
x,
|
||||
y,
|
||||
transforms: [{
|
||||
type: "groupby",
|
||||
groups: src_number,
|
||||
styles: [
|
||||
{value: {showlegend: false}},
|
||||
{target: 1, value: {line: {color: "green"}}},
|
||||
{target: 2, value: {line: {color: "red"}}},
|
||||
{target: 3, value: {line: {color: "blue"}}}
|
||||
]
|
||||
}],
|
||||
meta
|
||||
};
|
||||
|
||||
switch (this.facet) {
|
||||
case "lines":
|
||||
trace0.type = "scatter";
|
||||
break;
|
||||
case "area":
|
||||
trace0.type = "scatter";
|
||||
trace0.fill = "tozeroy";
|
||||
break;
|
||||
case "bars":
|
||||
default:
|
||||
// Nothing
|
||||
}
|
||||
|
||||
return [trace0]
|
||||
}
|
||||
},
|
||||
|
||||
watch: {
|
||||
items (cur, prev) {
|
||||
if (cur != prev) {
|
||||
this.plot();
|
||||
}
|
||||
},
|
||||
|
||||
"$vuetify.theme.isDark" () {
|
||||
this.plot();
|
||||
}
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
plot () {
|
||||
if (this.items?.length) {
|
||||
Plotly.newPlot(this.$refs.graph, this.data, this.layout, this.config);
|
||||
this.plotted = true;
|
||||
} else {
|
||||
Plotly.purge(this.$refs.graph);
|
||||
this.plotted = false;
|
||||
}
|
||||
},
|
||||
|
||||
replot () {
|
||||
if (this.plotted) {
|
||||
const ref = this.$refs.graph;
|
||||
Plotly.relayout(ref, {
|
||||
width: ref.clientWidth,
|
||||
height: ref.clientHeight
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
stats (x, y, src_number) {
|
||||
const avg = {};
|
||||
const std = {};
|
||||
|
||||
const avg_all = (y.reduce((acc, cur) => acc + cur, 0) / y.length);
|
||||
const std_all = Math.sqrt(y.reduce((acc, cur) => (cur-avg_all)**2 + acc, 0) / y.length);
|
||||
|
||||
avg.all = avg_all;
|
||||
std.all = std_all;
|
||||
|
||||
const src_ids = new Set(src_number);
|
||||
|
||||
for (const src of src_ids) {
|
||||
// Ignore shots without source data
|
||||
if (!src) continue;
|
||||
|
||||
const v = y.filter((i, idx) => src_number[idx] == src);
|
||||
const μ = (v.reduce((acc, cur) => acc + cur, 0) / v.length);
|
||||
const σ = Math.sqrt(v.reduce((acc, cur) => (cur-μ)**2 + acc, 0) / v.length);
|
||||
avg[src] = μ;
|
||||
std[src] = σ;
|
||||
}
|
||||
|
||||
return { avg, std, src_ids };
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.resizeObserver = new ResizeObserver(this.replot)
|
||||
this.resizeObserver.observe(this.$refs.graph);
|
||||
},
|
||||
|
||||
beforeDestroy () {
|
||||
if (this.resizeObserver) {
|
||||
this.resizeObserver.unobserve(this.$refs.graph);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
</script>
|
||||
@@ -0,0 +1,187 @@
|
||||
<template>
|
||||
<v-card v-if="comparison" class="ma-1">
|
||||
<v-card-title>Comparison Summary: Baseline {{ baseline.pid }} vs Monitor {{ monitor.pid }}</v-card-title>
|
||||
<v-card-text>
|
||||
<v-row>
|
||||
<v-col cols="12" md="6">
|
||||
<h3>Deviation Statistics</h3>
|
||||
<v-simple-table dense>
|
||||
<template v-slot:default>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Metric</th>
|
||||
<th>I (m)</th>
|
||||
<th>J (m)</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>Mean (μ)</td>
|
||||
<td>{{ comparison['μ'][0].toFixed(3) }}</td>
|
||||
<td>{{ comparison['μ'][1].toFixed(3) }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Std Dev (σ)</td>
|
||||
<td>{{ comparison['σ'][0].toFixed(3) }}</td>
|
||||
<td>{{ comparison['σ'][1].toFixed(3) }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>RMS</td>
|
||||
<td>{{ comparison.rms[0].toFixed(3) }}</td>
|
||||
<td>{{ comparison.rms[1].toFixed(3) }}</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</template>
|
||||
</v-simple-table>
|
||||
|
||||
<h3 class="mt-4">Error distribution</h3>
|
||||
<ul>
|
||||
<li title="Relative to I-axis positive direction">Primary Direction: {{ (comparison.primaryDirection * 180 / Math.PI).toFixed(2) }}°</li>
|
||||
<li>Anisotropy: {{ comparison.anisotropy.toFixed(2) }}</li>
|
||||
<li title="Length of the semi-major axis of the error ellipse">Semi-Major Axis: {{ semiMajorAxis.toFixed(2) }} m</li>
|
||||
<li title="Length of the semi-minor axis of the error ellipse">Semi-Minor Axis: {{ semiMinorAxis.toFixed(2) }} m</li>
|
||||
<li title="Area of the error ellipse">Error Ellipse Area: {{ ellipseArea.toFixed(2) }} m²</li>
|
||||
</ul>
|
||||
|
||||
<h3 class="mt-4">Counts</h3>
|
||||
<ul>
|
||||
<li title="Unique line / point pairs found in both projects">Common Points: {{ comparison.common }}</li>
|
||||
<li title="Total number of points compared, including reshoots, infills, etc.">Comparison Length: {{ comparison.length }}</li>
|
||||
<li title="Number of points in the baseline project">Baseline Points: {{ comparison.baselineLength }} (Unique: {{ comparison.baselineUniqueLength }})</li>
|
||||
<li title="Number of points in the monitor project">Monitor Points: {{ comparison.monitorLength }} (Unique: {{ comparison.monitorUniqueLength }})</li>
|
||||
</ul>
|
||||
|
||||
<p class="mt-3" title="Date and time when the comparison was last performed">Computation timestamp: {{ new Date(comparison.tstamp).toLocaleString() }}</p>
|
||||
</v-col>
|
||||
|
||||
<v-col cols="12" md="6">
|
||||
<h3>Error Ellipse</h3>
|
||||
<svg width="300" height="300" style="border: 1px solid #ccc;">
|
||||
<g :transform="`translate(150, 150) scale(${ellipseScale})`">
|
||||
<line x1="0" y1="-150" x2="0" y2="150" stroke="lightgray" stroke-dasharray="5,5"/>
|
||||
<line x1="-150" y1="0" x2="150" y2="0" stroke="lightgray" stroke-dasharray="5,5"/>
|
||||
<ellipse
|
||||
:rx="Math.sqrt(comparison.eigenvalues[0])"
|
||||
:ry="Math.sqrt(comparison.eigenvalues[1])"
|
||||
:transform="`rotate(${ellipseAngle})`"
|
||||
fill="none"
|
||||
stroke="blue"
|
||||
stroke-width="2"
|
||||
/>
|
||||
<line
|
||||
:x1="0"
|
||||
:y1="0"
|
||||
:x2="Math.sqrt(comparison.eigenvalues[0]) * Math.cos(ellipseRad)"
|
||||
:y2="Math.sqrt(comparison.eigenvalues[0]) * Math.sin(ellipseRad)"
|
||||
stroke="red"
|
||||
stroke-width="2"
|
||||
arrow-end="classic-wide-long"
|
||||
/>
|
||||
<line
|
||||
:x1="0"
|
||||
:y1="0"
|
||||
:x2="Math.sqrt(comparison.eigenvalues[1]) * Math.cos(ellipseRad + Math.PI / 2)"
|
||||
:y2="Math.sqrt(comparison.eigenvalues[1]) * Math.sin(ellipseRad + Math.PI / 2)"
|
||||
stroke="green"
|
||||
stroke-width="2"
|
||||
arrow-end="classic-wide-long"
|
||||
/>
|
||||
</g>
|
||||
</svg>
|
||||
<p class="text-caption">Ellipse scaled for visibility (factor: {{ ellipseScale.toFixed(1) }}). Axes represent sqrt(eigenvalues).</p>
|
||||
</v-col>
|
||||
</v-row>
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
export default {
|
||||
name: "DougalGroupComparisonSummary",
|
||||
|
||||
props: {
|
||||
baseline: { type: Object, required: true },
|
||||
monitor: { type: Object, required: true },
|
||||
comparison: { type: Object, required: true }
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
};
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
ellipseAngle () {
|
||||
if (!this.comparison) return 0;
|
||||
const ev = this.comparison.eigenvectors[0];
|
||||
return Math.atan2(ev[1], ev[0]) * 180 / Math.PI;
|
||||
},
|
||||
|
||||
ellipseRad () {
|
||||
return this.ellipseAngle * Math.PI / 180;
|
||||
},
|
||||
|
||||
ellipseRx () {
|
||||
if (!this.comparison) return 0;
|
||||
return Math.sqrt(this.comparison.eigenvalues[0]) * this.ellipseScale;
|
||||
},
|
||||
|
||||
ellipseRy () {
|
||||
if (!this.comparison) return 0;
|
||||
return Math.sqrt(this.comparison.eigenvalues[1]) * this.ellipseScale;
|
||||
},
|
||||
|
||||
ellipseScale () {
|
||||
if (!this.comparison) return 1;
|
||||
const maxSigma = Math.max(
|
||||
Math.sqrt(this.comparison.eigenvalues[0]),
|
||||
Math.sqrt(this.comparison.eigenvalues[1])
|
||||
);
|
||||
const maxMu = Math.max(
|
||||
Math.abs(this.comparison['μ'][0]),
|
||||
Math.abs(this.comparison['μ'][1])
|
||||
);
|
||||
//const maxExtent = maxMu + 3 * maxSigma;
|
||||
const maxExtent = 20;
|
||||
return 100 / maxExtent; // Adjust scale to fit within ~200 pixels diameter
|
||||
},
|
||||
|
||||
ellipseArea () {
|
||||
if (!this.comparison) return 0;
|
||||
const a = Math.sqrt(this.comparison.eigenvalues[0]);
|
||||
const b = Math.sqrt(this.comparison.eigenvalues[1]);
|
||||
return Math.PI * a * b;
|
||||
},
|
||||
|
||||
semiMajorAxis () {
|
||||
if (!this.comparison) return 0;
|
||||
return Math.max(
|
||||
Math.sqrt(this.comparison.eigenvalues[0]),
|
||||
Math.sqrt(this.comparison.eigenvalues[1])
|
||||
);
|
||||
},
|
||||
|
||||
semiMinorAxis () {
|
||||
if (!this.comparison) return 0;
|
||||
return Math.min(
|
||||
Math.sqrt(this.comparison.eigenvalues[0]),
|
||||
Math.sqrt(this.comparison.eigenvalues[1])
|
||||
);
|
||||
},
|
||||
|
||||
meanX () {
|
||||
return this.comparison ? this.comparison['μ'][0] : 0;
|
||||
},
|
||||
|
||||
meanY () {
|
||||
return this.comparison ? this.comparison['μ'][1] : 0;
|
||||
},
|
||||
|
||||
ellipseViewBox () {
|
||||
return '-150 -150 300 300';
|
||||
},
|
||||
|
||||
}
|
||||
}
|
||||
</script>
|
||||
1302
lib/www/client/source/src/components/groups/group-map.vue
Normal file
1302
lib/www/client/source/src/components/groups/group-map.vue
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,118 @@
|
||||
<template>
|
||||
<v-card class="ma-1">
|
||||
<v-card-title>Group Repeatability Summary</v-card-title>
|
||||
<v-card-text>
|
||||
<p>Error ellipse area for each baseline-monitor pair. Lower values indicate better repeatability. Colors range from green (best) to red (worst).</p>
|
||||
<v-simple-table dense>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Baseline \ Monitor</th>
|
||||
<th v-for="project in projects" :key="project.pid">{{ project.pid }}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr v-for="(baselineProject, rowIndex) in projects" :key="baselineProject.pid">
|
||||
<td>{{ baselineProject.pid }}</td>
|
||||
<td v-for="(monitorProject, colIndex) in projects" :key="monitorProject.pid">
|
||||
<v-tooltip v-if="colIndex > rowIndex" top>
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
<div
|
||||
:style="{ backgroundColor: getEllipseAreaColor(baselineProject.pid, monitorProject.pid), color: 'white', textAlign: 'center', padding: '4px' }"
|
||||
v-bind="attrs"
|
||||
v-on="on"
|
||||
@click="emitInput(baselineProject, monitorProject)"
|
||||
>
|
||||
{{ formatEllipseArea(baselineProject.pid, monitorProject.pid) }}
|
||||
</div>
|
||||
</template>
|
||||
<span v-if="getComp(baselineProject.pid, monitorProject.pid)">
|
||||
<div>σ_i: {{ getComp(baselineProject.pid, monitorProject.pid).meta['σ'][0].toFixed(2) }} m</div>
|
||||
<div>σ_j: {{ getComp(baselineProject.pid, monitorProject.pid).meta['σ'][1].toFixed(2) }} m</div>
|
||||
<div>Anisotropy: {{ getComp(baselineProject.pid, monitorProject.pid).meta.anisotropy.toFixed(0) }}</div>
|
||||
<div>Ellipse Area: {{ getEllipseArea(baselineProject.pid, monitorProject.pid).toFixed(2) }} m²</div>
|
||||
<div>Primary Direction: {{ formatPrimaryDirection(getComp(baselineProject.pid, monitorProject.pid)) }}°</div>
|
||||
</span>
|
||||
</v-tooltip>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</v-simple-table>
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
export default {
|
||||
name: 'DougalGroupRepeatabilitySummary',
|
||||
|
||||
props: {
|
||||
comparisons: {
|
||||
type: Array,
|
||||
required: true
|
||||
},
|
||||
projects: {
|
||||
type: Array,
|
||||
required: true
|
||||
}
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
};
|
||||
},
|
||||
|
||||
computed: {
|
||||
compMap () {
|
||||
return new Map(this.comparisons.map(c => [`${c.baseline_pid}-${c.monitor_pid}`, c]));
|
||||
},
|
||||
minEllipseArea () {
|
||||
if (!this.comparisons.length) return 0;
|
||||
return Math.min(...this.comparisons.map(c => {
|
||||
const a = Math.sqrt(c.meta.eigenvalues[0]);
|
||||
const b = Math.sqrt(c.meta.eigenvalues[1]);
|
||||
return Math.PI * a * b;
|
||||
}));
|
||||
},
|
||||
maxEllipseArea () {
|
||||
if (!this.comparisons.length) return 0;
|
||||
return Math.max(...this.comparisons.map(c => {
|
||||
const a = Math.sqrt(c.meta.eigenvalues[0]);
|
||||
const b = Math.sqrt(c.meta.eigenvalues[1]);
|
||||
return Math.PI * a * b;
|
||||
}));
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
getComp (basePid, monPid) {
|
||||
return this.compMap.get(`${basePid}-${monPid}`);
|
||||
},
|
||||
getEllipseArea (basePid, monPid) {
|
||||
const comp = this.getComp(basePid, monPid);
|
||||
if (!comp) return null;
|
||||
const a = Math.sqrt(comp.meta.eigenvalues[0]);
|
||||
const b = Math.sqrt(comp.meta.eigenvalues[1]);
|
||||
return Math.PI * a * b;
|
||||
},
|
||||
formatEllipseArea (basePid, monPid) {
|
||||
const val = this.getEllipseArea(basePid, monPid);
|
||||
return val !== null ? val.toFixed(1) : '';
|
||||
},
|
||||
getEllipseAreaColor (basePid, monPid) {
|
||||
const val = this.getEllipseArea(basePid, monPid);
|
||||
if (val === null) return '';
|
||||
const ratio = (val - this.minEllipseArea) / (this.maxEllipseArea - this.minEllipseArea);
|
||||
const hue = (1 - ratio) * 120;
|
||||
return `hsl(${hue}, 70%, 70%)`;
|
||||
},
|
||||
formatPrimaryDirection (comp) {
|
||||
if (!comp) return '';
|
||||
return (comp.meta.primaryDirection * 180 / Math.PI).toFixed(1);
|
||||
},
|
||||
emitInput (baselineProject, monitorProject) {
|
||||
if (this.getComp(baselineProject.pid, monitorProject.pid)) {
|
||||
this.$emit('input', baselineProject, monitorProject);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
</script>
|
||||
@@ -2,6 +2,7 @@
|
||||
<v-dialog
|
||||
v-model="dialog"
|
||||
max-width="500"
|
||||
scrollable
|
||||
style="z-index:2020;"
|
||||
>
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
@@ -14,15 +15,54 @@
|
||||
</template>
|
||||
|
||||
<v-card>
|
||||
<v-card-title class="headline">
|
||||
Dougal user support
|
||||
</v-card-title>
|
||||
<v-window v-model="page">
|
||||
<v-window-item value="support">
|
||||
<v-card-title class="headline">
|
||||
Dougal user support
|
||||
</v-card-title>
|
||||
|
||||
<v-card-text>
|
||||
<p>You can get help or report a problem by sending an email to <a :href="`mailto:${email}`">{{email}}</a>. Please include as much information as possible about your problem or question—screenshots are often a good idea, and data files may also be attached.</p>
|
||||
<v-card-text>
|
||||
<p>You can get help or report a problem by sending an email to <a :href="`mailto:${email}`">{{email}}</a>. Please include as much information as possible about your problem or question—screenshots are often a good idea, and data files may also be attached.</p>
|
||||
|
||||
<p>When you write to the above address a ticket will be automatically created in the project's issue tracking system.</p>
|
||||
</v-card-text>
|
||||
<p>When you write to the above address a ticket will be automatically created in the project's issue tracking system.</p>
|
||||
|
||||
<v-alert dense type="info" border="left" outlined>
|
||||
<div class="text-body-2">
|
||||
You are using Dougal version:
|
||||
<ul>
|
||||
<li><code>{{clientVersion}}</code> (client)</li>
|
||||
<li><code>{{serverVersion}}</code> (server)</li>
|
||||
</ul>
|
||||
</div>
|
||||
</v-alert>
|
||||
|
||||
</v-card-text>
|
||||
</v-window-item>
|
||||
|
||||
<v-window-item value="changelog">
|
||||
<v-card-title class="headline">
|
||||
Dougal release notes
|
||||
</v-card-title>
|
||||
|
||||
<v-card-text>
|
||||
<v-carousel v-model="releaseShown"
|
||||
:continuous="false"
|
||||
:cycle="false"
|
||||
:show-arrows="true"
|
||||
:hide-delimiters="true"
|
||||
>
|
||||
<v-carousel-item v-for="release in releaseHistory">
|
||||
<pre>{{release}}</pre>
|
||||
</v-carousel-item>
|
||||
</v-carousel>
|
||||
</v-card-text>
|
||||
|
||||
|
||||
</v-window-item>
|
||||
<v-window-item value="serverinfo">
|
||||
<dougal-server-status :status="serverStatus"></dougal-server-status>
|
||||
</v-window-item>
|
||||
</v-window>
|
||||
|
||||
<v-divider></v-divider>
|
||||
|
||||
@@ -33,8 +73,7 @@
|
||||
text
|
||||
:href="`mailto:${email}?Subject=Question`"
|
||||
>
|
||||
<v-icon class="d-lg-none">mdi-help-circle</v-icon>
|
||||
<span class="d-none d-lg-inline">Ask a question</span>
|
||||
<v-icon title="Ask a question">mdi-help-circle</v-icon>
|
||||
</v-btn>
|
||||
|
||||
<v-btn
|
||||
@@ -42,10 +81,10 @@
|
||||
text
|
||||
href="mailto:dougal-support@aaltronav.eu?Subject=Bug report"
|
||||
>
|
||||
<v-icon class="d-lg-none">mdi-bug</v-icon>
|
||||
<span class="d-none d-lg-inline">Report a bug</span>
|
||||
<v-icon title="Report a bug">mdi-bug</v-icon>
|
||||
</v-btn>
|
||||
|
||||
<!---
|
||||
<v-btn
|
||||
color="info"
|
||||
text
|
||||
@@ -54,6 +93,37 @@
|
||||
>
|
||||
<v-icon>mdi-rss</v-icon>
|
||||
</v-btn>
|
||||
--->
|
||||
|
||||
<v-btn
|
||||
color="info"
|
||||
text
|
||||
title="View support info"
|
||||
:input-value="page == 'support'"
|
||||
@click="page = 'support'"
|
||||
>
|
||||
<v-icon>mdi-account-question</v-icon>
|
||||
</v-btn>
|
||||
|
||||
<v-btn v-if="versionHistory"
|
||||
color="info"
|
||||
text
|
||||
title="View release notes"
|
||||
:input-value="page == 'changelog'"
|
||||
@click="page = 'changelog'"
|
||||
>
|
||||
<v-icon>mdi-history</v-icon>
|
||||
</v-btn>
|
||||
|
||||
<v-btn v-if="serverStatus"
|
||||
color="info"
|
||||
text
|
||||
title="View server status"
|
||||
:input-value="page == 'serverinfo'"
|
||||
@click="page = 'serverinfo'"
|
||||
>
|
||||
<v-icon>mdi-server-network</v-icon>
|
||||
</v-btn>
|
||||
|
||||
<v-spacer></v-spacer>
|
||||
|
||||
@@ -75,15 +145,111 @@
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { mapActions, mapGetters } from 'vuex';
|
||||
import DougalServerStatus from './server-status';
|
||||
|
||||
export default {
|
||||
name: 'DougalHelpDialog',
|
||||
|
||||
components: {
|
||||
DougalServerStatus
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
dialog: false,
|
||||
email: "dougal-support@aaltronav.eu",
|
||||
feed: btoa(encodeURIComponent("https://gitlab.com/wgp/dougal/software.atom?feed_token=XSPpvsYEny8YmH75Nz5W"))
|
||||
feed: btoa(encodeURIComponent("https://gitlab.com/wgp/dougal/software.atom?feed_token=XSPpvsYEny8YmH75Nz5W")),
|
||||
serverStatus: null,
|
||||
clientVersion: process.env.DOUGAL_FRONTEND_VERSION ?? "(unknown)",
|
||||
serverVersion: null,
|
||||
versionHistory: null,
|
||||
releaseHistory: [],
|
||||
releaseShown: null,
|
||||
page: "support",
|
||||
|
||||
lastUpdate: 0,
|
||||
updateInterval: 12000,
|
||||
refreshTimer: null
|
||||
};
|
||||
},
|
||||
|
||||
computed: {
|
||||
sinceUpdate () {
|
||||
return this.lastUpdate
|
||||
? (Date.now() - this.lastUpdate)
|
||||
: +Infinity;
|
||||
}
|
||||
},
|
||||
|
||||
watch: {
|
||||
dialog(newVal) {
|
||||
if (newVal) {
|
||||
this.startAutoRefresh();
|
||||
} else {
|
||||
this.stopAutoRefresh();
|
||||
}
|
||||
},
|
||||
page(newVal) {
|
||||
if (newVal === 'serverinfo' && this.dialog) {
|
||||
this.getServerStatus(); // Immediate update when switching to serverinfo
|
||||
this.startAutoRefresh();
|
||||
} else {
|
||||
this.stopAutoRefresh();
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
methods: {
|
||||
async getServerVersion () {
|
||||
if (!this.serverVersion) {
|
||||
const version = await this.api(['/version', {}, null, {silent:true}]);
|
||||
this.serverVersion = version?.tag ?? "(unknown)";
|
||||
if (version) this.lastUpdate = Date.now();
|
||||
}
|
||||
if (!this.versionHistory) {
|
||||
const history = await this.api(['/version/history?count=6', {}, null, {silent:true}]);
|
||||
this.releaseHistory = history;
|
||||
this.versionHistory = history?.[this.serverVersion.replace(/-.*$/, "")] ?? null;
|
||||
}
|
||||
},
|
||||
|
||||
async getServerStatus () {
|
||||
const status = await this.api(['/diagnostics', {}, null, {silent: true}]);
|
||||
if (status) {
|
||||
this.serverStatus = status;
|
||||
this.lastUpdate = Date.now();
|
||||
}
|
||||
},
|
||||
|
||||
startAutoRefresh() {
|
||||
if (this.refreshTimer) return; // Prevent multiple timers
|
||||
this.refreshTimer = setInterval(() => {
|
||||
if (this.dialog && this.page === 'serverinfo') {
|
||||
this.getServerStatus();
|
||||
// Optionally refresh server version if needed
|
||||
// this.getServerVersion();
|
||||
}
|
||||
}, this.updateInterval);
|
||||
},
|
||||
|
||||
stopAutoRefresh() {
|
||||
if (this.refreshTimer) {
|
||||
clearInterval(this.refreshTimer);
|
||||
this.refreshTimer = null;
|
||||
}
|
||||
},
|
||||
|
||||
...mapActions(["api"])
|
||||
},
|
||||
|
||||
async mounted () {
|
||||
this.getServerVersion();
|
||||
this.getServerStatus();
|
||||
},
|
||||
|
||||
beforeDestroy() {
|
||||
this.stopAutoRefresh(); // Clean up timer on component destruction
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
@@ -0,0 +1,557 @@
|
||||
<template>
|
||||
<v-container>
|
||||
<v-row>
|
||||
<v-treeview
|
||||
dense
|
||||
activatable
|
||||
hoverable
|
||||
:multiple-active="false"
|
||||
:active.sync="active"
|
||||
:open.sync="open"
|
||||
:items="treeview"
|
||||
style="cursor:pointer;width:100%;"
|
||||
>
|
||||
<template v-slot:prepend="{item}">
|
||||
<template v-if="item.icon">
|
||||
<v-icon
|
||||
small
|
||||
left
|
||||
:title="item.leaf ? item.type : `${item.type} (${item.children.length} children)`"
|
||||
>{{item.icon}}</v-icon>
|
||||
</template>
|
||||
</template>
|
||||
|
||||
<template v-slot:label="{item}">
|
||||
<template v-if="!('path' in item)">
|
||||
{{item.name}}
|
||||
</template>
|
||||
<template v-else-if="item.leaf">
|
||||
<v-chip
|
||||
small
|
||||
label
|
||||
outlined
|
||||
:color="item.isArrayItem ? 'secondary' : 'primary'"
|
||||
>
|
||||
{{item.name}}
|
||||
</v-chip>
|
||||
<code class="ml-4" v-if="item.type == 'bigint'">{{item.value+"n"}}</code>
|
||||
<code class="ml-4" v-else-if="item.type == 'boolean'"><b>{{item.value}}</b></code>
|
||||
<code class="ml-4" v-else>{{item.value}}</code>
|
||||
<v-icon v-if="item.type == 'string' && (/^#[0-9a-fA-F]{3}([0-9a-fA-F]{3}([0-9a-fA-F]{2})?)?$/.test(item.value) || item.name == 'colour' || item.name == 'color')"
|
||||
right
|
||||
:color="item.value"
|
||||
>mdi-square</v-icon>
|
||||
</template>
|
||||
<template v-else>
|
||||
<v-chip
|
||||
small
|
||||
label
|
||||
outlined
|
||||
:color="item.isArrayItem ? 'secondary' : 'primary'"
|
||||
>
|
||||
{{item.name}}
|
||||
</v-chip>
|
||||
</template>
|
||||
</template>
|
||||
|
||||
<template v-slot:append="{item}">
|
||||
<template>
|
||||
<v-icon v-if="item.type == 'array'"
|
||||
small
|
||||
right
|
||||
outline
|
||||
color="primary"
|
||||
title="Add item"
|
||||
@click="itemAddDialog(item)"
|
||||
>mdi-plus</v-icon>
|
||||
<v-icon v-if="item.type == 'object'"
|
||||
small
|
||||
right
|
||||
outline
|
||||
color="primary"
|
||||
title="Add property"
|
||||
@click="itemAddDialog(item)"
|
||||
>mdi-plus</v-icon>
|
||||
<v-icon v-if="item.type == 'boolean'"
|
||||
small
|
||||
right
|
||||
outline
|
||||
color="primary"
|
||||
title="Toggle value"
|
||||
@click="itemToggle(item)"
|
||||
>{{ item.value ? "mdi-checkbox-blank-outline" : "mdi-checkbox-marked-outline" }}</v-icon>
|
||||
<v-icon v-if="item.type == 'string' || item.type == 'number'"
|
||||
small
|
||||
right
|
||||
outline
|
||||
color="primary"
|
||||
title="Edit value"
|
||||
@click="itemAddDialog(item, true)"
|
||||
>mdi-pencil-outline</v-icon>
|
||||
<v-icon
|
||||
small
|
||||
right
|
||||
outlined
|
||||
color="red"
|
||||
title="Delete"
|
||||
:disabled="item.id == rootId"
|
||||
@click="itemDelete(item)"
|
||||
>mdi-minus</v-icon>
|
||||
</template>
|
||||
</template>
|
||||
</v-treeview>
|
||||
<dougal-json-builder-property-dialog
|
||||
:open="editor"
|
||||
v-model="edit"
|
||||
v-bind="editorProperties"
|
||||
@save="editorSave"
|
||||
@close="editorClose"
|
||||
></dougal-json-builder-property-dialog>
|
||||
</v-row>
|
||||
</v-container>
|
||||
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { deepValue, deepSet } from '@/lib/utils';
|
||||
import DougalJsonBuilderPropertyDialog from './property-dialog';
|
||||
|
||||
export default {
|
||||
name: "DougalJsonBuilder",
|
||||
|
||||
components: {
|
||||
DougalJsonBuilderPropertyDialog
|
||||
},
|
||||
|
||||
props: {
|
||||
value: Object,
|
||||
name: String,
|
||||
sort: String
|
||||
},
|
||||
|
||||
data () {
|
||||
const rootId = Symbol("rootNode");
|
||||
return {
|
||||
rootId,
|
||||
active: [],
|
||||
open: [ rootId ],
|
||||
editor: false,
|
||||
editorProperties: {
|
||||
nameShown: true,
|
||||
nameEditable: true,
|
||||
typeShown: true,
|
||||
typeEditable: true,
|
||||
valueShown: true,
|
||||
serialisable: true
|
||||
},
|
||||
onEditorSave: (evt) => {},
|
||||
edit: {
|
||||
name: null,
|
||||
type: null,
|
||||
value: null
|
||||
}
|
||||
};
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
treeview () {
|
||||
|
||||
function sorter (key) {
|
||||
return function λ (a, b) {
|
||||
return a?.[key] > b?.[key]
|
||||
? 1
|
||||
: a?.[key] < b?.[key]
|
||||
? -1
|
||||
: 0;
|
||||
}
|
||||
}
|
||||
|
||||
function getType (value) {
|
||||
const t = typeof value;
|
||||
switch (t) {
|
||||
case "symbol":
|
||||
case "string":
|
||||
case "bigint":
|
||||
case "number":
|
||||
case "boolean":
|
||||
case "undefined":
|
||||
return t;
|
||||
case "object":
|
||||
return value === null
|
||||
? "null"
|
||||
: Array.isArray(value)
|
||||
? "array"
|
||||
: t;
|
||||
}
|
||||
}
|
||||
|
||||
function getIcon (type) {
|
||||
switch (type) {
|
||||
case "symbol":
|
||||
return "mdi-symbol";
|
||||
case "string":
|
||||
return "mdi-format-text";
|
||||
case "bigint":
|
||||
return "mdi-numeric";
|
||||
case "number":
|
||||
return "mdi-numeric";
|
||||
case "boolean":
|
||||
return "mdi-checkbox-intermediate-variant";
|
||||
case "undefined":
|
||||
return "mdi-border-none-variant";
|
||||
case "null":
|
||||
return "mdi-null";
|
||||
case "array":
|
||||
return "mdi-list-box-outline";
|
||||
case "object":
|
||||
return "mdi-format-list-bulleted-type";
|
||||
}
|
||||
return "mdi-help";
|
||||
}
|
||||
|
||||
const leaf = ([key, value], parent) => {
|
||||
const id = parent
|
||||
? parent.id+"."+key
|
||||
: key;
|
||||
const name = key;
|
||||
const type = getType(value);
|
||||
const icon = getIcon(type);
|
||||
const isArrayItem = parent?.type == "array";
|
||||
|
||||
const obj = {
|
||||
id,
|
||||
name,
|
||||
type,
|
||||
icon,
|
||||
isArrayItem,
|
||||
};
|
||||
|
||||
if (parent) {
|
||||
obj.path = [...parent.path, key];
|
||||
} else {
|
||||
obj.path = [ key ];
|
||||
}
|
||||
|
||||
if (type == "object" || type == "array") {
|
||||
const children = [];
|
||||
for (const child of Object.entries(value)) {
|
||||
children.push(leaf(child, obj));
|
||||
}
|
||||
if (this.sort) {
|
||||
children.sort(sorter(this.sort));
|
||||
}
|
||||
obj.children = children;
|
||||
} else {
|
||||
obj.leaf = true;
|
||||
obj.value = value;
|
||||
/*
|
||||
obj.children = [{
|
||||
id: id+".value",
|
||||
name: String(value)
|
||||
}]
|
||||
*/
|
||||
}
|
||||
|
||||
return obj;
|
||||
}
|
||||
|
||||
const rootNode = {
|
||||
id: this.rootId,
|
||||
name: this.name,
|
||||
type: getType(this.value),
|
||||
icon: getIcon(getType(this.value)),
|
||||
children: []
|
||||
};
|
||||
const view = [rootNode];
|
||||
|
||||
if (this.value) {
|
||||
for (const child of Object.entries(this.value)) {
|
||||
rootNode.children.push(leaf(child));
|
||||
}
|
||||
if (this.sort) {
|
||||
rootNode.children.sort(sorter(this.sort));
|
||||
}
|
||||
}
|
||||
|
||||
return view;
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
watch: {
|
||||
treeview () {
|
||||
if (!this.open.includes(this.rootId)) {
|
||||
this.open.push(this.rootId);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
methods: {
|
||||
|
||||
openAll (open = true) {
|
||||
const walk = (obj) => {
|
||||
if (obj?.children) {
|
||||
for (const child of obj.children) {
|
||||
walk(child);
|
||||
}
|
||||
if (obj?.id) {
|
||||
this.open.push(obj.id);
|
||||
}
|
||||
}
|
||||
}
|
||||
for (const item of this.treeview) {
|
||||
walk (item);
|
||||
}
|
||||
},
|
||||
|
||||
itemDelete (item) {
|
||||
const parents = [...item.path];
|
||||
const key = parents.pop();
|
||||
|
||||
if (key) {
|
||||
|
||||
const value = structuredClone(this.value);
|
||||
const obj = parents.length ? deepValue(value, parents) : value;
|
||||
|
||||
if (Array.isArray(obj)) {
|
||||
obj.splice(key, 1);
|
||||
} else {
|
||||
delete obj[key];
|
||||
}
|
||||
|
||||
this.$emit("input", value);
|
||||
|
||||
} else {
|
||||
|
||||
this.$emit("input", {});
|
||||
|
||||
}
|
||||
},
|
||||
|
||||
itemToggle (item, state) {
|
||||
const parents = [...item.path];
|
||||
const value = structuredClone(this.value);
|
||||
|
||||
if (parents.length) {
|
||||
deepSet(value, parents, state ?? !item.value)
|
||||
} else {
|
||||
value = state ?? !item.value;
|
||||
}
|
||||
|
||||
this.$emit("input", value);
|
||||
},
|
||||
|
||||
itemSet (path, content) {
|
||||
const parents = [...(path??[])];
|
||||
const key = parents.pop();
|
||||
|
||||
if (key !== undefined) {
|
||||
|
||||
const value = structuredClone(this.value);
|
||||
const obj = parents.length ? deepValue(value, parents) : value;
|
||||
|
||||
if (Array.isArray(obj)) {
|
||||
if (key === null) {
|
||||
obj.push(content);
|
||||
} else {
|
||||
obj[key] = content;
|
||||
}
|
||||
} else {
|
||||
obj[key] = content;
|
||||
}
|
||||
|
||||
this.$emit("input", value);
|
||||
|
||||
} else {
|
||||
this.$emit("input", content);
|
||||
|
||||
}
|
||||
},
|
||||
|
||||
itemAdd (path, content) {
|
||||
let value = structuredClone(this.value);
|
||||
let path_ = [...(path??[])];
|
||||
|
||||
if (path_.length) {
|
||||
try {
|
||||
deepSet(value, path_, content);
|
||||
} catch (err) {
|
||||
if (err instanceof TypeError) {
|
||||
this.itemSet(path, content);
|
||||
return;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
value = content;
|
||||
}
|
||||
|
||||
this.$emit("input", value);
|
||||
},
|
||||
|
||||
itemAddDialog (item, edit=false) {
|
||||
|
||||
if (!this.open.includes(item.id)) {
|
||||
this.open.push(item.id);
|
||||
}
|
||||
|
||||
if (edit) {
|
||||
this.editorReset({
|
||||
name: item.name,
|
||||
type: item.type,
|
||||
value: item.value
|
||||
}, {nameEditable: false});
|
||||
} else {
|
||||
this.editorReset({}, {
|
||||
nameShown: item.type != "array",
|
||||
nameRequired: item.type != "array"
|
||||
});
|
||||
}
|
||||
|
||||
this.onEditorSave = (evt) => {
|
||||
this.editor = false;
|
||||
|
||||
let transformer;
|
||||
switch(this.edit.type) {
|
||||
case "symbol":
|
||||
transformer = Symbol;
|
||||
break;
|
||||
case "string":
|
||||
transformer = String;
|
||||
break;
|
||||
case "bigint":
|
||||
transformer = BigInt;
|
||||
break;
|
||||
case "number":
|
||||
transformer = Number;
|
||||
break;
|
||||
case "boolean":
|
||||
transformer = Boolean;
|
||||
break;
|
||||
case "undefined":
|
||||
transformer = () => { return undefined; };
|
||||
break;
|
||||
case "object":
|
||||
transformer = (v) =>
|
||||
typeof v == "object"
|
||||
? v
|
||||
: (typeof v == "string" && v.length)
|
||||
? JSON.parse(v)
|
||||
: {};
|
||||
break;
|
||||
case "null":
|
||||
transformer = () => null;
|
||||
break;
|
||||
case "array":
|
||||
// FIXME not great
|
||||
transformer = (v) =>
|
||||
Array.isArray(v)
|
||||
? v
|
||||
: [];
|
||||
break;
|
||||
}
|
||||
|
||||
const value = transformer(this.edit.value);
|
||||
|
||||
const path = [...(item.path??[])];
|
||||
|
||||
if (!edit) {
|
||||
if (item.type == "array") {
|
||||
path.push(null);
|
||||
} else {
|
||||
path.push(this.edit.name);
|
||||
}
|
||||
}
|
||||
this.itemAdd(path, value);
|
||||
};
|
||||
this.editor = true;
|
||||
|
||||
},
|
||||
|
||||
XXitemEditDialog (item) {
|
||||
|
||||
this.editorReset({
|
||||
name: item.name,
|
||||
type: item.type,
|
||||
value: item.value}, {nameEditable: false});
|
||||
|
||||
this.onEditorSave = (evt) => {
|
||||
this.editor = false;
|
||||
|
||||
let transformer;
|
||||
switch(this.edit.type) {
|
||||
case "symbol":
|
||||
transformer = Symbol;
|
||||
break;
|
||||
case "string":
|
||||
transformer = String;
|
||||
break;
|
||||
case "bigint":
|
||||
transformer = BigInt;
|
||||
break;
|
||||
case "number":
|
||||
transformer = Number;
|
||||
break;
|
||||
case "boolean":
|
||||
transformer = Boolean;
|
||||
break;
|
||||
case "undefined":
|
||||
transformer = () => { return undefined; };
|
||||
break;
|
||||
case "object":
|
||||
transformer = (v) =>
|
||||
typeof v == "object"
|
||||
? v
|
||||
: (typeof v == "string" && v.length)
|
||||
? JSON.parse(v)
|
||||
: {};
|
||||
break;
|
||||
case "null":
|
||||
transformer = () => null;
|
||||
break;
|
||||
case "array":
|
||||
// FIXME not great
|
||||
transformer = (v) =>
|
||||
Array.isArray(v)
|
||||
? v
|
||||
: [];
|
||||
break;
|
||||
}
|
||||
|
||||
const key = this.edit.name;
|
||||
const value = transformer(this.edit.value);
|
||||
this.itemAdd(item, key, value);
|
||||
}
|
||||
this.editor = true;
|
||||
|
||||
},
|
||||
|
||||
editorReset (values, props) {
|
||||
this.edit = {
|
||||
name: values?.name,
|
||||
type: values?.type,
|
||||
value: values?.value
|
||||
};
|
||||
|
||||
this.editorProperties = {
|
||||
nameShown: props?.nameShown ?? true,
|
||||
nameEditable: props?.nameEditable ?? true,
|
||||
nameRequired: props?.nameRequired ?? true,
|
||||
typeShown: props?.typeShown ?? true,
|
||||
typeEditable: props?.typeEditable ?? true,
|
||||
valueShown: props?.valueShown ?? true,
|
||||
serialisable: props?.serialisable ?? true
|
||||
};
|
||||
},
|
||||
|
||||
editorSave (evt) {
|
||||
this.onEditorSave?.(evt);
|
||||
},
|
||||
|
||||
editorClose () {
|
||||
this.editor = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
</script>
|
||||
@@ -0,0 +1,125 @@
|
||||
<template>
|
||||
<v-dialog :value="open" @input="$emit('close')">
|
||||
<v-card>
|
||||
<v-card-title>
|
||||
</v-card-title>
|
||||
|
||||
<v-card-text>
|
||||
|
||||
<v-text-field v-if="nameShown"
|
||||
label="Name"
|
||||
:disabled="!nameEditable"
|
||||
v-model.sync="value.name"
|
||||
></v-text-field>
|
||||
|
||||
<v-select v-if="typeShown"
|
||||
label="Type"
|
||||
:items="types"
|
||||
:disabled="!typeEditable"
|
||||
v-model.sync="value.type"
|
||||
></v-select>
|
||||
|
||||
<template v-if="valueShown">
|
||||
<v-text-field v-if="value.type == 'number' || value.type == 'bigint'"
|
||||
label="Value"
|
||||
type="number"
|
||||
v-model.sync="value.value"
|
||||
></v-text-field>
|
||||
|
||||
<v-textarea v-else-if="value.type == 'string'"
|
||||
label="Value"
|
||||
v-model.sync="value.value"
|
||||
></v-textarea>
|
||||
|
||||
<v-radio-group v-else-if="value.type == 'boolean'"
|
||||
v-model.sync="value.value"
|
||||
>
|
||||
<v-radio
|
||||
label="true"
|
||||
:value="true"
|
||||
></v-radio>
|
||||
<v-radio
|
||||
label="false"
|
||||
:value="false"
|
||||
></v-radio>
|
||||
</v-radio-group>
|
||||
</template>
|
||||
|
||||
</v-card-text>
|
||||
|
||||
<v-card-actions>
|
||||
<v-btn
|
||||
:disabled="!canSave"
|
||||
@click="$emit('save')"
|
||||
>Save</v-btn>
|
||||
</v-card-actions>
|
||||
</v-card>
|
||||
</v-dialog>
|
||||
|
||||
|
||||
</template>
|
||||
|
||||
<script>
|
||||
|
||||
export default {
|
||||
name: "DougalJsonBuilderPropertyDialog",
|
||||
|
||||
props: {
|
||||
open: Boolean,
|
||||
value: Object,
|
||||
nameRequired: {type: Boolean, default: true},
|
||||
nameEditable: Boolean,
|
||||
nameShown: {type: Boolean, default: true},
|
||||
typeEditable: Boolean,
|
||||
typeShown: {type: Boolean, default: true},
|
||||
valueShown: {type: Boolean, default: true},
|
||||
serialisable: {type: Boolean, default: true},
|
||||
allowedTypes: Array
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
//key: null,
|
||||
//type: null,
|
||||
allTypes: [
|
||||
"symbol",
|
||||
"string",
|
||||
"bigint",
|
||||
"number",
|
||||
"boolean",
|
||||
"undefined",
|
||||
"object",
|
||||
"null",
|
||||
"array"
|
||||
],
|
||||
serialisableTypes: [
|
||||
"string",
|
||||
"number",
|
||||
"boolean",
|
||||
"object",
|
||||
"null",
|
||||
"array"
|
||||
],
|
||||
};
|
||||
},
|
||||
|
||||
computed: {
|
||||
|
||||
types () {
|
||||
return this.allowedTypes
|
||||
? this.allowedTypes
|
||||
: this.serialisable
|
||||
? this.serialisableTypes
|
||||
: this.allTypes;
|
||||
},
|
||||
|
||||
canSave () {
|
||||
return this.value.type && (this.value.name || this.nameRequired === false);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
</script>
|
||||
@@ -1,9 +1,6 @@
|
||||
<template>
|
||||
<div class="line-status" v-if="sequences.length == 0">
|
||||
<slot name="empty"></slot>
|
||||
</div>
|
||||
<div class="line-status" v-else-if="sequenceHref">
|
||||
<router-link v-for="sequence in sequences" :key="sequence.sequence"
|
||||
<div class="line-status" v-if="sequenceHref || plannedSequenceHref || pendingReshootHref">
|
||||
<router-link v-for="sequence in sequences" :key="sequence.sequence" v-if="sequenceHref"
|
||||
class="sequence"
|
||||
:class="sequence.status"
|
||||
:style="style(sequence)"
|
||||
@@ -11,15 +8,44 @@
|
||||
:to="sequenceHref(sequence)"
|
||||
>
|
||||
</router-link>
|
||||
<router-link v-for="sequence in plannedSequences" :key="sequence.sequence" v-if="plannedSequenceHref"
|
||||
class="sequence planned"
|
||||
:style="style(sequence)"
|
||||
:title="title(sequence, 'planned')"
|
||||
:to="plannedSequenceHref(sequence)"
|
||||
>
|
||||
</router-link>
|
||||
<router-link v-for="(line, key) in pendingReshoots" :key="key" v-if="pendingReshootHref"
|
||||
class="sequence reshoot"
|
||||
:style="style(line)"
|
||||
:title="title(line, 'reshoot')"
|
||||
:to="pendingReshootHref(line)"
|
||||
>
|
||||
</router-link>
|
||||
</div>
|
||||
<div class="line-status" v-else>
|
||||
<div v-for="sequence in sequences"
|
||||
<div class="line-status" v-else-if="sequences.length || plannedSequences.length || Object.keys(pendingReshoots).length">
|
||||
<div v-for="sequence in sequences" :key="sequence.sequence"
|
||||
class="sequence"
|
||||
:class="sequence.status"
|
||||
:style="style(sequence)"
|
||||
:title="title(sequence)"
|
||||
>
|
||||
</div>
|
||||
<div v-for="sequence in plannedSequences" :key="sequence.sequence"
|
||||
class="sequence planned"
|
||||
:style="style(sequence)"
|
||||
:title="title(sequence, 'planned')"
|
||||
>
|
||||
</div>
|
||||
<div v-for="(line, key) in pendingReshoots" :key="key"
|
||||
class="sequence reshoot"
|
||||
:style="style(line)"
|
||||
:title="title(line, 'reshoot')"
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
<div class="line-status" v-else>
|
||||
<slot name="empty"></slot>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
@@ -48,6 +74,8 @@
|
||||
background-color blue
|
||||
&.planned
|
||||
background-color magenta
|
||||
&.reshoot
|
||||
background repeating-linear-gradient(-45deg, rgba(255,0,255,0.302), brown 5px, rgba(247, 247, 247, 0.1) 5px, rgba(242, 241, 241, 0.08) 10px), repeating-linear-gradient(45deg, rgba(255,0,255,0.302), brown 5px, rgba(247, 247, 247, 0.1) 5px, rgba(242, 241, 241, 0.08) 10px)
|
||||
</style>
|
||||
|
||||
<script>
|
||||
@@ -58,7 +86,11 @@ export default {
|
||||
props: {
|
||||
preplot: Object,
|
||||
sequences: Array,
|
||||
"sequence-href": Function
|
||||
"sequence-href": Function,
|
||||
"planned-sequences": Array,
|
||||
"planned-sequence-href": Function,
|
||||
"pending-reshoots": Array,
|
||||
"pending-reshoot-href": Function
|
||||
},
|
||||
|
||||
methods: {
|
||||
@@ -68,13 +100,13 @@ export default {
|
||||
? s.fsp_final
|
||||
: s.status == "ntbp"
|
||||
? (s.fsp_final || s.fsp)
|
||||
: s.fsp; /* status == "raw" */
|
||||
: s.fsp; /* status == "raw" or planned sequence or pending reshoot */
|
||||
|
||||
const lsp = s.status == "final"
|
||||
? s.lsp_final
|
||||
: s.status == "ntbp"
|
||||
? (s.lsp_final || s.lsp)
|
||||
: s.lsp; /* status == "raw" */
|
||||
: s.lsp; /* status == "raw" or planned sequence or pending reshoot */
|
||||
|
||||
const pp0 = Math.min(this.preplot.fsp, this.preplot.lsp);
|
||||
const pp1 = Math.max(this.preplot.fsp, this.preplot.lsp);
|
||||
@@ -91,20 +123,24 @@ export default {
|
||||
return values;
|
||||
},
|
||||
|
||||
title (s) {
|
||||
const status = s.status == "final"
|
||||
? "Final"
|
||||
: s.status == "raw"
|
||||
? "Acquired"
|
||||
: s.status == "ntbp"
|
||||
? "NTBP"
|
||||
: s.status == "planned"
|
||||
? "Planned"
|
||||
: s.status;
|
||||
title (s, type) {
|
||||
if (s.status || type == "planned") {
|
||||
const status = s.status == "final"
|
||||
? "Final"
|
||||
: s.status == "raw"
|
||||
? "Acquired"
|
||||
: s.status == "ntbp"
|
||||
? "NTBP"
|
||||
: type == "planned"
|
||||
? "Planned"
|
||||
: s.status;
|
||||
|
||||
const remarks = "\n"+[s.remarks, s.remarks_final].join("\n").trim()
|
||||
const remarks = "\n"+[s.remarks, s.remarks_final].join("\n").trim()
|
||||
|
||||
return `Sequence ${s.sequence} – ${status} (${s.fsp_final || s.fsp}−${s.lsp_final || s.lsp})${remarks}`;
|
||||
return `Sequence ${s.sequence} – ${status} (${s.fsp_final || s.fsp}−${s.lsp_final || s.lsp})${remarks}`;
|
||||
} else if (type == "reshoot") {
|
||||
return `Pending reshoot (${s.fsp}‒${s.lsp})${s.remarks? "\n"+s.remarks : ""}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
<v-app-bar
|
||||
app
|
||||
clipped-left
|
||||
elevation="1"
|
||||
>
|
||||
<v-img src="/wgp-logo.png"
|
||||
contain
|
||||
@@ -31,16 +32,61 @@
|
||||
</template>
|
||||
|
||||
<v-list dense>
|
||||
<v-list-item :href="`/settings/equipment`">
|
||||
<v-list-item-title>Equipment list</v-list-item-title>
|
||||
<v-list-item href="/settings/equipment">
|
||||
<v-list-item-content>
|
||||
<v-list-item-title>Equipment list</v-list-item-title>
|
||||
<v-list-item-subtitle>Manage the list of equipment reported in logs</v-list-item-subtitle>
|
||||
</v-list-item-content>
|
||||
<v-list-item-action><v-icon small>mdi-view-list</v-icon></v-list-item-action>
|
||||
</v-list-item>
|
||||
<template v-if="false">
|
||||
<v-divider></v-divider>
|
||||
<v-list-item href="/settings">
|
||||
<v-list-item-content>
|
||||
<v-list-item-title>Local settings</v-list-item-title>
|
||||
<v-list-item-subtitle>Manage this vessel's configuration</v-list-item-subtitle>
|
||||
</v-list-item-content>
|
||||
<v-list-item-action><v-icon small>mdi-ferry</v-icon></v-list-item-action>
|
||||
</v-list-item>
|
||||
</template>
|
||||
</v-list>
|
||||
|
||||
</v-menu>
|
||||
|
||||
|
||||
<v-breadcrumbs :items="path"></v-breadcrumbs>
|
||||
<v-breadcrumbs :items="path">
|
||||
<template v-slot:item="{ item }">
|
||||
<v-breadcrumbs-item :href="item.href" :disabled="item.disabled" v-if="item.organisations">
|
||||
<v-tooltip bottom>
|
||||
<template v-slot:activator="{ on, attrs }">
|
||||
<span v-bind="attrs" v-on="on">{{ item.text }}</span>
|
||||
</template>
|
||||
<div class="text-overline">Project permissions</div>
|
||||
<v-simple-table dense>
|
||||
<template v-slot:default>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Organisation</th><th>Read</th><th>Write</th><th>Edit</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr v-for="(operations, name) in item.organisations">
|
||||
<td v-if="name == '*'"><v-chip small label color="primary">All</v-chip></td>
|
||||
<td v-else><v-chip small label outlined>{{ name }}</v-chip></td>
|
||||
<td>{{ operations.read ? "✔" : " " }}</td>
|
||||
<td>{{ operations.write ? "✔" : " " }}</td>
|
||||
<td>{{ operations.edit ? "✔" : " " }}</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</template>
|
||||
</v-simple-table>
|
||||
</v-tooltip>
|
||||
</v-breadcrumbs-item>
|
||||
<v-breadcrumbs-item :href="item.href" :disabled="item.disabled" v-else>
|
||||
{{ item.text }}
|
||||
</v-breadcrumbs-item>
|
||||
</template>
|
||||
</v-breadcrumbs>
|
||||
|
||||
<template v-if="$route.name != 'Login'">
|
||||
<v-btn text link to="/login" v-if="!user && !loading">Log in</v-btn>
|
||||
@@ -49,10 +95,37 @@
|
||||
<v-menu
|
||||
offset-y
|
||||
>
|
||||
<template v-slot:activator="{on, attrs}">
|
||||
<v-avatar :color="user.colour || 'primary'" :title="`${user.name} (${user.role})`" v-bind="attrs" v-on="on">
|
||||
<span class="white--text">{{user.name.slice(0, 5)}}</span>
|
||||
</v-avatar>
|
||||
<template v-slot:activator="{ on: menu, attrs }">
|
||||
<v-tooltip bottom>
|
||||
<template v-slot:activator="{ on: tooltip }">
|
||||
<v-avatar :color="user.colour || 'primary'" v-bind="attrs" v-on="{...tooltip, ...menu}">
|
||||
<span class="white--text">{{user.name.slice(0, 5)}}</span>
|
||||
</v-avatar>
|
||||
</template>
|
||||
<div class="text-overline">{{ user.name }}</div>
|
||||
<v-card flat class="my-1" v-if="user.description">
|
||||
<v-card-text class="pb-1" v-html="$root.markdown(user.description)">
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
<v-simple-table dense>
|
||||
<template v-slot:default>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Organisation</th><th>Read</th><th>Write</th><th>Edit</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr v-for="org in user.organisations">
|
||||
<td v-if="org.name == '*'"><v-chip small label color="primary">All</v-chip></td>
|
||||
<td v-else><v-chip small label outlined>{{ org.name }}</v-chip></td>
|
||||
<td>{{ org.operations.read ? "✔" : " " }}</td>
|
||||
<td>{{ org.operations.write ? "✔" : " " }}</td>
|
||||
<td>{{ org.operations.edit ? "✔" : " " }}</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</template>
|
||||
</v-simple-table>
|
||||
</v-tooltip>
|
||||
</template>
|
||||
|
||||
<v-list dense>
|
||||
@@ -65,23 +138,37 @@
|
||||
</v-list-item>
|
||||
<v-list-item link to="/logout" v-else>
|
||||
<v-list-item-icon><v-icon small>mdi-logout</v-icon></v-list-item-icon>
|
||||
<v-list-item-title>Log out</v-list-item-title>
|
||||
<v-list-item-content>
|
||||
<v-list-item-title>Log out</v-list-item-title>
|
||||
</v-list-item-content>
|
||||
</v-list-item>
|
||||
<v-divider></v-divider>
|
||||
<template v-if="canManageUsers">
|
||||
<v-list-item link to="/users">
|
||||
<v-list-item-icon><v-icon small>mdi-account-multiple</v-icon></v-list-item-icon>
|
||||
<v-list-item-content>
|
||||
<v-list-item-title>Manage users</v-list-item-title>
|
||||
<v-list-item-subtitle>Add, edit and remove users</v-list-item-subtitle>
|
||||
</v-list-item-content>
|
||||
</v-list-item>
|
||||
</template>
|
||||
<template v-else-if="user && !user.autologin">
|
||||
<v-list-item link :to="`/users/${user.id}`">
|
||||
<v-list-item-icon><v-icon small>mdi-account</v-icon></v-list-item-icon>
|
||||
<v-list-item-content>
|
||||
<v-list-item-title>User profile</v-list-item-title>
|
||||
<v-list-item-subtitle>Edit your user profile</v-list-item-subtitle>
|
||||
</v-list-item-content>
|
||||
</v-list-item>
|
||||
</template>
|
||||
</v-list>
|
||||
|
||||
</v-menu>
|
||||
|
||||
<!--
|
||||
<v-btn small text class="ml-2" title="Log out" link to="/?logout=1">
|
||||
<v-icon small>mdi-logout</v-icon>
|
||||
</v-btn>
|
||||
-->
|
||||
</template>
|
||||
</template>
|
||||
<template v-slot:extension v-if="$route.matched.find(i => i.name == 'Project')">
|
||||
<v-tabs :value="tab" show-arrows align-with-title>
|
||||
<v-tab v-for="tab, index in tabs" :key="index" link :to="tabLink(tab.href)" v-text="tab.text"></v-tab>
|
||||
</v-tabs>
|
||||
<template v-slot:extension v-if="appBarExtension">
|
||||
<div :is="appBarExtension"></div>
|
||||
</template>
|
||||
</v-app-bar>
|
||||
|
||||
@@ -95,24 +182,30 @@ export default {
|
||||
data() {
|
||||
return {
|
||||
drawer: false,
|
||||
tabs: [
|
||||
{ href: "summary", text: "Summary" },
|
||||
{ href: "lines", text: "Lines" },
|
||||
{ href: "plan", text: "Plan" },
|
||||
{ href: "sequences", text: "Sequences" },
|
||||
{ href: "calendar", text: "Calendar" },
|
||||
{ href: "log", text: "Log" },
|
||||
{ href: "qc", text: "QC" },
|
||||
{ href: "graphs", text: "Graphs" },
|
||||
{ href: "map", text: "Map" }
|
||||
],
|
||||
path: []
|
||||
};
|
||||
},
|
||||
|
||||
computed: {
|
||||
tab () {
|
||||
return this.tabs.findIndex(t => t.href == this.$route.path.split(/\/+/)[3]);
|
||||
|
||||
appBarExtension () {
|
||||
return this.$route.matched
|
||||
.filter(i => i.meta?.appBarExtension)
|
||||
.map(i => i.meta.appBarExtension)
|
||||
.pop()?.component;
|
||||
},
|
||||
|
||||
title () {
|
||||
return this.user.name + "\n" + [...this.user.organisations].map( ({name, operations}) => {
|
||||
if (name == "*") name = "All organisations";
|
||||
let str = name+": ";
|
||||
str += [ "read", "write", "edit" ].map( op => operations[op] ? op : null ).filter( op => op ).join(", ");
|
||||
return str;
|
||||
}).join("\n")
|
||||
},
|
||||
|
||||
canManageUsers () {
|
||||
return this.user.organisations.accessToOperation("edit").length;
|
||||
},
|
||||
|
||||
...mapGetters(['user', 'loading'])
|
||||
@@ -131,9 +224,6 @@ export default {
|
||||
},
|
||||
|
||||
methods: {
|
||||
tabLink (href) {
|
||||
return `/projects/${this.$route.params.project}/${href}`;
|
||||
},
|
||||
|
||||
breadcrumbs () {
|
||||
this.path = this.$route.matched
|
||||
|
||||
112
lib/www/client/source/src/components/organisations-item.vue
Normal file
112
lib/www/client/source/src/components/organisations-item.vue
Normal file
@@ -0,0 +1,112 @@
|
||||
<template>
|
||||
<v-row dense no-gutters>
|
||||
|
||||
<v-col>
|
||||
<slot name="prepend"></slot>
|
||||
</v-col>
|
||||
|
||||
<v-col cols="6">
|
||||
<v-text-field
|
||||
class="mr-5"
|
||||
dense
|
||||
label="Name"
|
||||
:value="name"
|
||||
:readonly="true"
|
||||
></v-text-field>
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<v-checkbox
|
||||
class="mr-3"
|
||||
label="Read"
|
||||
v-model="operations.read"
|
||||
:readonly="readonly"
|
||||
></v-checkbox>
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<v-checkbox
|
||||
class="mr-3"
|
||||
label="Write"
|
||||
v-model="operations.write"
|
||||
:readonly="readonly"
|
||||
></v-checkbox>
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<v-checkbox
|
||||
class="mr-3"
|
||||
label="Edit"
|
||||
v-model="operations.edit"
|
||||
:readonly="readonly"
|
||||
></v-checkbox>
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<!-- Just to fill the twelve-column grid -->
|
||||
<!--
|
||||
NOTE: this column could also be used for
|
||||
a popdown menu with additional operations
|
||||
if needed.
|
||||
-->
|
||||
</v-col>
|
||||
|
||||
<v-col>
|
||||
<slot name="append"></slot>
|
||||
</v-col>
|
||||
|
||||
</v-row>
|
||||
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
</style>
|
||||
|
||||
<script>
|
||||
import { Organisations } from '@dougal/organisations';
|
||||
|
||||
export default {
|
||||
name: "DougalOrganisationsItem",
|
||||
|
||||
props: {
|
||||
name: String,
|
||||
value: Object,
|
||||
readonly: Boolean,
|
||||
},
|
||||
|
||||
data () {
|
||||
return {
|
||||
operations: {...this.value}
|
||||
}
|
||||
},
|
||||
|
||||
watch: {
|
||||
value: {
|
||||
handler (newValue) {
|
||||
this.operations = {...this.value};
|
||||
},
|
||||
deep: true,
|
||||
},
|
||||
|
||||
operations: {
|
||||
handler (newValue) {
|
||||
if (["read", "write", "edit"].some( k => newValue[k] != this.value[k] )) {
|
||||
// Only emit if a value has actually changed
|
||||
this.$emit("input", {...newValue});
|
||||
}
|
||||
},
|
||||
deep: true,
|
||||
},
|
||||
},
|
||||
|
||||
methods: {
|
||||
reset () {
|
||||
}
|
||||
},
|
||||
|
||||
mounted () {
|
||||
this.reset();
|
||||
}
|
||||
|
||||
}
|
||||
</script>
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user