Mirror of https://gitlab.com/wgp/dougal/software.git (synced 2025-12-06 09:47:08 +00:00).
It encodes / decodes sequence / preplot data using an efficient binary format for sending large amounts of data across the wire and for (relatively) memory efficient client-side use.
381 lines · 15 KiB · JavaScript
// Maps TypedArray constructor names to the 4-bit type codes used in chunk
// prefaces. Code 6 is reserved for Float16, which JavaScript cannot encode
// natively, so Float32Array takes code 7.
const typeToCode = Object.fromEntries([
  ['Int8Array', 0],
  ['Uint8Array', 1],
  ['Int16Array', 2],
  ['Uint16Array', 3],
  ['Int32Array', 4],
  ['Uint32Array', 5],
  ['Float32Array', 7],
  ['Float64Array', 8],
  ['BigInt64Array', 9],
  ['BigUint64Array', 10],
]);
// Maps TypedArray constructor names to their element width in bytes.
// Derived from each constructor's own BYTES_PER_ELEMENT so the widths
// cannot drift from the actual encodings.
const typeToBytes = {
  Int8Array: Int8Array.BYTES_PER_ELEMENT,
  Uint8Array: Uint8Array.BYTES_PER_ELEMENT,
  Int16Array: Int16Array.BYTES_PER_ELEMENT,
  Uint16Array: Uint16Array.BYTES_PER_ELEMENT,
  Int32Array: Int32Array.BYTES_PER_ELEMENT,
  Uint32Array: Uint32Array.BYTES_PER_ELEMENT,
  Float32Array: Float32Array.BYTES_PER_ELEMENT,
  Float64Array: Float64Array.BYTES_PER_ELEMENT,
  BigInt64Array: BigInt64Array.BYTES_PER_ELEMENT,
  BigUint64Array: BigUint64Array.BYTES_PER_ELEMENT,
};
/**
 * Encode records into a binary stream of "sequential" chunks (type 0x11),
 * where each element's values are written as a contiguous column
 * (all increments of Δelem 0, then Δelem 1, ..., then all values of elem 0, ...).
 *
 * Records are grouped by their i value, sorted by j, and split into chunks
 * whose j values form an arithmetic progression (j0 + idx * Δj). Chunks are
 * packed into bundles (type 0x1C) whose payload length is stored in 24 bits.
 *
 * @param {Array<Object>} json - Records to encode.
 * @param {Function} iGetter - Returns a record's i value (must fit Uint16).
 * @param {Function} jGetter - Returns a record's j value (j0 must fit Uint16).
 * @param {Array<{key: Function, baseType: Function, incrType: Function}>} Δelems
 *        Delta-encoded elements: initial value stored as baseType, per-record
 *        increments stored as incrType.
 * @param {Array<{key: Function, type: Function}>} elems - Plainly stored elements.
 * @param {number} udv - User-defined value byte copied into each chunk header.
 * @returns {Uint8Array} Concatenated bundles; empty array for empty input.
 * @throws {Error} On non-function getters or null/undefined values.
 */
function sequential(json, iGetter, jGetter, Δelems = [], elems = [], udv = 0) {
  if (!Array.isArray(json) || !json.length) return new Uint8Array(0);
  if (typeof iGetter !== 'function' || typeof jGetter !== 'function') throw new Error('i and j must be getter functions');
  Δelems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`Δelems[${idx}].key must be a getter function`);
  });
  elems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`elems[${idx}].key must be a getter function`);
  });

  // Group records by i value
  const groups = new Map();
  for (const record of json) {
    const iValue = iGetter(record);
    if (iValue == null) throw new Error('Missing i value from getter');
    if (!groups.has(iValue)) groups.set(iValue, []);
    groups.get(iValue).push(record);
  }

  const maxBundleSize = 0xFFFFFF; // Max bundle length (24 bits)
  const buffers = [];

  // Process each group (i value)
  for (const [iValue, records] of groups) {
    // Sort records by j to ensure consistent order
    records.sort((a, b) => jGetter(a) - jGetter(b));
    const jValues = records.map(jGetter);
    if (jValues.some(v => v == null)) throw new Error('Missing j value from getter');

    // Split records into chunks based on Δj continuity. A chunk holds at most
    // 65535 records because its count field is a Uint16.
    // BUGFIX: the cap was `>= 65536`, which let a chunk grow to 65536 records
    // and then trip the "Chunk size exceeds 65535" check below.
    // PERF: the position inside the current chunk is tracked in O(1) via
    // chunkStart instead of re-summing all previous chunk lengths (O(n²)).
    const chunks = [];
    let currentChunk = [records[0]];
    let currentJ0 = jValues[0];
    let currentΔj = records.length > 1 ? jValues[1] - jValues[0] : 0;
    let chunkStart = 0; // Index (into records) of the current chunk's first record

    for (let idx = 1; idx < records.length; idx++) {
      const posInChunk = idx - chunkStart;
      const expectedJ = currentJ0 + posInChunk * currentΔj;
      if (jValues[idx] !== expectedJ || posInChunk >= 65535) {
        chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
        currentChunk = [records[idx]];
        currentJ0 = jValues[idx];
        currentΔj = idx + 1 < records.length ? jValues[idx + 1] - jValues[idx] : 0;
        chunkStart = idx;
      } else {
        currentChunk.push(records[idx]);
      }
    }
    if (currentChunk.length > 0) {
      chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
    }

    // Calculate the byte size of each chunk: header + preface + initial
    // values + record columns, with 4-byte padding after the initial values
    // and at the end of the chunk.
    const chunkSizes = chunks.map(({ records: chunkRecords }) => {
      // Defensive; the splitting above now guarantees this cannot trigger.
      if (chunkRecords.length > 65535) throw new Error(`Chunk size exceeds 65535 for i=${iValue}`);
      let size = 12; // Header
      size += Δelems.length + elems.length; // Preface (one type byte per element)
      size += Δelems.reduce((sum, e) => sum + typeToBytes[e.baseType.name], 0); // Initial values
      size += (4 - (size % 4)) % 4; // Pad after initial values
      size += chunkRecords.length * (
        Δelems.reduce((sum, e) => sum + typeToBytes[e.incrType.name], 0) +
        elems.reduce((sum, e) => sum + typeToBytes[e.type.name], 0)
      ); // Record data
      size += (4 - (size % 4)) % 4; // Pad after record data
      return size;
    });
    const totalChunkSize = chunkSizes.reduce((sum, size) => sum + size, 0);

    // Start a new bundle if this group would overflow the current one.
    // NOTE(review): a single group larger than maxBundleSize still overflows
    // the 24-bit length field — preserved from the original behavior.
    const lastBundle = buffers[buffers.length - 1];
    if (!lastBundle || lastBundle.offset + totalChunkSize > maxBundleSize) {
      buffers.push({ offset: 4, buffer: null, view: null }); // offset 4 reserves the bundle header
    }

    // Lazily allocate the bundle's buffer
    const currentBundle = buffers[buffers.length - 1];
    if (!currentBundle.buffer) {
      currentBundle.buffer = new ArrayBuffer(totalChunkSize + 4);
      currentBundle.view = new DataView(currentBundle.buffer);
    }

    // Process each chunk
    for (let c = 0; c < chunks.length; c++) {
      const { records: chunkRecords, j0, Δj } = chunks[c];
      const chunkSize = chunkSizes[c]; // Indexed access; no O(n²) shift()

      // Grow the bundle buffer if needed (later groups appended to an existing bundle)
      if (currentBundle.offset + chunkSize > currentBundle.buffer.byteLength) {
        const newBuffer = new ArrayBuffer(currentBundle.offset + chunkSize);
        new Uint8Array(newBuffer).set(new Uint8Array(currentBundle.buffer));
        currentBundle.buffer = newBuffer;
        currentBundle.view = new DataView(newBuffer);
      }

      // Write chunk header (all multi-byte fields little-endian)
      let offset = currentBundle.offset;
      currentBundle.view.setUint8(offset++, 0x11); // Chunk type: sequential
      currentBundle.view.setUint8(offset++, udv); // udv
      currentBundle.view.setUint16(offset, chunkRecords.length, true); offset += 2; // count
      currentBundle.view.setUint16(offset, iValue, true); offset += 2; // i
      currentBundle.view.setUint16(offset, j0, true); offset += 2; // j0
      currentBundle.view.setInt16(offset, Δj, true); offset += 2; // Δj (signed)
      currentBundle.view.setUint8(offset++, Δelems.length); // Δelem_count
      currentBundle.view.setUint8(offset++, elems.length); // elem_count

      // Write chunk preface: one type byte per element
      for (const elem of Δelems) {
        const baseCode = typeToCode[elem.baseType.name];
        const incrCode = typeToCode[elem.incrType.name];
        currentBundle.view.setUint8(offset++, (incrCode << 4) | baseCode); // High nibble: increment type
      }
      for (const elem of elems) {
        currentBundle.view.setUint8(offset++, typeToCode[elem.type.name]);
      }

      // Write initial values for Δelems
      for (const elem of Δelems) {
        const value = elem.key(chunkRecords[0]);
        if (value == null) throw new Error('Missing Δelem value from getter');
        writeTypedValue(currentBundle.view, offset, value, elem.baseType);
        offset += typeToBytes[elem.baseType.name];
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Write record data as columns: all increments for each Δelem in turn,
      // then all values for each plain elem in turn
      for (const elem of Δelems) {
        let prev = elem.key(chunkRecords[0]);
        for (let idx = 0; idx < chunkRecords.length; idx++) {
          const current = elem.key(chunkRecords[idx]); // Call getter once per record
          const value = idx === 0 ? 0 : current - prev; // First increment is always 0
          writeTypedValue(currentBundle.view, offset, value, elem.incrType);
          offset += typeToBytes[elem.incrType.name];
          prev = current;
        }
      }
      for (const elem of elems) {
        for (const record of chunkRecords) {
          const value = elem.key(record);
          if (value == null) throw new Error('Missing elem value from getter');
          writeTypedValue(currentBundle.view, offset, value, elem.type);
          offset += typeToBytes[elem.type.name];
        }
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Update bundle offset
      currentBundle.offset = offset;
    }

    // Update bundle header: type 0x1C with the payload length in the upper 24 bits
    currentBundle.view.setUint32(0, 0x1C | ((currentBundle.offset - 4) << 8), true);
  }

  // Concatenate all bundles into the final byte array
  const finalLength = buffers.reduce((sum, b) => sum + b.offset, 0);
  const result = new Uint8Array(finalLength);
  let offset = 0;
  for (const { buffer, offset: bundleOffset } of buffers) {
    result.set(new Uint8Array(buffer, 0, bundleOffset), offset);
    offset += bundleOffset;
  }

  return result;
}
/**
 * Encode records into a binary stream of "interleaved" chunks (type 0x12),
 * where each record's Δelem increments and elem values are written together,
 * row by row.
 *
 * Records are grouped by their i value, sorted by j, and split into chunks
 * whose j values form an arithmetic progression (j0 + idx * Δj). Chunks are
 * packed into bundles (type 0x1C) whose payload length is stored in 24 bits.
 *
 * @param {Array<Object>} json - Records to encode.
 * @param {Function} iGetter - Returns a record's i value (must fit Uint16).
 * @param {Function} jGetter - Returns a record's j value (j0 must fit Uint16).
 * @param {Array<{key: Function, baseType: Function, incrType: Function}>} Δelems
 *        Delta-encoded elements: initial value stored as baseType, per-record
 *        increments stored as incrType.
 * @param {Array<{key: Function, type: Function}>} elems - Plainly stored elements.
 * @param {number} udv - User-defined value byte copied into each chunk header.
 * @returns {Uint8Array} Concatenated bundles; empty array for empty input.
 * @throws {Error} On non-function getters or null/undefined values.
 */
function interleaved(json, iGetter, jGetter, Δelems = [], elems = [], udv = 0) {
  if (!Array.isArray(json) || !json.length) return new Uint8Array(0);
  if (typeof iGetter !== 'function' || typeof jGetter !== 'function') throw new Error('i and j must be getter functions');
  Δelems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`Δelems[${idx}].key must be a getter function`);
  });
  elems.forEach((elem, idx) => {
    if (typeof elem.key !== 'function') throw new Error(`elems[${idx}].key must be a getter function`);
  });

  // Group records by i value
  const groups = new Map();
  for (const record of json) {
    const iValue = iGetter(record);
    if (iValue == null) throw new Error('Missing i value from getter');
    if (!groups.has(iValue)) groups.set(iValue, []);
    groups.get(iValue).push(record);
  }

  const maxBundleSize = 0xFFFFFF; // Max bundle length (24 bits)
  const buffers = [];

  // Process each group (i value)
  for (const [iValue, records] of groups) {
    // Sort records by j to ensure consistent order
    records.sort((a, b) => jGetter(a) - jGetter(b));
    const jValues = records.map(jGetter);
    if (jValues.some(v => v == null)) throw new Error('Missing j value from getter');

    // Split records into chunks based on Δj continuity. A chunk holds at most
    // 65535 records because its count field is a Uint16.
    // BUGFIX: the cap was `>= 65536`, which let a chunk grow to 65536 records
    // and then trip the "Chunk size exceeds 65535" check below.
    // PERF: the position inside the current chunk is tracked in O(1) via
    // chunkStart instead of re-summing all previous chunk lengths (O(n²)).
    const chunks = [];
    let currentChunk = [records[0]];
    let currentJ0 = jValues[0];
    let currentΔj = records.length > 1 ? jValues[1] - jValues[0] : 0;
    let chunkStart = 0; // Index (into records) of the current chunk's first record

    for (let idx = 1; idx < records.length; idx++) {
      const posInChunk = idx - chunkStart;
      const expectedJ = currentJ0 + posInChunk * currentΔj;
      if (jValues[idx] !== expectedJ || posInChunk >= 65535) {
        chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
        currentChunk = [records[idx]];
        currentJ0 = jValues[idx];
        currentΔj = idx + 1 < records.length ? jValues[idx + 1] - jValues[idx] : 0;
        chunkStart = idx;
      } else {
        currentChunk.push(records[idx]);
      }
    }
    if (currentChunk.length > 0) {
      chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj });
    }

    // Calculate the byte size of each chunk: header + preface + initial
    // values + interleaved record data, with 4-byte padding after the initial
    // values and at the end of the chunk.
    const chunkSizes = chunks.map(({ records: chunkRecords }) => {
      // Defensive; the splitting above now guarantees this cannot trigger.
      if (chunkRecords.length > 65535) throw new Error(`Chunk size exceeds 65535 for i=${iValue}`);
      let size = 12; // Header
      size += Δelems.length + elems.length; // Preface (one type byte per element)
      size += Δelems.reduce((sum, e) => sum + typeToBytes[e.baseType.name], 0); // Initial values
      size += (4 - (size % 4)) % 4; // Pad after initial values
      size += chunkRecords.length * (
        Δelems.reduce((sum, e) => sum + typeToBytes[e.incrType.name], 0) +
        elems.reduce((sum, e) => sum + typeToBytes[e.type.name], 0)
      ); // Interleaved record data
      size += (4 - (size % 4)) % 4; // Pad after record data
      return size;
    });
    const totalChunkSize = chunkSizes.reduce((sum, size) => sum + size, 0);

    // Start a new bundle if this group would overflow the current one.
    // NOTE(review): a single group larger than maxBundleSize still overflows
    // the 24-bit length field — preserved from the original behavior.
    const lastBundle = buffers[buffers.length - 1];
    if (!lastBundle || lastBundle.offset + totalChunkSize > maxBundleSize) {
      buffers.push({ offset: 4, buffer: null, view: null }); // offset 4 reserves the bundle header
    }

    // Lazily allocate the bundle's buffer
    const currentBundle = buffers[buffers.length - 1];
    if (!currentBundle.buffer) {
      currentBundle.buffer = new ArrayBuffer(totalChunkSize + 4);
      currentBundle.view = new DataView(currentBundle.buffer);
    }

    // Process each chunk
    for (let c = 0; c < chunks.length; c++) {
      const { records: chunkRecords, j0, Δj } = chunks[c];
      const chunkSize = chunkSizes[c]; // Indexed access; no O(n²) shift()

      // Grow the bundle buffer if needed (later groups appended to an existing bundle)
      if (currentBundle.offset + chunkSize > currentBundle.buffer.byteLength) {
        const newBuffer = new ArrayBuffer(currentBundle.offset + chunkSize);
        new Uint8Array(newBuffer).set(new Uint8Array(currentBundle.buffer));
        currentBundle.buffer = newBuffer;
        currentBundle.view = new DataView(newBuffer);
      }

      // Write chunk header (all multi-byte fields little-endian)
      let offset = currentBundle.offset;
      currentBundle.view.setUint8(offset++, 0x12); // Chunk type: interleaved
      currentBundle.view.setUint8(offset++, udv); // udv
      currentBundle.view.setUint16(offset, chunkRecords.length, true); offset += 2; // count
      currentBundle.view.setUint16(offset, iValue, true); offset += 2; // i
      currentBundle.view.setUint16(offset, j0, true); offset += 2; // j0
      currentBundle.view.setInt16(offset, Δj, true); offset += 2; // Δj (signed)
      currentBundle.view.setUint8(offset++, Δelems.length); // Δelem_count
      currentBundle.view.setUint8(offset++, elems.length); // elem_count

      // Write chunk preface: one type byte per element
      for (const elem of Δelems) {
        const baseCode = typeToCode[elem.baseType.name];
        const incrCode = typeToCode[elem.incrType.name];
        currentBundle.view.setUint8(offset++, (incrCode << 4) | baseCode); // High nibble: increment type
      }
      for (const elem of elems) {
        currentBundle.view.setUint8(offset++, typeToCode[elem.type.name]);
      }

      // Write initial values for Δelems
      for (const elem of Δelems) {
        const value = elem.key(chunkRecords[0]);
        if (value == null) throw new Error('Missing Δelem value from getter');
        writeTypedValue(currentBundle.view, offset, value, elem.baseType);
        offset += typeToBytes[elem.baseType.name];
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Write record data row by row: each record's Δelem increments followed
      // by its plain elem values
      const prevValues = Δelems.map(elem => elem.key(chunkRecords[0]));
      for (let idx = 0; idx < chunkRecords.length; idx++) {
        // Write Δelem increments
        for (let i = 0; i < Δelems.length; i++) {
          const elem = Δelems[i];
          const current = elem.key(chunkRecords[idx]); // Call getter once per record
          const value = idx === 0 ? 0 : current - prevValues[i]; // First increment is always 0
          writeTypedValue(currentBundle.view, offset, value, elem.incrType);
          offset += typeToBytes[elem.incrType.name];
          prevValues[i] = current;
        }
        // Write plain elems
        for (const elem of elems) {
          const value = elem.key(chunkRecords[idx]);
          if (value == null) throw new Error('Missing elem value from getter');
          writeTypedValue(currentBundle.view, offset, value, elem.type);
          offset += typeToBytes[elem.type.name];
        }
      }
      // Pad to 4-byte boundary
      while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0);

      // Update bundle offset
      currentBundle.offset = offset;
    }

    // Update bundle header: type 0x1C with the payload length in the upper 24 bits
    currentBundle.view.setUint32(0, 0x1C | ((currentBundle.offset - 4) << 8), true);
  }

  // Concatenate all bundles into the final byte array
  const finalLength = buffers.reduce((sum, b) => sum + b.offset, 0);
  const result = new Uint8Array(finalLength);
  let offset = 0;
  for (const { buffer, offset: bundleOffset } of buffers) {
    result.set(new Uint8Array(buffer, 0, bundleOffset), offset);
    offset += bundleOffset;
  }

  return result;
}
/**
 * Write a single value into a DataView at the given byte offset,
 * little-endian, using the setter that matches the given TypedArray
 * constructor.
 *
 * @param {DataView} view - Destination view.
 * @param {number} offset - Byte offset to write at.
 * @param {number|bigint} value - Value to write; coerced with BigInt() for
 *        the 64-bit integer types.
 * @param {Function} type - TypedArray constructor selecting the encoding.
 * @throws {Error} If the constructor is not one of the supported types.
 */
function writeTypedValue(view, offset, value, type) {
  if (type === Int8Array) {
    view.setInt8(offset, value);
  } else if (type === Uint8Array) {
    view.setUint8(offset, value);
  } else if (type === Int16Array) {
    view.setInt16(offset, value, true);
  } else if (type === Uint16Array) {
    view.setUint16(offset, value, true);
  } else if (type === Int32Array) {
    view.setInt32(offset, value, true);
  } else if (type === Uint32Array) {
    view.setUint32(offset, value, true);
  } else if (type === Float32Array) {
    view.setFloat32(offset, value, true);
  } else if (type === Float64Array) {
    view.setFloat64(offset, value, true);
  } else if (type === BigInt64Array) {
    view.setBigInt64(offset, BigInt(value), true);
  } else if (type === BigUint64Array) {
    view.setBigUint64(offset, BigInt(value), true);
  } else {
    throw new Error(`Unsupported type: ${type.name}`);
  }
}
// Public API: the two chunk encoders. typeToCode, typeToBytes and
// writeTypedValue are internal helpers and are deliberately not exported.
module.exports = { sequential, interleaved };