From 1cb9d4b1e2a0fca6df7aadf1db0a4ed2e81693fa Mon Sep 17 00:00:00 2001 From: "D. Berge" Date: Wed, 30 Jul 2025 17:37:00 +0200 Subject: [PATCH] Add @dougal/binary module. It encodes / decodes sequence / preplot data using an efficient binary format for sending large amounts of data across the wire and for (relatively) memory efficient client-side use. --- lib/modules/@dougal/binary/classes.js | 719 ++++++++++++++++++++++++ lib/modules/@dougal/binary/decode.js | 327 +++++++++++ lib/modules/@dougal/binary/encode.js | 380 +++++++++++++ lib/modules/@dougal/binary/index.js | 139 +++++ lib/modules/@dougal/binary/package.json | 12 + 5 files changed, 1577 insertions(+) create mode 100644 lib/modules/@dougal/binary/classes.js create mode 100644 lib/modules/@dougal/binary/decode.js create mode 100644 lib/modules/@dougal/binary/encode.js create mode 100644 lib/modules/@dougal/binary/index.js create mode 100644 lib/modules/@dougal/binary/package.json diff --git a/lib/modules/@dougal/binary/classes.js b/lib/modules/@dougal/binary/classes.js new file mode 100644 index 0000000..f740587 --- /dev/null +++ b/lib/modules/@dougal/binary/classes.js @@ -0,0 +1,719 @@ +const codeToType = { + 0: Int8Array, + 1: Uint8Array, + 2: Int16Array, + 3: Uint16Array, + 4: Int32Array, + 5: Uint32Array, + 7: Float32Array, + 8: Float64Array, + 9: BigInt64Array, + 10: BigUint64Array +}; + +const typeToBytes = { + Int8Array: 1, + Uint8Array: 1, + Int16Array: 2, + Uint16Array: 2, + Int32Array: 4, + Uint32Array: 4, + Float32Array: 4, + Float64Array: 8, + BigInt64Array: 8, + BigUint64Array: 8 +}; + +function readTypedValue(view, offset, type) { + switch (type) { + case Int8Array: return view.getInt8(offset); + case Uint8Array: return view.getUint8(offset); + case Int16Array: return view.getInt16(offset, true); + case Uint16Array: return view.getUint16(offset, true); + case Int32Array: return view.getInt32(offset, true); + case Uint32Array: return view.getUint32(offset, true); + case Float32Array: return view.getFloat32(offset, true); + case Float64Array: return view.getFloat64(offset, true); + case BigInt64Array: return view.getBigInt64(offset, true); + case BigUint64Array: return view.getBigUint64(offset, true); + default: throw new Error(`Unsupported type: ${type.name}`); + } +} + +class DougalBinaryBundle extends ArrayBuffer { + + static HEADER_LENGTH = 4; // Length of a bundle header + + /** Clone an existing ByteArray into a DougalBinaryBundle + */ + static clone (buffer) { + const clone = new DougalBinaryBundle(buffer.byteLength); + const uint8Array = new Uint8Array(buffer); + const uint8ArrayClone = new Uint8Array(clone); + uint8ArrayClone.set(uint8Array); + return clone; + } + + constructor (length, options) { + super (length, options); + } + + /** Get the count of bundles in this ByteArray. 
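+   *
+   * Each bundle starts with a little-endian uint32 header whose low byte is
+   * the 0x1c marker and whose upper 24 bits hold the length of the payload
+   * that follows. A rough sketch of the test applied at each offset
+   * (illustrative only; `view` and `offset` stand in for the locals below):
+   *
+   *   const header = view.getUint32(offset, true);
+   *   const isBundle = (header & 0xff) === 0x1c; // marker byte
+   *   const payloadLength = header >>> 8;        // 24-bit byte count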
+ * + * Stops at the first non-bundle looking offset + */ + get bundleCount () { + let count = 0; + let currentBundleOffset = 0; + const view = new DataView(this); + + while (currentBundleOffset < this.byteLength) { + + const currentBundleHeader = view.getUint32(currentBundleOffset, true); + if ((currentBundleHeader & 0xff) !== 0x1c) { + // This is not a bundle + return count; + } + let currentBundleLength = currentBundleHeader >>> 8; + + currentBundleOffset += currentBundleLength + DougalBinaryBundle.HEADER_LENGTH; + count++; + } + + return count; + } + + + /** Get the number of chunks in the bundles of this ByteArray + */ + get chunkCount () { + let count = 0; + let bundleOffset = 0; + const view = new DataView(this); + + while (bundleOffset < this.byteLength) { + const header = view.getUint32(bundleOffset, true); + if ((header & 0xFF) !== 0x1C) break; + const length = header >>> 8; + if (bundleOffset + 4 + length > this.byteLength) break; + + let chunkOffset = bundleOffset + 4; // relative to buffer start + + while (chunkOffset < bundleOffset + 4 + length) { + const chunkType = view.getUint8(chunkOffset); + if (chunkType !== 0x11 && chunkType !== 0x12) break; + + const cCount = view.getUint16(chunkOffset + 2, true); + const ΔelemC = view.getUint8(chunkOffset + 10); + const elemC = view.getUint8(chunkOffset + 11); + + let localOffset = 12; // header size + + localOffset += ΔelemC + elemC; // preface + + // initial values + for (let k = 0; k < ΔelemC; k++) { + const typeByte = view.getUint8(chunkOffset + 12 + k); + const baseCode = typeByte & 0xF; + const baseType = codeToType[baseCode]; + if (!baseType) throw new Error('Invalid base type code'); + localOffset += typeToBytes[baseType.name]; + } + + // pad after initial + while (localOffset % 4 !== 0) localOffset++; + + if (chunkType === 0x11) { // Sequential + // record data: Δelems incrs + for (let k = 0; k < ΔelemC; k++) { + const typeByte = view.getUint8(chunkOffset + 12 + k); + const incrCode = typeByte >> 4; + const incrType = codeToType[incrCode]; + if (!incrType) throw new Error('Invalid incr type code'); + localOffset += cCount * typeToBytes[incrType.name]; + } + + // elems + for (let k = 0; k < elemC; k++) { + const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k); + const type = codeToType[typeCode]; + if (!type) throw new Error('Invalid elem type code'); + localOffset += cCount * typeToBytes[type.name]; + } + } else { // Interleaved + // Compute exact stride for interleaved record data + let ΔelemStride = 0; + for (let k = 0; k < ΔelemC; k++) { + const typeByte = view.getUint8(chunkOffset + 12 + k); + const incrCode = typeByte >> 4; + const incrType = codeToType[incrCode]; + if (!incrType) throw new Error('Invalid incr type code'); + ΔelemStride += typeToBytes[incrType.name]; + } + let elemStride = 0; + for (let k = 0; k < elemC; k++) { + const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k); + const type = codeToType[typeCode]; + if (!type) throw new Error('Invalid elem type code'); + elemStride += typeToBytes[type.name]; + } + const recordStride = ΔelemStride + elemStride; + localOffset += cCount * recordStride; + } + + // pad after record + while (localOffset % 4 !== 0) localOffset++; + + chunkOffset += localOffset; + count++; + } + + bundleOffset += 4 + length; + } + + return count; + } + + /** Return an array of DougalBinaryChunkSequential or DougalBinaryChunkInterleaved instances + */ + chunks () { + const chunks = []; + let bundleOffset = 0; + const view = new DataView(this); + + while (bundleOffset < 
this.byteLength) { + const header = view.getUint32(bundleOffset, true); + if ((header & 0xFF) !== 0x1C) break; + const length = header >>> 8; + if (bundleOffset + 4 + length > this.byteLength) break; + + let chunkOffset = bundleOffset + 4; + + while (chunkOffset < bundleOffset + 4 + length) { + const chunkType = view.getUint8(chunkOffset); + if (chunkType !== 0x11 && chunkType !== 0x12) break; + + const cCount = view.getUint16(chunkOffset + 2, true); + const ΔelemC = view.getUint8(chunkOffset + 10); + const elemC = view.getUint8(chunkOffset + 11); + + let localOffset = 12; + + localOffset += ΔelemC + elemC; + + // initial values + for (let k = 0; k < ΔelemC; k++) { + const typeByte = view.getUint8(chunkOffset + 12 + k); + const baseCode = typeByte & 0xF; + const baseType = codeToType[baseCode]; + if (!baseType) throw new Error('Invalid base type code'); + localOffset += typeToBytes[baseType.name]; + } + + // pad after initial + while (localOffset % 4 !== 0) localOffset++; + + if (chunkType === 0x11) { // Sequential + // record data: Δelems incrs + for (let k = 0; k < ΔelemC; k++) { + const typeByte = view.getUint8(chunkOffset + 12 + k); + const incrCode = typeByte >> 4; + const incrType = codeToType[incrCode]; + if (!incrType) throw new Error('Invalid incr type code'); + localOffset += cCount * typeToBytes[incrType.name]; + } + + // elems + for (let k = 0; k < elemC; k++) { + const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k); + const type = codeToType[typeCode]; + if (!type) throw new Error('Invalid elem type code'); + localOffset += cCount * typeToBytes[type.name]; + } + } else { // Interleaved + // Compute exact stride for interleaved record data + let ΔelemStride = 0; + for (let k = 0; k < ΔelemC; k++) { + const typeByte = view.getUint8(chunkOffset + 12 + k); + const incrCode = typeByte >> 4; + const incrType = codeToType[incrCode]; + if (!incrType) throw new Error('Invalid incr type code'); + ΔelemStride += typeToBytes[incrType.name]; + } + let elemStride = 0; + for (let k = 0; k < elemC; k++) { + const typeCode = view.getUint8(chunkOffset + 12 + ΔelemC + k); + const type = codeToType[typeCode]; + if (!type) throw new Error('Invalid elem type code'); + elemStride += typeToBytes[type.name]; + } + const recordStride = ΔelemStride + elemStride; + localOffset += cCount * recordStride; + } + + // pad after record + while (localOffset % 4 !== 0) localOffset++; + + switch (chunkType) { + case 0x11: + chunks.push(new DougalBinaryChunkSequential(this, chunkOffset, localOffset)); + break; + case 0x12: + chunks.push(new DougalBinaryChunkInterleaved(this, chunkOffset, localOffset)); + break; + default: + throw new Error('Invalid chunk type'); + } + + chunkOffset += localOffset; + } + + bundleOffset += 4 + length; + } + + return chunks; + } + + /** Return a ByteArray containing all data from all + * chunks including reconstructed i, j and incremental + * values as follows: + * + * // i values (constant) + * // j values (j0 + Δj*i) + * <Δelem_0_0> <Δelem_0_1> … <Δelem_0_x> // reconstructed Δelem0 (uses baseType) + * <Δelem_1_0> <Δelem_1_1> … <Δelem_1_x> // reconstructed Δelem1 + * … + * <Δelem_y_0> <Δelem_y_1> … <Δelem_y_x> // reconstructed Δelem1 + * // First elem + * // Second elem + * … + * // Last elem + * + * It does not matter whether the underlying chunks are + * sequential or interleaved. This function will transform + * as necessary. 
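+   *
+   * One way this could be assembled (an illustrative sketch only, not the
+   * final implementation; `out` is a hypothetical accumulator) is to walk
+   * chunks() and concatenate the arrays it already reconstructs:
+   *
+   *   for (const chunk of this.chunks()) {
+   *     for (let n = 0; n < chunk.ΔelemCount; n++) out.Δelems[n].push(...chunk.Δelem(n));
+   *     for (let n = 0; n < chunk.elemCount; n++) out.elems[n].push(...chunk.elem(n));
+   *   }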
+ * + */ + getDataSequentially () { + // TODO + } + + /** Return a ByteArray containing all data from all + * chunks including reconstructed i, j and incremental + * values, interleaved as follows: + * + * <Δelem_0_0> <Δelem_1_0> … <Δelem_y_0> + * <Δelem_0_1> <Δelem_1_1> … <Δelem_y_1> + * <Δelem_0_x> <Δelem_1_x> … <Δelem_y_x> + * + * It does not matter whether the underlying chunks are + * sequential or interleaved. This function will transform + * as necessary. + * + */ + getDataInterleaved () { + // TODO + } + +} + + +class DougalBinaryChunkSequential extends ArrayBuffer { + + constructor (buffer, offset, length) { + super(length); + new Uint8Array(this).set(new Uint8Array(buffer, offset, length)); + this._ΔelemCaches = new Array(this.ΔelemCount); + this._elemCaches = new Array(this.elemCount); + this._ΔelemBlockOffsets = null; + this._elemBlockOffsets = null; + this._recordOffset = null; + } + + _getRecordOffset() { + if (this._recordOffset !== null) return this._recordOffset; + const view = new DataView(this); + const ΔelemC = this.ΔelemCount; + const elemC = this.elemCount; + + let recordOffset = 12 + ΔelemC + elemC; + for (let k = 0; k < ΔelemC; k++) { + const tb = view.getUint8(12 + k); + const bc = tb & 0xF; + const bt = codeToType[bc]; + recordOffset += typeToBytes[bt.name]; + } + while (recordOffset % 4 !== 0) recordOffset++; + this._recordOffset = recordOffset; + return recordOffset; + } + + _initBlockOffsets() { + if (this._ΔelemBlockOffsets !== null) return; + const view = new DataView(this); + const count = this.jCount; + const ΔelemC = this.ΔelemCount; + const elemC = this.elemCount; + + const recordOffset = this._getRecordOffset(); + + this._ΔelemBlockOffsets = []; + let o = recordOffset; + for (let k = 0; k < ΔelemC; k++) { + this._ΔelemBlockOffsets[k] = o; + const tb = view.getUint8(12 + k); + const ic = tb >> 4; + const it = codeToType[ic]; + o += count * typeToBytes[it.name]; + } + + this._elemBlockOffsets = []; + for (let k = 0; k < elemC; k++) { + this._elemBlockOffsets[k] = o; + const tc = view.getUint8(12 + ΔelemC + k); + const t = codeToType[tc]; + o += count * typeToBytes[t.name]; + } + } + + /** Return the user-defined value + */ + get udv () { + return new DataView(this).getUint8(1); + } + + /** Return the number of j elements in this chunk + */ + get jCount () { + return new DataView(this).getUint16(2, true); + } + + /** Return the i value in this chunk + */ + get i () { + return new DataView(this).getUint16(4, true); + } + + /** Return the j0 value in this chunk + */ + get j0 () { + return new DataView(this).getUint16(6, true); + } + + /** Return the Δj value in this chunk + */ + get Δj () { + return new DataView(this).getInt16(8, true); + } + + /** Return the Δelem_count value in this chunk + */ + get ΔelemCount () { + return new DataView(this).getUint8(10); + } + + /** Return the elem_count value in this chunk + */ + get elemCount () { + return new DataView(this).getUint8(11); + } + + /** Return a TypedArray (e.g., Uint16Array, …) for the n-th Δelem in the chunk + */ + Δelem (n) { + if (this._ΔelemCaches[n]) return this._ΔelemCaches[n]; + + if (n < 0 || n >= this.ΔelemCount) throw new Error(`Invalid Δelem index: ${n}`); + const view = new DataView(this); + const count = this.jCount; + const ΔelemC = this.ΔelemCount; + + const typeByte = view.getUint8(12 + n); + const baseCode = typeByte & 0xF; + const incrCode = typeByte >> 4; + const baseType = codeToType[baseCode]; + const incrType = codeToType[incrCode]; + if (!baseType || !incrType) throw new 
Error('Invalid type codes for Δelem');
+
+    // Find offset for initial value of this Δelem
+    let initialOffset = 12 + ΔelemC + this.elemCount;
+    for (let k = 0; k < n; k++) {
+      const tb = view.getUint8(12 + k);
+      const bc = tb & 0xF;
+      const bt = codeToType[bc];
+      initialOffset += typeToBytes[bt.name];
+    }
+
+    let current = readTypedValue(view, initialOffset, baseType);
+
+    // Advance to start of record data (after all initials and pad)
+    const recordOffset = this._getRecordOffset();
+
+    // Find offset for deltas of this Δelem (skip previous Δelems' delta blocks)
+    this._initBlockOffsets();
+    const deltaOffset = this._ΔelemBlockOffsets[n];
+
+    // Reconstruct the array
+    const arr = new baseType(count);
+    const isBigInt = baseType === BigInt64Array || baseType === BigUint64Array;
+    arr[0] = current;
+    for (let idx = 1; idx < count; idx++) {
+      let delta = readTypedValue(view, deltaOffset + idx * typeToBytes[incrType.name], incrType);
+      if (isBigInt) {
+        delta = BigInt(delta);
+        current += delta;
+      } else {
+        current += delta;
+      }
+      arr[idx] = current;
+    }
+
+    this._ΔelemCaches[n] = arr;
+    return arr;
+  }
+
+  /** Return a TypedArray (e.g., Uint16Array, …) for the n-th elem in the chunk
+   */
+  elem (n) {
+    if (this._elemCaches[n]) return this._elemCaches[n];
+
+    if (n < 0 || n >= this.elemCount) throw new Error(`Invalid elem index: ${n}`);
+    const view = new DataView(this);
+    const count = this.jCount;
+    const ΔelemC = this.ΔelemCount;
+    const elemC = this.elemCount;
+
+    const typeCode = view.getUint8(12 + ΔelemC + n);
+    const type = codeToType[typeCode];
+    if (!type) throw new Error('Invalid type code for elem');
+
+    // Find offset for this elem's data block
+    this._initBlockOffsets();
+    const elemOffset = this._elemBlockOffsets[n];
+
+    // Create and populate the array
+    const arr = new type(count);
+    const bytes = typeToBytes[type.name];
+    for (let idx = 0; idx < count; idx++) {
+      arr[idx] = readTypedValue(view, elemOffset + idx * bytes, type);
+    }
+
+    this._elemCaches[n] = arr;
+    return arr;
+  }
+
+  getRecord (index) {
+    if (index < 0 || index >= this.jCount) throw new Error(`Invalid record index: ${index}`);
+
+    const arr = [this.udv, this.i, this.j0 + index * this.Δj];
+
+    for (let m = 0; m < this.ΔelemCount; m++) {
+      const values = this.Δelem(m);
+      arr.push(values[index]);
+    }
+
+    for (let m = 0; m < this.elemCount; m++) {
+      const values = this.elem(m);
+      arr.push(values[index]);
+    }
+
+    return arr;
+  }
+}
+
+
+class DougalBinaryChunkInterleaved extends ArrayBuffer {
+  constructor(buffer, offset, length) {
+    super(length);
+    new Uint8Array(this).set(new Uint8Array(buffer, offset, length));
+    this._incrStrides = [];
+    this._elemStrides = [];
+    this._incrOffsets = [];
+    this._elemOffsets = [];
+    this._recordStride = 0;
+    this._recordOffset = null;
+    this._initStrides();
+    this._ΔelemCaches = new Array(this.ΔelemCount);
+    this._elemCaches = new Array(this.elemCount);
+  }
+
+  _getRecordOffset() {
+    if (this._recordOffset !== null) return this._recordOffset;
+    const view = new DataView(this);
+    const ΔelemC = this.ΔelemCount;
+    const elemC = this.elemCount;
+
+    let recordOffset = 12 + ΔelemC + elemC;
+    for (let k = 0; k < ΔelemC; k++) {
+      const tb = view.getUint8(12 + k);
+      const bc = tb & 0xF;
+      const bt = codeToType[bc];
+      recordOffset += typeToBytes[bt.name];
+    }
+    while (recordOffset % 4 !== 0) recordOffset++;
+    this._recordOffset = recordOffset;
+    return recordOffset;
+  }
+
+  _initStrides() {
+    const view = new DataView(this);
+    const ΔelemC = this.ΔelemCount;
+    const elemC = 
this.elemCount; + + // Compute incr strides and offsets + let incrOffset = 0; + for (let k = 0; k < ΔelemC; k++) { + const typeByte = view.getUint8(12 + k); + const incrCode = typeByte >> 4; + const incrType = codeToType[incrCode]; + if (!incrType) throw new Error('Invalid incr type code'); + this._incrOffsets.push(incrOffset); + const bytes = typeToBytes[incrType.name]; + this._incrStrides.push(bytes); + incrOffset += bytes; + this._recordStride += bytes; + } + + // Compute elem strides and offsets + let elemOffset = incrOffset; + for (let k = 0; k < elemC; k++) { + const typeCode = view.getUint8(12 + ΔelemC + k); + const type = codeToType[typeCode]; + if (!type) throw new Error('Invalid elem type code'); + this._elemOffsets.push(elemOffset); + const bytes = typeToBytes[type.name]; + this._elemStrides.push(bytes); + elemOffset += bytes; + this._recordStride += bytes; + } + } + + get udv() { + return new DataView(this).getUint8(1); + } + + get jCount() { + return new DataView(this).getUint16(2, true); + } + + get i() { + return new DataView(this).getUint16(4, true); + } + + get j0() { + return new DataView(this).getUint16(6, true); + } + + get Δj() { + return new DataView(this).getInt16(8, true); + } + + get ΔelemCount() { + return new DataView(this).getUint8(10); + } + + get elemCount() { + return new DataView(this).getUint8(11); + } + + Δelem(n) { + if (this._ΔelemCaches[n]) return this._ΔelemCaches[n]; + + if (n < 0 || n >= this.ΔelemCount) throw new Error(`Invalid Δelem index: ${n}`); + const view = new DataView(this); + const count = this.jCount; + const ΔelemC = this.ΔelemCount; + + const typeByte = view.getUint8(12 + n); + const baseCode = typeByte & 0xF; + const incrCode = typeByte >> 4; + const baseType = codeToType[baseCode]; + const incrType = codeToType[incrCode]; + if (!baseType || !incrType) throw new Error('Invalid type codes for Δelem'); + + // Find offset for initial value of this Δelem + let initialOffset = 12 + ΔelemC + this.elemCount; + for (let k = 0; k < n; k++) { + const tb = view.getUint8(12 + k); + const bc = tb & 0xF; + const bt = codeToType[bc]; + initialOffset += typeToBytes[bt.name]; + } + + let current = readTypedValue(view, initialOffset, baseType); + + // Find offset to start of record data + const recordOffset = this._getRecordOffset(); + + // Use precomputed offset for this Δelem + const deltaOffset = recordOffset + this._incrOffsets[n]; + + // Reconstruct the array + const arr = new baseType(count); + const isBigInt = baseType === BigInt64Array || baseType === BigUint64Array; + arr[0] = current; + for (let idx = 1; idx < count; idx++) { + let delta = readTypedValue(view, deltaOffset + idx * this._recordStride, incrType); + if (isBigInt) { + delta = BigInt(delta); + current += delta; + } else { + current += delta; + } + arr[idx] = current; + } + + this._ΔelemCaches[n] = arr; + return arr; + } + + elem(n) { + if (this._elemCaches[n]) return this._elemCaches[n]; + + if (n < 0 || n >= this.elemCount) throw new Error(`Invalid elem index: ${n}`); + const view = new DataView(this); + const count = this.jCount; + const ΔelemC = this.ΔelemCount; + + const typeCode = view.getUint8(12 + ΔelemC + n); + const type = codeToType[typeCode]; + if (!type) throw new Error('Invalid type code for elem'); + + // Find offset to start of record data + const recordOffset = this._getRecordOffset(); + + // Use precomputed offset for this elem (relative to start of record data) + const elemOffset = recordOffset + this._elemOffsets[n]; + + // Create and populate the array + const arr 
= new type(count); + const bytes = typeToBytes[type.name]; + for (let idx = 0; idx < count; idx++) { + arr[idx] = readTypedValue(view, elemOffset + idx * this._recordStride, type); + } + + this._elemCaches[n] = arr; + return arr; + } + + getRecord (index) { + if (index < 0 || index >= this.jCount) throw new Error(`Invalid record index: ${index}`); + + const arr = [this.udv, this.i, this.j0 + index * this.Δj]; + + for (let m = 0; m < this.ΔelemCount; m++) { + const values = this.Δelem(m); + arr.push(values[index]); + } + + for (let m = 0; m < this.elemCount; m++) { + const values = this.elem(m); + arr.push(values[index]); + } + + return arr; + } +} + + +module.exports = { DougalBinaryBundle, DougalBinaryChunkSequential, DougalBinaryChunkInterleaved } diff --git a/lib/modules/@dougal/binary/decode.js b/lib/modules/@dougal/binary/decode.js new file mode 100644 index 0000000..98340fd --- /dev/null +++ b/lib/modules/@dougal/binary/decode.js @@ -0,0 +1,327 @@ +const codeToType = { + 0: Int8Array, + 1: Uint8Array, + 2: Int16Array, + 3: Uint16Array, + 4: Int32Array, + 5: Uint32Array, + 7: Float32Array, + 8: Float64Array, + 9: BigInt64Array, + 10: BigUint64Array +}; + +const typeToBytes = { + Int8Array: 1, + Uint8Array: 1, + Int16Array: 2, + Uint16Array: 2, + Int32Array: 4, + Uint32Array: 4, + Float32Array: 4, + Float64Array: 8, + BigInt64Array: 8, + BigUint64Array: 8 +}; + +function sequential(binary) { + if (!(binary instanceof Uint8Array) || binary.length < 4) { + throw new Error('Invalid binary input'); + } + + const view = new DataView(binary.buffer, binary.byteOffset, binary.byteLength); + let offset = 0; + + // Initialize result (assuming single i value for simplicity; extend for multiple i values if needed) + const result = { i: null, j: [], Δelems: [], elems: [] }; + + // Process bundles + while (offset < binary.length) { + // Read bundle header + if (offset + 4 > binary.length) throw new Error('Incomplete bundle header'); + + const bundleHeader = view.getUint32(offset, true); + if ((bundleHeader & 0xFF) !== 0x1C) throw new Error('Invalid bundle marker'); + const bundleLength = bundleHeader >> 8; + offset += 4; + const bundleEnd = offset + bundleLength; + + if (bundleEnd > binary.length) throw new Error('Bundle length exceeds input size'); + + // Process chunks in bundle + while (offset < bundleEnd) { + // Read chunk header + if (offset + 12 > bundleEnd) throw new Error('Incomplete chunk header'); + const chunkType = view.getUint8(offset); + if (chunkType !== 0x11) throw new Error(`Unsupported chunk type: ${chunkType}`); + offset += 1; // Skip udv + offset += 1; + const count = view.getUint16(offset, true); offset += 2; + if (count > 65535) throw new Error('Chunk count exceeds 65535'); + const iValue = view.getUint16(offset, true); offset += 2; + const j0 = view.getUint16(offset, true); offset += 2; + const Δj = view.getInt16(offset, true); offset += 2; + const ΔelemCount = view.getUint8(offset++); // Δelem_count + const elemCount = view.getUint8(offset++); // elem_count + + // Set i value (assuming all chunks share the same i) + if (result.i === null) result.i = iValue; + else if (result.i !== iValue) throw new Error('Multiple i values not supported'); + + // Read preface (element types) + const ΔelemTypes = []; + for (let i = 0; i < ΔelemCount; i++) { + if (offset >= bundleEnd) throw new Error('Incomplete Δelem types'); + const typeByte = view.getUint8(offset++); + const baseCode = typeByte & 0x0F; + const incrCode = typeByte >> 4; + if (!codeToType[baseCode] || !codeToType[incrCode]) { 
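+          // A Δelem type byte packs two 4-bit codes: the low nibble selects the
+          // base (reconstructed value) type and the high nibble the increment
+          // (stored delta) type. Reject any byte that does not map to a
+          // supported TypedArray on both nibbles.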
+ throw new Error(`Invalid type code in Δelem: ${typeByte}`); + } + ΔelemTypes.push({ baseType: codeToType[baseCode], incrType: codeToType[incrCode] }); + } + const elemTypes = []; + for (let i = 0; i < elemCount; i++) { + if (offset >= bundleEnd) throw new Error('Incomplete elem types'); + const typeCode = view.getUint8(offset++); + if (!codeToType[typeCode]) throw new Error(`Invalid type code in elem: ${typeCode}`); + elemTypes.push(codeToType[typeCode]); + } + + // Initialize Δelems and elems arrays if first chunk + if (!result.Δelems.length && ΔelemCount > 0) { + result.Δelems = Array(ΔelemCount).fill().map(() => []); + } + if (!result.elems.length && elemCount > 0) { + result.elems = Array(elemCount).fill().map(() => []); + } + + // Read initial values for Δelems + const initialValues = []; + for (const { baseType } of ΔelemTypes) { + if (offset + typeToBytes[baseType.name] > bundleEnd) { + throw new Error('Incomplete initial values'); + } + initialValues.push(readTypedValue(view, offset, baseType)); + offset += typeToBytes[baseType.name]; + } + // Skip padding + while (offset % 4 !== 0) { + if (offset >= bundleEnd) throw new Error('Incomplete padding after initial values'); + offset++; + } + + // Reconstruct j values + for (let idx = 0; idx < count; idx++) { + result.j.push(j0 + idx * Δj); + } + + // Read record data (non-interleaved) + for (let i = 0; i < ΔelemCount; i++) { + let current = initialValues[i]; + const values = result.Δelems[i]; + const incrType = ΔelemTypes[i].incrType; + const isBigInt = typeof current === 'bigint'; + for (let idx = 0; idx < count; idx++) { + if (offset + typeToBytes[incrType.name] > bundleEnd) { + throw new Error('Incomplete Δelem data'); + } + let delta = readTypedValue(view, offset, incrType); + if (idx === 0) { + values.push(isBigInt ? Number(current) : current); + } else { + if (isBigInt) { + delta = BigInt(delta); + current += delta; + values.push(Number(current)); + } else { + current += delta; + values.push(current); + } + } + offset += typeToBytes[incrType.name]; + } + } + for (let i = 0; i < elemCount; i++) { + const values = result.elems[i]; + const type = elemTypes[i]; + const isBigInt = type === BigInt64Array || type === BigUint64Array; + for (let idx = 0; idx < count; idx++) { + if (offset + typeToBytes[type.name] > bundleEnd) { + throw new Error('Incomplete elem data'); + } + let value = readTypedValue(view, offset, type); + values.push(isBigInt ? 
Number(value) : value); + offset += typeToBytes[type.name]; + } + } + // Skip padding + while (offset % 4 !== 0) { + if (offset >= bundleEnd) throw new Error('Incomplete padding after record data'); + offset++; + } + } + } + + return result; +} + + +function interleaved(binary) { + if (!(binary instanceof Uint8Array) || binary.length < 4) { + throw new Error('Invalid binary input'); + } + + const view = new DataView(binary.buffer, binary.byteOffset, binary.byteLength); + let offset = 0; + + // Initialize result (assuming single i value for simplicity; extend for multiple i values if needed) + const result = { i: null, j: [], Δelems: [], elems: [] }; + + // Process bundles + while (offset < binary.length) { + // Read bundle header + if (offset + 4 > binary.length) throw new Error('Incomplete bundle header'); + + const bundleHeader = view.getUint32(offset, true); + if ((bundleHeader & 0xFF) !== 0x1C) throw new Error('Invalid bundle marker'); + const bundleLength = bundleHeader >> 8; + offset += 4; + const bundleEnd = offset + bundleLength; + + if (bundleEnd > binary.length) throw new Error('Bundle length exceeds input size'); + + // Process chunks in bundle + while (offset < bundleEnd) { + // Read chunk header + if (offset + 12 > bundleEnd) throw new Error('Incomplete chunk header'); + const chunkType = view.getUint8(offset); + if (chunkType !== 0x12) throw new Error(`Unsupported chunk type: ${chunkType}`); + offset += 1; // Skip udv + offset += 1; + const count = view.getUint16(offset, true); offset += 2; + if (count > 65535) throw new Error('Chunk count exceeds 65535'); + const iValue = view.getUint16(offset, true); offset += 2; + const j0 = view.getUint16(offset, true); offset += 2; + const Δj = view.getInt16(offset, true); offset += 2; + const ΔelemCount = view.getUint8(offset++); // Δelem_count + const elemCount = view.getUint8(offset++); // elem_count + + // Set i value (assuming all chunks share the same i) + if (result.i === null) result.i = iValue; + else if (result.i !== iValue) throw new Error('Multiple i values not supported'); + + // Read preface (element types) + const ΔelemTypes = []; + for (let i = 0; i < ΔelemCount; i++) { + if (offset >= bundleEnd) throw new Error('Incomplete Δelem types'); + const typeByte = view.getUint8(offset++); + const baseCode = typeByte & 0x0F; + const incrCode = typeByte >> 4; + if (!codeToType[baseCode] || !codeToType[incrCode]) { + throw new Error(`Invalid type code in Δelem: ${typeByte}`); + } + ΔelemTypes.push({ baseType: codeToType[baseCode], incrType: codeToType[incrCode] }); + } + const elemTypes = []; + for (let i = 0; i < elemCount; i++) { + if (offset >= bundleEnd) throw new Error('Incomplete elem types'); + const typeCode = view.getUint8(offset++); + if (!codeToType[typeCode]) throw new Error(`Invalid type code in elem: ${typeCode}`); + elemTypes.push(codeToType[typeCode]); + } + + // Initialize Δelems and elems arrays if first chunk + if (!result.Δelems.length && ΔelemCount > 0) { + result.Δelems = Array(ΔelemCount).fill().map(() => []); + } + if (!result.elems.length && elemCount > 0) { + result.elems = Array(elemCount).fill().map(() => []); + } + + // Read initial values for Δelems + const initialValues = []; + for (const { baseType } of ΔelemTypes) { + if (offset + typeToBytes[baseType.name] > bundleEnd) { + throw new Error('Incomplete initial values'); + } + initialValues.push(readTypedValue(view, offset, baseType)); + offset += typeToBytes[baseType.name]; + } + // Skip padding + while (offset % 4 !== 0) { + if (offset >= bundleEnd) 
throw new Error('Incomplete padding after initial values'); + offset++; + } + + // Reconstruct j values + for (let idx = 0; idx < count; idx++) { + result.j.push(j0 + idx * Δj); + } + + // Read interleaved record data + for (let idx = 0; idx < count; idx++) { + // Read Δelems + for (let i = 0; i < ΔelemCount; i++) { + const values = result.Δelems[i]; + const incrType = ΔelemTypes[i].incrType; + const isBigInt = typeof initialValues[i] === 'bigint'; + if (offset + typeToBytes[incrType.name] > bundleEnd) { + throw new Error('Incomplete Δelem data'); + } + let delta = readTypedValue(view, offset, incrType); + offset += typeToBytes[incrType.name]; + if (idx === 0) { + values.push(isBigInt ? Number(initialValues[i]) : initialValues[i]); + } else { + if (isBigInt) { + delta = BigInt(delta); + initialValues[i] += delta; + values.push(Number(initialValues[i])); + } else { + initialValues[i] += delta; + values.push(initialValues[i]); + } + } + } + // Read elems + for (let i = 0; i < elemCount; i++) { + const values = result.elems[i]; + const type = elemTypes[i]; + const isBigInt = type === BigInt64Array || type === BigUint64Array; + if (offset + typeToBytes[type.name] > bundleEnd) { + throw new Error('Incomplete elem data'); + } + let value = readTypedValue(view, offset, type); + values.push(isBigInt ? Number(value) : value); + offset += typeToBytes[type.name]; + } + } + // Skip padding + while (offset % 4 !== 0) { + if (offset >= bundleEnd) throw new Error('Incomplete padding after record data'); + offset++; + } + } + } + + return result; +} + +function readTypedValue(view, offset, type) { + switch (type) { + case Int8Array: return view.getInt8(offset); + case Uint8Array: return view.getUint8(offset); + case Int16Array: return view.getInt16(offset, true); + case Uint16Array: return view.getUint16(offset, true); + case Int32Array: return view.getInt32(offset, true); + case Uint32Array: return view.getUint32(offset, true); + case Float32Array: return view.getFloat32(offset, true); + case Float64Array: return view.getFloat64(offset, true); + case BigInt64Array: return view.getBigInt64(offset, true); + case BigUint64Array: return view.getBigUint64(offset, true); + default: throw new Error(`Unsupported type: ${type.name}`); + } +} + +module.exports = { sequential, interleaved }; diff --git a/lib/modules/@dougal/binary/encode.js b/lib/modules/@dougal/binary/encode.js new file mode 100644 index 0000000..b483e26 --- /dev/null +++ b/lib/modules/@dougal/binary/encode.js @@ -0,0 +1,380 @@ +const typeToCode = { + Int8Array: 0, + Uint8Array: 1, + Int16Array: 2, + Uint16Array: 3, + Int32Array: 4, + Uint32Array: 5, + Float32Array: 7, // Float16 not natively supported in JS, use Float32 + Float64Array: 8, + BigInt64Array: 9, + BigUint64Array: 10 +}; + +const typeToBytes = { + Int8Array: 1, + Uint8Array: 1, + Int16Array: 2, + Uint16Array: 2, + Int32Array: 4, + Uint32Array: 4, + Float32Array: 4, + Float64Array: 8, + BigInt64Array: 8, + BigUint64Array: 8 +}; + +function sequential(json, iGetter, jGetter, Δelems = [], elems = [], udv = 0) { + if (!Array.isArray(json) || !json.length) return new Uint8Array(0); + if (typeof iGetter !== 'function' || typeof jGetter !== 'function') throw new Error('i and j must be getter functions'); + Δelems.forEach((elem, idx) => { + if (typeof elem.key !== 'function') throw new Error(`Δelems[${idx}].key must be a getter function`); + }); + elems.forEach((elem, idx) => { + if (typeof elem.key !== 'function') throw new Error(`elems[${idx}].key must be a getter function`); + }); + + // 
Group records by i value + const groups = new Map(); + for (const record of json) { + const iValue = iGetter(record); + if (iValue == null) throw new Error('Missing i value from getter'); + if (!groups.has(iValue)) groups.set(iValue, []); + groups.get(iValue).push(record); + } + + const maxBundleSize = 0xFFFFFF; // Max bundle length (24 bits) + const buffers = []; + + // Process each group (i value) + for (const [iValue, records] of groups) { + // Sort records by j to ensure consistent order + records.sort((a, b) => jGetter(a) - jGetter(b)); + const jValues = records.map(jGetter); + if (jValues.some(v => v == null)) throw new Error('Missing j value from getter'); + + // Split records into chunks based on Δj continuity + const chunks = []; + let currentChunk = [records[0]]; + let currentJ0 = jValues[0]; + let currentΔj = records.length > 1 ? jValues[1] - jValues[0] : 0; + + for (let idx = 1; idx < records.length; idx++) { + const chunkIndex = chunks.reduce((sum, c) => sum + c.records.length, 0); + const expectedJ = currentJ0 + (idx - chunkIndex) * currentΔj; + if (jValues[idx] !== expectedJ || idx - chunkIndex >= 65536) { + chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj }); + currentChunk = [records[idx]]; + currentJ0 = jValues[idx]; + currentΔj = idx + 1 < records.length ? jValues[idx + 1] - jValues[idx] : 0; + } else { + currentChunk.push(records[idx]); + } + } + if (currentChunk.length > 0) { + chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj }); + } + + // Calculate total size for all chunks in this group by simulating offsets + const chunkSizes = chunks.map(({ records: chunkRecords }) => { + if (chunkRecords.length > 65535) throw new Error(`Chunk size exceeds 65535 for i=${iValue}`); + let simulatedOffset = 0; // Relative to chunk start + simulatedOffset += 12; // Header + simulatedOffset += Δelems.length + elems.length; // Preface + simulatedOffset += Δelems.reduce((sum, e) => sum + typeToBytes[e.baseType.name], 0); // Initial values + while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after initial + simulatedOffset += chunkRecords.length * ( + Δelems.reduce((sum, e) => sum + typeToBytes[e.incrType.name], 0) + + elems.reduce((sum, e) => sum + typeToBytes[e.type.name], 0) + ); // Record data + while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after record + return simulatedOffset; + }); + const totalChunkSize = chunkSizes.reduce((sum, size) => sum + size, 0); + + // Start a new bundle if needed + const lastBundle = buffers[buffers.length - 1]; + if (!lastBundle || lastBundle.offset + totalChunkSize > maxBundleSize) { + buffers.push({ offset: 4, buffer: null, view: null }); + } + + // Initialize DataView for current bundle + const currentBundle = buffers[buffers.length - 1]; + if (!currentBundle.buffer) { + const requiredSize = totalChunkSize + 4; + currentBundle.buffer = new ArrayBuffer(requiredSize); + currentBundle.view = new DataView(currentBundle.buffer); + } + + // Process each chunk + for (const { records: chunkRecords, j0, Δj } of chunks) { + const chunkSize = chunkSizes.shift(); + + // Ensure buffer is large enough + if (currentBundle.offset + chunkSize > currentBundle.buffer.byteLength) { + const newSize = currentBundle.offset + chunkSize; + const newBuffer = new ArrayBuffer(newSize); + new Uint8Array(newBuffer).set(new Uint8Array(currentBundle.buffer)); + currentBundle.buffer = newBuffer; + currentBundle.view = new DataView(newBuffer); + } + + // Write chunk header + let offset = currentBundle.offset; + 
currentBundle.view.setUint8(offset++, 0x11); // Chunk type + currentBundle.view.setUint8(offset++, udv); // udv + currentBundle.view.setUint16(offset, chunkRecords.length, true); offset += 2; // count + currentBundle.view.setUint16(offset, iValue, true); offset += 2; // i + currentBundle.view.setUint16(offset, j0, true); offset += 2; // j0 + currentBundle.view.setInt16(offset, Δj, true); offset += 2; // Δj + currentBundle.view.setUint8(offset++, Δelems.length); // Δelem_count + currentBundle.view.setUint8(offset++, elems.length); // elem_count + + // Write chunk preface (element types) + for (const elem of Δelems) { + const baseCode = typeToCode[elem.baseType.name]; + const incrCode = typeToCode[elem.incrType.name]; + currentBundle.view.setUint8(offset++, (incrCode << 4) | baseCode); + } + for (const elem of elems) { + currentBundle.view.setUint8(offset++, typeToCode[elem.type.name]); + } + + // Write initial values for Δelems + for (const elem of Δelems) { + const value = elem.key(chunkRecords[0]); + if (value == null) throw new Error('Missing Δelem value from getter'); + writeTypedValue(currentBundle.view, offset, value, elem.baseType); + offset += typeToBytes[elem.baseType.name]; + } + // Pad to 4-byte boundary + while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0); + + // Write record data (non-interleaved) + for (const elem of Δelems) { + let prev = elem.key(chunkRecords[0]); + for (let idx = 0; idx < chunkRecords.length; idx++) { + const value = idx === 0 ? 0 : elem.key(chunkRecords[idx]) - prev; + writeTypedValue(currentBundle.view, offset, value, elem.incrType); + offset += typeToBytes[elem.incrType.name]; + prev = elem.key(chunkRecords[idx]); + } + } + for (const elem of elems) { + for (const record of chunkRecords) { + const value = elem.key(record); + if (value == null) throw new Error('Missing elem value from getter'); + writeTypedValue(currentBundle.view, offset, value, elem.type); + offset += typeToBytes[elem.type.name]; + } + } + // Pad to 4-byte boundary + while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0); + + // Update bundle offset + currentBundle.offset = offset; + } + + // Update bundle header + currentBundle.view.setUint32(0, 0x1C | ((currentBundle.offset - 4) << 8), true); + } + + // Combine buffers into final Uint8Array + const finalLength = buffers.reduce((sum, b) => sum + b.offset, 0); + const result = new Uint8Array(finalLength); + let offset = 0; + for (const { buffer, offset: bundleOffset } of buffers) { + result.set(new Uint8Array(buffer, 0, bundleOffset), offset); + offset += bundleOffset; + } + + return result; +} + + +function interleaved(json, iGetter, jGetter, Δelems = [], elems = [], udv = 0) { + if (!Array.isArray(json) || !json.length) return new Uint8Array(0); + if (typeof iGetter !== 'function' || typeof jGetter !== 'function') throw new Error('i and j must be getter functions'); + Δelems.forEach((elem, idx) => { + if (typeof elem.key !== 'function') throw new Error(`Δelems[${idx}].key must be a getter function`); + }); + elems.forEach((elem, idx) => { + if (typeof elem.key !== 'function') throw new Error(`elems[${idx}].key must be a getter function`); + }); + + // Group records by i value + const groups = new Map(); + for (const record of json) { + const iValue = iGetter(record); + if (iValue == null) throw new Error('Missing i value from getter'); + if (!groups.has(iValue)) groups.set(iValue, []); + groups.get(iValue).push(record); + } + + const maxBundleSize = 0xFFFFFF; // Max bundle length (24 bits) + const buffers = 
[]; + + // Process each group (i value) + for (const [iValue, records] of groups) { + // Sort records by j to ensure consistent order + records.sort((a, b) => jGetter(a) - jGetter(b)); + const jValues = records.map(jGetter); + if (jValues.some(v => v == null)) throw new Error('Missing j value from getter'); + + // Split records into chunks based on Δj continuity + const chunks = []; + let currentChunk = [records[0]]; + let currentJ0 = jValues[0]; + let currentΔj = records.length > 1 ? jValues[1] - jValues[0] : 0; + + for (let idx = 1; idx < records.length; idx++) { + const chunkIndex = chunks.reduce((sum, c) => sum + c.records.length, 0); + const expectedJ = currentJ0 + (idx - chunkIndex) * currentΔj; + if (jValues[idx] !== expectedJ || idx - chunkIndex >= 65536) { + chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj }); + currentChunk = [records[idx]]; + currentJ0 = jValues[idx]; + currentΔj = idx + 1 < records.length ? jValues[idx + 1] - jValues[idx] : 0; + } else { + currentChunk.push(records[idx]); + } + } + if (currentChunk.length > 0) { + chunks.push({ records: currentChunk, j0: currentJ0, Δj: currentΔj }); + } + + // Calculate total size for all chunks in this group by simulating offsets + const chunkSizes = chunks.map(({ records: chunkRecords }) => { + if (chunkRecords.length > 65535) throw new Error(`Chunk size exceeds 65535 for i=${iValue}`); + let simulatedOffset = 0; // Relative to chunk start + simulatedOffset += 12; // Header + simulatedOffset += Δelems.length + elems.length; // Preface + simulatedOffset += Δelems.reduce((sum, e) => sum + typeToBytes[e.baseType.name], 0); // Initial values + while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after initial + simulatedOffset += chunkRecords.length * ( + Δelems.reduce((sum, e) => sum + typeToBytes[e.incrType.name], 0) + + elems.reduce((sum, e) => sum + typeToBytes[e.type.name], 0) + ); // Interleaved record data + while (simulatedOffset % 4 !== 0) simulatedOffset++; // Pad after record + return simulatedOffset; + }); + const totalChunkSize = chunkSizes.reduce((sum, size) => sum + size, 0); + + // Start a new bundle if needed + const lastBundle = buffers[buffers.length - 1]; + if (!lastBundle || lastBundle.offset + totalChunkSize > maxBundleSize) { + buffers.push({ offset: 4, buffer: null, view: null }); + } + + // Initialize DataView for current bundle + const currentBundle = buffers[buffers.length - 1]; + if (!currentBundle.buffer) { + const requiredSize = totalChunkSize + 4; + currentBundle.buffer = new ArrayBuffer(requiredSize); + currentBundle.view = new DataView(currentBundle.buffer); + } + + // Process each chunk + for (const { records: chunkRecords, j0, Δj } of chunks) { + const chunkSize = chunkSizes.shift(); + + // Ensure buffer is large enough + if (currentBundle.offset + chunkSize > currentBundle.buffer.byteLength) { + const newSize = currentBundle.offset + chunkSize; + const newBuffer = new ArrayBuffer(newSize); + new Uint8Array(newBuffer).set(new Uint8Array(currentBundle.buffer)); + currentBundle.buffer = newBuffer; + currentBundle.view = new DataView(newBuffer); + } + + // Write chunk header + let offset = currentBundle.offset; + currentBundle.view.setUint8(offset++, 0x12); // Chunk type + currentBundle.view.setUint8(offset++, udv); // udv + currentBundle.view.setUint16(offset, chunkRecords.length, true); offset += 2; // count + currentBundle.view.setUint16(offset, iValue, true); offset += 2; // i + currentBundle.view.setUint16(offset, j0, true); offset += 2; // j0 + 
currentBundle.view.setInt16(offset, Δj, true); offset += 2; // Δj + currentBundle.view.setUint8(offset++, Δelems.length); // Δelem_count + currentBundle.view.setUint8(offset++, elems.length); // elem_count + + // Write chunk preface (element types) + for (const elem of Δelems) { + const baseCode = typeToCode[elem.baseType.name]; + const incrCode = typeToCode[elem.incrType.name]; + currentBundle.view.setUint8(offset++, (incrCode << 4) | baseCode); + } + for (const elem of elems) { + currentBundle.view.setUint8(offset++, typeToCode[elem.type.name]); + } + + // Write initial values for Δelems + for (const elem of Δelems) { + const value = elem.key(chunkRecords[0]); + if (value == null) throw new Error('Missing Δelem value from getter'); + writeTypedValue(currentBundle.view, offset, value, elem.baseType); + offset += typeToBytes[elem.baseType.name]; + } + // Pad to 4-byte boundary + while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0); + + // Write interleaved record data + const prevValues = Δelems.map(elem => elem.key(chunkRecords[0])); + for (let idx = 0; idx < chunkRecords.length; idx++) { + // Write Δelems increments + for (let i = 0; i < Δelems.length; i++) { + const elem = Δelems[i]; + const value = idx === 0 ? 0 : elem.key(chunkRecords[idx]) - prevValues[i]; + writeTypedValue(currentBundle.view, offset, value, elem.incrType); + offset += typeToBytes[elem.incrType.name]; + prevValues[i] = elem.key(chunkRecords[idx]); + } + // Write elems + for (const elem of elems) { + const value = elem.key(chunkRecords[idx]); + if (value == null) throw new Error('Missing elem value from getter'); + writeTypedValue(currentBundle.view, offset, value, elem.type); + offset += typeToBytes[elem.type.name]; + } + } + // Pad to 4-byte boundary + while (offset % 4 !== 0) currentBundle.view.setUint8(offset++, 0); + + // Update bundle offset + currentBundle.offset = offset; + } + + // Update bundle header + currentBundle.view.setUint32(0, 0x1C | ((currentBundle.offset - 4) << 8), true); + } + + // Combine buffers into final Uint8Array + const finalLength = buffers.reduce((sum, b) => sum + b.offset, 0); + const result = new Uint8Array(finalLength); + let offset = 0; + for (const { buffer, offset: bundleOffset } of buffers) { + result.set(new Uint8Array(buffer, 0, bundleOffset), offset); + offset += bundleOffset; + } + + return result; +} + + +function writeTypedValue(view, offset, value, type) { + switch (type) { + case Int8Array: view.setInt8(offset, value); break; + case Uint8Array: view.setUint8(offset, value); break; + case Int16Array: view.setInt16(offset, value, true); break; + case Uint16Array: view.setUint16(offset, value, true); break; + case Int32Array: view.setInt32(offset, value, true); break; + case Uint32Array: view.setUint32(offset, value, true); break; + case Float32Array: view.setFloat32(offset, value, true); break; + case Float64Array: view.setFloat64(offset, value, true); break; + case BigInt64Array: view.setBigInt64(offset, BigInt(value), true); break; + case BigUint64Array: view.setBigUint64(offset, BigInt(value), true); break; + default: throw new Error(`Unsupported type: ${type.name}`); + } +} + +module.exports = { sequential, interleaved }; diff --git a/lib/modules/@dougal/binary/index.js b/lib/modules/@dougal/binary/index.js new file mode 100644 index 0000000..7c2b33a --- /dev/null +++ b/lib/modules/@dougal/binary/index.js @@ -0,0 +1,139 @@ + +/** Binary encoder + * + * This module encodes scalar data from a grid-like source + * into a packed binary format for bandwidth 
efficiency and
+ * speed of access.
+ *
+ * Data are indexed by i & j values, with "i" being constant
+ * (e.g., a sequence or line number) and "j" expected to change
+ * by a constant, linear amount (e.g., point numbers). All data
+ * from consecutive "j" values will be encoded as a single array
+ * (or series of arrays if multiple values are encoded).
+ * If there is a jump in the "j" progression, a new "chunk" will
+ * be started with a new array (or series of arrays).
+ *
+ * Multiple values may be encoded per (i, j) pair, using any of
+ * the types supported by JavaScript's TypedArrays except for
+ * Float16Array and Uint8ClampedArray. Each variable can be
+ * encoded with a different size.
+ *
+ * Values may be encoded directly or as deltas from an initial
+ * value. The latter is particularly efficient when dealing with
+ * monotonically incrementing data, such as timestamps.
+ *
+ * The conceptual packet format for sequentially encoded data
+ * looks like this:
+ *
+ *   <chunk_type: 0x11> <udv>
+ *   <count: x> <i> <j0> <Δj>
+ *   <Δelement_count: y> <element_count: z>
+ *
+ *   <Δelement_1_type_base> … <Δelement_y_type_base>
+ *   <Δelement_1_type_incr> … <Δelement_y_type_incr>
+ *   <element_1_type> … <element_z_type>
+ *
+ *   <Δelement_1_first> … <Δelement_y_first>
+ *   <pad to 32-bit boundary>
+ *
+ *   <Δelem_1_0> … <Δelem_1_x>
+ *   …
+ *   <Δelem_y_0> … <Δelem_y_x>
+ *
+ *   <elem_1_0> … <elem_1_x>
+ *   …
+ *   <elem_z_0> … <elem_z_x>
+ *   <pad to 32-bit boundary>
+ *
+ * The conceptual packet format for interleaved encoded data
+ * looks like this:
+ *
+ *   <chunk_type: 0x12> <udv>
+ *   <count: x> <i> <j0> <Δj>
+ *   <Δelement_count: y> <element_count: z>
+ *
+ *   <Δelement_1_type_base> … <Δelement_y_type_base>
+ *   <Δelement_1_type_incr> … <Δelement_y_type_incr>
+ *   <element_1_type> … <element_z_type>
+ *
+ *   <Δelement_1_first> … <Δelement_y_first>
+ *   <pad to 32-bit boundary>
+ *
+ *   <Δelem_1_0> <Δelem_2_0> … <Δelem_y_0> <elem_1_0> … <elem_z_0>
+ *   <Δelem_1_1> <Δelem_2_1> … <Δelem_y_1> <elem_1_1> … <elem_z_1>
+ *   …
+ *   <Δelem_1_x> <Δelem_2_x> … <Δelem_y_x> <elem_1_x> … <elem_z_x>
+ *   <pad to 32-bit boundary>
+ *
+ * Usage example:
+ *
+ *   json = [
+ *     {
+ *       sequence: 7,
+ *       sailline: 5354,
+ *       line: 5356,
+ *       point: 1068,
+ *       tstamp: 1695448704372,
+ *       objrefraw: 3,
+ *       objreffinal: 4
+ *     },
+ *     {
+ *       sequence: 7,
+ *       sailline: 5354,
+ *       line: 5352,
+ *       point: 1070,
+ *       tstamp: 1695448693612,
+ *       objrefraw: 2,
+ *       objreffinal: 3
+ *     },
+ *     {
+ *       sequence: 7,
+ *       sailline: 5354,
+ *       line: 5356,
+ *       point: 1072,
+ *       tstamp: 1695448684624,
+ *       objrefraw: 3,
+ *       objreffinal: 4
+ *     }
+ *   ];
+ *
+ *   deltas = [
+ *     { key: el => el.tstamp, baseType: BigUint64Array, incrType: Int16Array }
+ *   ];
+ *
+ *   elems = [
+ *     { key: el => el.objrefraw, type: Uint8Array },
+ *     { key: el => el.objreffinal, type: Uint8Array }
+ *   ];
+ *
+ *   i = el => el.sequence;
+ *
+ *   j = el => el.point;
+ *
+ *   bundle = encode.sequential(json, i, j, deltas, elems);
+ *
+ *   // bundle:
+ *
+ *   Uint8Array(40) [
+ *     28, 36, 0, 0, 17, 0, 3, 0, 7, 0,
+ *     44, 4, 2, 0, 1, 2, 42, 1, 1, 116,
+ *     37, 158, 192, 138, 1, 0, 0, 0, 0, 0,
+ *     248, 213, 228, 220, 3, 2, 3, 4, 3, 4
+ *   ]
+ *
+ *   decode.sequential(bundle);
+ *
+ *   {
+ *     i: 7,
+ *     j: [ 1068, 1070, 1072 ],
+ *     'Δelems': [ [ 1695448704372, 1695448693612, 1695448684624 ] ],
+ *     elems: [ [ 3, 2, 3 ], [ 4, 3, 4 ] ]
+ *   }
+ *
+ */
+
+module.exports = {
+  encode: {...require('./encode')},
+  decode: {...require('./decode')},
+  ...require('./classes')
+};
diff --git a/lib/modules/@dougal/binary/package.json b/lib/modules/@dougal/binary/package.json
new file mode 100644
index 0000000..8a1a08d
--- /dev/null
+++ b/lib/modules/@dougal/binary/package.json
@@ -0,0 +1,12 @@
+{
+  "name": "@dougal/binary",
+  "version": "1.0.0",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "ISC",
+  "description": ""
+}
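A minimal client-side sketch of walking a received bundle with the classes
added above (assumes `payload` is a Uint8Array whose underlying buffer holds
exactly the encoded bytes; the variable name and require path are
illustrative):

    const { DougalBinaryBundle } = require('@dougal/binary');

    const bundle = DougalBinaryBundle.clone(payload.buffer);
    for (const chunk of bundle.chunks()) {
      for (let idx = 0; idx < chunk.jCount; idx++) {
        // [udv, i, j, …reconstructed Δelems, …elems]
        console.log(chunk.getRecord(idx));
      }
    }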