From 7eed8657133e96a1b869335144358518076b69e8 Mon Sep 17 00:00:00 2001 From: Alan Gutierrez Date: Sat, 11 Jul 2020 17:39:03 -0500 Subject: [PATCH] Serialize and parse fixed byte arrays as buffers. Implement serialization and parse of buffers, fixed and terminated. Incremental parsing padding fixed arrays is not redirected to the terminated parser implementation which will add checks for reaching the maxiumu fixed length array. Incremental terminated buffer parsing is implemented, but not serialization and not synchrnous operation. Buffers are either catenated, meaning during incremental parse, multiple buffers are gathered, than catenated into a single buffer, or else they are left as an array of buffers which are themselves slices of the underlying buffer. Closes #506. Closes #513. Closes #517. --- hex.js | 10 + parse.all.js | 53 +- parse.inc.js | 513 +++++++++++++----- redux.md | 51 ++ serialize.all.js | 63 ++- serialize.inc.js | 228 +++++++- test/cycle/cycle.js | 26 +- test/cycle/fixed.t.js | 105 +++- .../accumulator/counted.parser.inc.js | 12 +- .../accumulator/counted.serializer.inc.js | 2 +- .../accumulator/function.serializer.inc.js | 2 +- .../accumulator/object.serializer.inc.js | 2 +- .../accumulator/regex.serializer.inc.js | 2 +- test/generated/array/fixed.serializer.inc.js | 2 +- test/generated/array/nested.serializer.inc.js | 2 +- .../array/variable.serializer.inc.js | 2 +- test/generated/array/words.serializer.inc.js | 2 +- .../assert/dollarunder.serializer.inc.js | 2 +- test/generated/assert/named.serializer.inc.js | 2 +- .../assert/positional.serializer.inc.js | 2 +- .../bidirectional.serializer.inc.js | 2 +- .../conditional/mysql.serializer.inc.js | 2 +- .../conditional/packed.serializer.inc.js | 2 +- .../conditional/sipless.serializer.inc.js | 2 +- .../fixed/chunked/multiple.parser.all.js | 28 + .../fixed/chunked/multiple.parser.bff.js | 34 ++ .../fixed/chunked/multiple.parser.inc.js | 123 +++++ .../fixed/chunked/multiple.serializer.all.js | 26 + 
.../fixed/chunked/multiple.serializer.bff.js | 30 + .../fixed/chunked/multiple.serializer.inc.js | 112 ++++ .../multiple.sizeof.js} | 0 .../fixed/chunked/single.parser.all.js | 40 ++ .../fixed/chunked/single.parser.bff.js | 54 ++ .../fixed/chunked/single.parser.inc.js | 136 +++++ .../fixed/chunked/single.serializer.all.js | 30 + .../fixed/chunked/single.serializer.bff.js | 34 ++ .../fixed/chunked/single.serializer.inc.js | 119 ++++ .../single.sizeof.js} | 0 .../unpadded.parser.all.js} | 20 +- .../unpadded.parser.bff.js} | 26 +- .../fixed/chunked/unpadded.parser.inc.js | 93 ++++ .../fixed/chunked/unpadded.serializer.all.js | 26 + .../fixed/chunked/unpadded.serializer.bff.js | 30 + .../fixed/chunked/unpadded.serializer.inc.js | 98 ++++ .../unpadded.sizeof.js} | 4 +- .../fixed/concat/multiple.parser.all.js | 40 ++ .../fixed/concat/multiple.parser.bff.js | 54 ++ .../fixed/concat/multiple.parser.inc.js | 150 +++++ .../fixed/concat/multiple.serializer.all.js | 28 + .../fixed/concat/multiple.serializer.bff.js | 32 ++ .../fixed/concat/multiple.serializer.inc.js | 120 ++++ .../generated/fixed/concat/multiple.sizeof.js | 15 + .../fixed/concat/single.parser.all.js | 40 ++ .../fixed/concat/single.parser.bff.js | 54 ++ .../fixed/concat/single.parser.inc.js | 136 +++++ .../fixed/concat/single.serializer.all.js | 28 + .../fixed/concat/single.serializer.bff.js | 32 ++ .../fixed/concat/single.serializer.inc.js | 106 ++++ test/generated/fixed/concat/single.sizeof.js | 15 + .../fixed/concat/unpadded.parser.all.js | 34 ++ .../fixed/concat/unpadded.parser.bff.js | 48 ++ .../fixed/concat/unpadded.parser.inc.js | 90 +++ .../fixed/concat/unpadded.serializer.all.js | 24 + .../fixed/concat/unpadded.serializer.bff.js | 28 + .../fixed/concat/unpadded.serializer.inc.js | 85 +++ .../generated/fixed/concat/unpadded.sizeof.js | 15 + .../{padded => words}/multi.parser.all.js | 20 +- .../{padded => words}/multi.parser.bff.js | 30 +- .../{padded => words}/multi.parser.inc.js | 66 ++- .../{padded => 
words}/multi.serializer.all.js | 6 +- .../{padded => words}/multi.serializer.bff.js | 8 +- .../{padded => words}/multi.serializer.inc.js | 45 +- test/generated/fixed/words/multi.sizeof.js | 15 + .../fixed/words/single.parser.all.js | 52 ++ .../fixed/words/single.parser.bff.js | 70 +++ .../{padded => words}/single.parser.inc.js | 68 ++- .../single.serializer.all.js | 6 +- .../single.serializer.bff.js | 8 +- .../single.serializer.inc.js | 43 +- test/generated/fixed/words/single.sizeof.js | 15 + .../unpadded.parser.all.js} | 0 .../unpadded.parser.bff.js} | 4 +- .../unpadded.parser.inc.js} | 45 +- .../unpadded.serializer.all.js} | 0 .../unpadded.serializer.bff.js} | 0 .../unpadded.serializer.inc.js} | 12 +- test/generated/fixed/words/unpadded.sizeof.js | 15 + test/generated/ieee/be/double.parser.inc.js | 23 +- .../ieee/be/double.serializer.inc.js | 12 +- test/generated/ieee/be/float.parser.inc.js | 23 +- .../generated/ieee/be/float.serializer.inc.js | 12 +- test/generated/ieee/le/double.parser.inc.js | 23 +- .../ieee/le/double.serializer.inc.js | 12 +- test/generated/ieee/le/float.parser.inc.js | 23 +- .../generated/ieee/le/float.serializer.inc.js | 12 +- .../ieee/specified/be/double.parser.inc.js | 23 +- .../specified/be/double.serializer.inc.js | 12 +- .../ieee/specified/be/float.parser.inc.js | 23 +- .../ieee/specified/be/float.serializer.inc.js | 12 +- .../ieee/specified/le/double.parser.inc.js | 23 +- .../specified/le/double.serializer.inc.js | 12 +- .../ieee/specified/le/float.parser.inc.js | 23 +- .../ieee/specified/le/float.serializer.inc.js | 12 +- test/generated/inline/after.serializer.inc.js | 2 +- .../generated/inline/before.serializer.inc.js | 2 +- test/generated/inline/both.serializer.inc.js | 2 +- .../inline/mirrored.serializer.inc.js | 2 +- test/generated/inline/named.serializer.inc.js | 2 +- .../be/compliment/int.serializer.inc.js | 2 +- .../be/compliment/long.serializer.inc.js | 2 +- .../be/compliment/short.serializer.inc.js | 2 +- 
.../integer/be/word/int.serializer.inc.js | 2 +- .../integer/be/word/long.serializer.inc.js | 2 +- .../integer/be/word/short.serializer.inc.js | 2 +- .../integer/byte/whole.serializer.inc.js | 2 +- .../le/compliment/int.serializer.inc.js | 2 +- .../le/compliment/long.serializer.inc.js | 2 +- .../le/compliment/short.serializer.inc.js | 2 +- .../integer/le/word/int.serializer.inc.js | 2 +- .../integer/le/word/long.serializer.inc.js | 2 +- .../integer/le/word/short.serializer.inc.js | 2 +- test/generated/literal.serializer.inc.js | 2 +- test/generated/little.serializer.inc.js | 2 +- test/generated/lookup.serializer.inc.js | 2 +- .../require/module.serializer.inc.js | 2 +- test/generated/skip.serializer.inc.js | 2 +- test/generated/structure.serializer.inc.js | 2 +- .../switched/packed/strings.serializer.inc.js | 2 +- .../switched/packed/variant.serializer.inc.js | 2 +- .../switched/strings.serializer.inc.js | 2 +- .../switched/variant.serializer.inc.js | 2 +- .../generated/terminated/concat.parser.inc.js | 16 +- .../terminated/concat.serializer.inc.js | 2 +- .../generated/terminated/nested.parser.inc.js | 6 +- .../terminated/nested.serializer.inc.js | 2 +- test/generated/terminated/words.parser.inc.js | 4 +- .../terminated/words.serializer.inc.js | 2 +- 137 files changed, 3681 insertions(+), 543 deletions(-) create mode 100644 hex.js create mode 100644 test/generated/fixed/chunked/multiple.parser.all.js create mode 100644 test/generated/fixed/chunked/multiple.parser.bff.js create mode 100644 test/generated/fixed/chunked/multiple.parser.inc.js create mode 100644 test/generated/fixed/chunked/multiple.serializer.all.js create mode 100644 test/generated/fixed/chunked/multiple.serializer.bff.js create mode 100644 test/generated/fixed/chunked/multiple.serializer.inc.js rename test/generated/fixed/{padded/single.sizeof.js => chunked/multiple.sizeof.js} (100%) create mode 100644 test/generated/fixed/chunked/single.parser.all.js create mode 100644 
test/generated/fixed/chunked/single.parser.bff.js create mode 100644 test/generated/fixed/chunked/single.parser.inc.js create mode 100644 test/generated/fixed/chunked/single.serializer.all.js create mode 100644 test/generated/fixed/chunked/single.serializer.bff.js create mode 100644 test/generated/fixed/chunked/single.serializer.inc.js rename test/generated/fixed/{words.sizeof.js => chunked/single.sizeof.js} (100%) rename test/generated/fixed/{padded/single.parser.all.js => chunked/unpadded.parser.all.js} (63%) rename test/generated/fixed/{padded/single.parser.bff.js => chunked/unpadded.parser.bff.js} (63%) create mode 100644 test/generated/fixed/chunked/unpadded.parser.inc.js create mode 100644 test/generated/fixed/chunked/unpadded.serializer.all.js create mode 100644 test/generated/fixed/chunked/unpadded.serializer.bff.js create mode 100644 test/generated/fixed/chunked/unpadded.serializer.inc.js rename test/generated/fixed/{padded/multi.sizeof.js => chunked/unpadded.sizeof.js} (71%) create mode 100644 test/generated/fixed/concat/multiple.parser.all.js create mode 100644 test/generated/fixed/concat/multiple.parser.bff.js create mode 100644 test/generated/fixed/concat/multiple.parser.inc.js create mode 100644 test/generated/fixed/concat/multiple.serializer.all.js create mode 100644 test/generated/fixed/concat/multiple.serializer.bff.js create mode 100644 test/generated/fixed/concat/multiple.serializer.inc.js create mode 100644 test/generated/fixed/concat/multiple.sizeof.js create mode 100644 test/generated/fixed/concat/single.parser.all.js create mode 100644 test/generated/fixed/concat/single.parser.bff.js create mode 100644 test/generated/fixed/concat/single.parser.inc.js create mode 100644 test/generated/fixed/concat/single.serializer.all.js create mode 100644 test/generated/fixed/concat/single.serializer.bff.js create mode 100644 test/generated/fixed/concat/single.serializer.inc.js create mode 100644 test/generated/fixed/concat/single.sizeof.js create mode 
100644 test/generated/fixed/concat/unpadded.parser.all.js create mode 100644 test/generated/fixed/concat/unpadded.parser.bff.js create mode 100644 test/generated/fixed/concat/unpadded.parser.inc.js create mode 100644 test/generated/fixed/concat/unpadded.serializer.all.js create mode 100644 test/generated/fixed/concat/unpadded.serializer.bff.js create mode 100644 test/generated/fixed/concat/unpadded.serializer.inc.js create mode 100644 test/generated/fixed/concat/unpadded.sizeof.js rename test/generated/fixed/{padded => words}/multi.parser.all.js (61%) rename test/generated/fixed/{padded => words}/multi.parser.bff.js (57%) rename test/generated/fixed/{padded => words}/multi.parser.inc.js (69%) rename test/generated/fixed/{padded => words}/multi.serializer.all.js (80%) rename test/generated/fixed/{padded => words}/multi.serializer.bff.js (77%) rename test/generated/fixed/{padded => words}/multi.serializer.inc.js (76%) create mode 100644 test/generated/fixed/words/multi.sizeof.js create mode 100644 test/generated/fixed/words/single.parser.all.js create mode 100644 test/generated/fixed/words/single.parser.bff.js rename test/generated/fixed/{padded => words}/single.parser.inc.js (63%) rename test/generated/fixed/{padded => words}/single.serializer.all.js (76%) rename test/generated/fixed/{padded => words}/single.serializer.bff.js (74%) rename test/generated/fixed/{padded => words}/single.serializer.inc.js (74%) create mode 100644 test/generated/fixed/words/single.sizeof.js rename test/generated/fixed/{words.parser.all.js => words/unpadded.parser.all.js} (100%) rename test/generated/fixed/{words.parser.bff.js => words/unpadded.parser.bff.js} (94%) rename test/generated/fixed/{words.parser.inc.js => words/unpadded.parser.inc.js} (84%) rename test/generated/fixed/{words.serializer.all.js => words/unpadded.serializer.all.js} (100%) rename test/generated/fixed/{words.serializer.bff.js => words/unpadded.serializer.bff.js} (100%) rename 
test/generated/fixed/{words.serializer.inc.js => words/unpadded.serializer.inc.js} (92%) create mode 100644 test/generated/fixed/words/unpadded.sizeof.js diff --git a/hex.js b/hex.js new file mode 100644 index 00000000..9efa6937 --- /dev/null +++ b/hex.js @@ -0,0 +1,10 @@ +function hex (value) { + switch (typeof value) { + case 'number': + return `0x${value.toString(16)}` + case 'object': + return `[ ${value.map(value => hex(value)).join(', ')} ]` + } +} + +module.exports = hex diff --git a/parse.all.js b/parse.all.js index 4c5839b0..24e558ca 100644 --- a/parse.all.js +++ b/parse.all.js @@ -387,17 +387,57 @@ function generate (packet, { require, bff }) { } function fixed (path, field) { + // Fetch the type of element. + const element = field.fields[field.fields.length - 1] + // + + // Buffers can use `indexOf`, `fill` and `copy` and will be much faster + // than operating byte-by-byte. + + // + if (element.type == 'buffer') { + variables.register = true + variables.slice = true + // Advance past buffer read to padding skip. + $step += field.pad.length == 0 ? 2 : 3 + const slice = $(` + $slice = $buffer.slice($start, ${field.length}) + $start += ${field.length} + `) + const assign = element.concat ? `${path} = $slice` : `${path}.push($slice)` + if (field.pad.length != 0) { + $step += field.pad.length + const pad = field.pad.length > 1 + ? `Buffer.from(${util.format(field.pad)})` + : field.pad[0] + return ($(` + `, slice, ` + + $_ = $slice.indexOf(${pad}) + if (~$_) { + $slice = $buffer.slice(0, $_) + } + + `, assign, ` + `)) + } + // See: https://marcradziwill.com/blog/mastering-javascript-high-performance/ + return ($(` + `, slice, ` + `, assign, ` + `)) + } variables.i = true const i = `$i[${++$i}]` $step += 1 const check = bff && field.pad.length != 0 ? checkpoint({ lengths: [ field.pad.length ] }) : null - $step += 1 - $step += field.pad.length + // Advance past initialization and terminator tests. 
+ $step += 1 + field.pad.length const looped = join(field.fields.map(field => dispatch(path + `[${i}]`, field))) - $step += field.pad.length - $step += 3 // Skip termination test and fill. + // Advance past end-of-loop test and fill skip. + $step += 1 + (field.pad.length != 0 ? 2 : 0) const terminator = field.pad.map((bite, index) => { if (index == 0) { return `$buffer[$start] == 0x${bite.toString(16)}` @@ -438,7 +478,9 @@ function generate (packet, { require, bff }) { return $(` `, source, ` - $start += (${field.length} - ${i}) * ${field.bits / field.length / 8} - ${field.pad.length} + $start += ${field.length} != ${i} + ? (${field.length} - ${i}) * ${field.bits / field.length / 8} - ${field.pad.length} + : 0 `) } return source @@ -663,6 +705,7 @@ function generate (packet, { require, bff }) { i: '$i = []', I: '$I = []', sip: '$sip = []', + slice: '$slice = null', accumulator: '$accumulator = {}', starts: '$starts = []' } diff --git a/parse.inc.js b/parse.inc.js index 905b9374..bc52880d 100644 --- a/parse.inc.js +++ b/parse.inc.js @@ -1,5 +1,9 @@ // Node.js API. const util = require('util') +const { inspect } = require('util') + +// Convert numbers and arrays to numbers to literals with hex literals. +const hex = require('./hex') // Format source code maintaining indentation. const $ = require('programmatic') @@ -28,20 +32,42 @@ const inliner = require('./inliner') // Generate required modules and functions. const required = require('./required') -const map = require('./map') - // Format source code maintaining indentation. const join = require('./join') +const map = require('./map') + // Join an array of strings with first line of subsequent element catenated to // last line of previous element. const snuggle = require('./snuggle') +// + +// Generate an incremental parser from our AST. 
+ +// function generate (packet, { require = null }) { - let $step = 0, $i = -1, $sip = -1, accumulators = {}, surround = false + // Whether or not to surround the switch statement with a forever loop. + let surround = false + + // Current step being generated. + let $step = 0 + + // Current position in the array of array indices. + let $i = -1 + + // Current position in the array of sipped words. + let $sip = -1 + + // An map of parser scoped variable definitions to their initialization + // values. + const locals = {} + // Determine which variables will be passed into in this parser from a + // best-foot-forward parse. const variables = declare(packet) + // An object that tracks the declaration of accumulators. const accumulate = { accumulator: {}, accumulated: [], @@ -50,6 +76,8 @@ function generate (packet, { require = null }) { packet: packet.name, direction: 'parse' } + + // Gather up declared lookup constants. const $lookup = {} function integer (path, field) { @@ -72,6 +100,7 @@ function generate (packet, { require = null }) { `) } + // const start = field.endianness == 'big' ? bytes - 1 : 0 const stop = field.endianness == 'big' ? -1 : bytes const direction = field.endianness == 'big' ? '--' : '++' @@ -179,44 +208,132 @@ function generate (packet, { require = null }) { // Seems like in the past I would read the terminator into an array and if // it didn't match, I'd feed the array to the parser, this would handle long // weird terminators. + + // function terminated (path, field) { + // We will be looping. surround = true + // Get the element type contained by the array. + const element = field.fields[field.fields.length - 1] + // Our terminator is the padding definition for padded fixed arrays. + const bytes = field.terminator || field.pad + // Generate any buffered function calls to process the buffer if we + // reach the end of the buffer. + const buffered = accumulate.buffered.length != 0 + ? 
accumulate.buffered.map(buffered => buffered.source).join('\n') + : null + // Skip the remainder for of a fixed padded buffer. Common to buffered + // and byte-by-byte fixed arrays, not used for terminated. Note that + // it's a function because of the `$step++`. + function skip (i) { + return $(` + case ${$step++}: { + + const length = Math.min($_, $end - $start) + $start += length + $_ -= length + + if ($_ != 0) { + `, buffered, ` + return { start: $start, parse } + } + + $step = ${$step} + + } + `) + } + // + + // Buffers are a special case. Data is raw, can be copied in bulk, + // terminators can be found with `indexOf`. Separate implemention for + // buffers. + + // if (field.fields[0].type == 'buffer') { - variables.buffers = true + locals['buffers'] = '[]' + if (field.fixed) { + locals['length'] = 0 + } const redo = $step + 1 - const terminator = field.terminator - const buffered = accumulate.buffered.map(buffered => buffered.source) - const slice = $(` + // **TODO** This is off for a multi-byte terminator that occurs at + // the last element. Would begin trying to match the terminator and + // go past the end of the buffer. + const slice = field.type == 'fixed' ? 
$(` case ${$step++}: - $_ = $buffer.indexOf(${terminator[0]}, $start) - if (~$_) { - $buffers.push($buffer.slice($start, $_)) - $start = $_ + 1 + $_ = 0 + + $step = ${$step} + + case ${$step++}: { + + const $index = $buffer.indexOf(${hex(bytes[0])}, $start) + if (~$index) { + if ($_ + $index > ${field.length}) { + const $length = ${field.length} - $_ + $buffers.push($buffer.slice($start, $start + $length)) + $_ += $length + $start += $length + $step = ${$step + field.pad.length - 1} + continue + } else { + $buffers.push($buffer.slice($start, $index)) + $_ += ($index - $start) + 1 + $start = $index + 1 + $step = ${$step} + continue + } + } else if ($_ + ($end - $start) >= ${field.length}) { + const $length = ${field.length} - $_ + $buffers.push($buffer.slice($start, $start + $length)) + $_ += $length + $start += $length + $step = ${$step + field.pad.length - 1} + continue + } else { + $_ += $end - $start + $buffers.push($buffer.slice($start)) + `, buffered, ` + return { start: $end, parse } + } + + $step = ${$step} + + } + + `) : $(` + case ${$step++}: { + + const $index = $buffer.indexOf(${hex(bytes[0])}, $start) + if (~$index) { + $buffers.push($buffer.slice($start, $index)) + $start = $index + 1 $step = ${$step} continue } else { $buffers.push($buffer.slice($start)) - `, buffered.length != 0 ? buffered.join('\n') : null, ` + `, buffered, ` return { start: $end, parse } } $step = ${$step} + } `) const subsequent = [] - const done = $step + terminator.length - for (let i = 1; i < terminator.length; i++) { - const sofar = util.inspect(terminator.slice(0, i)) + const done = $step + bytes.length + for (let i = 1; i < bytes.length; i++) { + const sofar = util.inspect(bytes.slice(0, i)) subsequent.push($(` case ${$step++}: if ($start == $end) { - `, buffered.length != 0 ? 
buffered.join('\n') : null, ` + `, buffered, ` return { start: $start, parse } } - if ($buffer[$start++] != ${terminator[1]}) { + if ($buffer[$start++] != ${hex(bytes[1])}) { $buffers.push(Buffer.from(${sofar}.concat($buffer[$start]))) $step = ${redo} continue @@ -225,68 +342,156 @@ function generate (packet, { require = null }) { $step = ${$step} `)) } + // Assignment buffer with a possible recording of length so far if + // we have to skip padding. + function assign () { + // **TODO** Could use the calculation of `$_` above, but would + // have to special case `$_` everywhere for fixed/terminated and + // make the code in here ugly. + const length = field.type == 'fixed' ? $(` + $_ = ${field.length} - Math.min($buffers.reduce((sum, buffer) => { + return sum + buffer.length + }, ${bytes.length}), ${field.length}) + `) : null + return element.concat ? $(` + case ${$step++}: + + `, length, ` + + ${path} = $buffers.length == 1 ? $buffers[0] : Buffer.concat($buffers) + $buffers.length = 0 + + $step = ${$step} + `) : $(` + case ${$step++}: + + `, length, ` + + ${path} = $buffers + $buffers = [] + + $step = ${$step} + `) + } + if (field.type == 'terminated') { + return $(` + `, slice, ` + + `, subsequent.length != 0 ? join(subsequent) : null, -1, ` + + `, assign(), ` + `) + } return $(` `, slice, ` - `, subsequent.length != 0 ? join(subsequent) : null, ` + `, subsequent.length != 0 ? join(subsequent) : null, -1, ` - case ${$step++}: - - ${path} = $buffers.length == 1 ? $buffers[0] : Buffer.concat($buffers) - $buffers.length = 0 + `, assign(), ` - $step = 5 + `, skip(), ` `) } - $i++ - const i = `$i[${$i}]` + // Our regular parsing seeks terminators at the start of each iteration + // of the parse loop. + + // Obtain a next index from the index array. + const i = `$i[${++$i}]` + // Initialization step. const init = $step - let sip = ++$step - const redo = $step - const begin = $step += field.terminator.length + // Start of element fields, loop reset. 
+ const redo = ++$step + // We need a step for each byte in the terminator. + const begin = $step += bytes.length + // We will sometimes have a vivification step to an object element. $step++ - const buffered = accumulate.buffered.map(buffered => buffered.source) + // Create the body of the loop. const looped = join(field.fields.map(field => dispatch(`${path}[${i}]`, field))) - const literal = field.terminator.map(bite => `0x${bite.toString(16)}`) - const terminator = join(field.terminator.map((bite, index) => { - if (index != field.terminator.length - 1) { - return $(` - case ${sip++}: + // Step of next field is after a final loop jump step. + const done = $step + 1 + // - if ($start == $end) { - `, buffered.length != 0 ? buffered.join('\n') : null, ` - return { start: $start, parse } - } + // Generate the terminator detection. - if ($buffer[$start] != 0x${bite.toString(16)}) { - $step = ${begin} - continue - } + // + const fixed = field.type == 'fixed' + ? $(` + if (${i} == ${field.length}) { + $step = ${done} + continue + } + `) : null + const terminator = bytes.length == 1 + // If we have a single byte terminator, we skip over the loop if the + // we see the byte. A multi-byte terminator is more complicated. + ? $(` + case ${redo}: + + `, fixed, -1, ` + + if ($start == $end) { + `, buffered, ` + return { start: $start, parse } + } + + if ($buffer[$start] == ${hex(bytes[0])}) { $start++ + $step = ${done} + continue + } - $step = ${sip} - `) - } else { + $step = ${begin} + `) + // For a multi-byte terminator we have a step for each byte. + // + // For every terminator byte last check to see if it + // matches the byte in the buffer. If it does we fall through to + // test next byte. If not we set the `$step` to the start of the + // body. + // + // Subsequent to the first byte we will have matched and skipped + // bytes but we'll know what they where, so we can still parse them + // by calling the defined `$parse` function with a literal buffer. 
+ // + // If the last byte does not match we jump to the end. The last byte + // might seem like a good place to fall through instead of jumping, + // but we will have already begun parsing by parsing the terminator + // literal and it will have proceded past the initialization of the + // next field. We won't know how many initialization steps there, it + // varies based on field and even if we did attempt to ensure that + // every field type had a single initialization step it would still + // vary due to nesting. + : join(bytes.map((bite, index) => { + const parse = index != 0 + ? `parse(Buffer.from(${hex(bytes.slice(0, index))}), 0, ${index})` + : null + const next = index != literal.length - 1 + ? `$step = ${redo + index + 1}` + : $(` + $step = ${done} + continue + `) return $(` - case ${sip++}: + case ${redo + index}: + + `, index == 0 ? fixed : null, -1, ` if ($start == $end) { - `, buffered.length != 0 ? buffered.join('\n') : null, ` + `, buffered, ` return { start: $start, parse } } - if ($buffer[$start] != 0x${bite.toString(16)}) { + if ($buffer[$start] != ${hex(bite)}) { $step = ${begin} - parse([ ${literal.slice(0, index).join(', ')} ], 0, ${index}) + `, parse, ` continue } $start++ - $step = ${$step + 1} - continue + `, next, ` `) - } - })) + })) + // Put it all together. const source = $(` case ${init}: @@ -306,7 +511,26 @@ function generate (packet, { require = null }) { $step = ${redo} continue `) + // Release the array index from the array of indices. $i-- + // If we are actually padded fixed array, we need to skip over the + // remaining bytes in the fixed width field. + locals['length'] = 0 + if (field.type == 'fixed') { + return $(` + `, source, ` + + case ${$step++}: + + $_ = ${field.length} != ${i} + ? 
(${field.length} - ${i}) * ${element.bits / 8} - ${bytes.length} + : 0 + + $step = ${$step} + + `, skip(`(${i} + ${bytes.length})`), ` + `) + } return source } @@ -398,6 +622,8 @@ function generate (packet, { require = null }) { `) } + // TODO: Folling is notes on things to come. + // We will have a special case for bite arrays where we can use index of to // find the terminator, when the termiantor is zero or `\n\n` or the like, // because we can use `indexOf` to find the boundary. Maybe byte arrays @@ -409,118 +635,110 @@ function generate (packet, { require = null }) { // Seems like in the past I would read the terminator into an array and if // it didn't match, I'd feed the array to the parser, this would handle long // weird terminators. + + // function fixed (path, field) { - surround = true - const i = `$i[${++$i}]` - const init = $step - let sip = ++$step - const redo = $step - const begin = $step += field.pad.length - $step++ - const looped = join(field.fields.map(field => dispatch(`${path}[${i}]`, field))) - const literal = field.pad.map(bite => `0x${bite.toString(16)}`) - // TODO Seems like there ought to be some rules. I'm only going to - // support multi-character string terminators, really. If you have an - // terminated array of variable structures that could also be fixed, - // that's a horrible format. - const fit = Math.ceil(field.pad.length / (field.bits / 8)) - const terminator = function () { - switch (literal.length) { - case 0: - return null - case 1: - return $(` - case ${sip++}: + if (field.pad.length != 0) { + return terminated(path, field) + } + const element = field.fields[field.fields.length - 1] + const buffered = accumulate.buffered.map(buffered => buffered.source) + // + + // Use `Buffer` functions when fixed array is a `Buffer`. + // + // **TODO** I'm going to make this a todo, not an issue, but it would be + // nice to use `TypedArray` when we have an array of words and the + // desired byte order matches the machine byte order. 
+ // + // **TODO** Use `concat` instead of `copy`. + + // + if (element.type == 'buffer') { + locals['buffers'] = '[]' + return element.concat + ? $(` + case ${$step++}: - if ($start == $end) { - return { start: $start, parse } - } + $_ = 0 - if ($buffer[$start] == ${literal[0].toString(16)}) { - $start++ - $step = ${$step + 1} - continue - } + $step = ${$step} - $step = ${$step} - `) - default: - return join(literal.map((bite, index) => { - const remaining - = false && field.fixed && index == 0 ? $(` - if (${field.length} - ${i} < ${field.pad.length}) { - $step = ${$step + 1} - continue - } - `) : null - if (index != field.pad.length - 1) { - return $(` - case ${sip++}: - - `, remaining, -1, ` - - if ($start == $end) { - return { start: $start, parse } - } - - if ($buffer[$start] != ${bite}) { - $step = ${begin} - continue - } - $start++ - - $step = ${sip} - `) - } else { - return $(` - case ${sip++}: - - if ($start == $end) { - return { start: $start, parse } - } - - if ($buffer[$start] != ${bite}) { - $step = ${begin} - parse(Buffer.from([ ${literal.slice(0, index).join(', ')} ]), 0, ${index}) - continue - } - $start++ - - $step = ${$step + 1} - continue - `) + case ${$step++}: { + + const length = Math.min($end - $start, ${field.length} - $_) + $buffer.copy(${path}, $_, $start, $start + length) + $start += length + $_ += length + + if ($_ != ${field.length}) { + `, buffered.length != 0 ? buffered.join('\n') : null, ` + return { start: $start, parse } } - })) - } - } () - // TODO Eliminate vivify step if not used. + + $step = ${$step} + + } + `) + : $(` + case ${$step++}: + + $_ = 0 + + $step = ${$step} + + case ${$step++}: { + + const length = Math.min($end - $start, ${field.length} - $_) + $buffers.push($buffer.slice($start, $start + length)) + $start += length + $_ += length + + if ($_ != ${field.length}) { + `, buffered.length != 0 ? 
buffered.join('\n') : null, ` + return { start: $start, parse } + } + + ${path} = $buffers + $buffers = [] + + $step = ${$step} + + } + `) + } + // + + // For everything but `Buffer`, generate byte-by-byte parsers. + + // + surround = true + // Obtain a next index from the index array. + const i = `$i[${++$i}]` + // The loop return step is after loop index initialization. + const redo = $step + 2 + // We sometimes have a vivification step to create an object element. + // **TODO** Eliminate vivify step if not used. const source = $(` - case ${init}: + case ${$step++}: ${i} = 0 - `, terminator, -1, ` - - case ${begin}: + case ${$step++}: `, vivify.array(`${path}[${i}]`, field), -1, ` - `, looped, ` + `, map(dispatch,`${path}[${i}]`, field.fields), ` case ${$step++}: ${i}++ - if (${i} == ${field.length}) { - $step = ${$step} + if (${i} != ${field.length}) { + $step = ${redo} continue } - $step = ${redo} - continue - - case ${$step++}: - $_ = (${field.length} - ${i}) * ${field.bits / field.length / 8} - ${field.pad.length} $step = ${$step} @@ -536,6 +754,7 @@ function generate (packet, { require = null }) { $step = ${$step} `) + // Release the array index from the array of indices. $i-- return source } @@ -722,12 +941,14 @@ function generate (packet, { require = null }) { register: '$_', bite: '$bite', starts: '$restart = false', - buffers: '$buffers = []' + buffers: '$buffers = []', + begin: '$begin = 0' } const lets = Object.keys(declarations) .filter(key => variables[key]) .map(key => declarations[key]) + .concat(Object.keys(locals).map(name => `$${name} = ${locals[name]}`)) const restart = variables.starts ? $(` if ($restart) { diff --git a/redux.md b/redux.md index cf4a8e9a..f6bcffb7 100644 --- a/redux.md +++ b/redux.md @@ -1,3 +1,54 @@ +## Thu Jul 9 14:20:56 CDT 2020 + +It is decided that fixed buffers will strip after the buffers are read into +memory in every case. Trying to reuse the terminated code is aesthetically +unpleasing. 
With it, I could stop when I hit a padding, but it complicates the +code such that the terminated code has to also stop at a fixed point. Fixed +length buffers with padding are for older protocols, specifically `tar` which +has these strange null terminated and fixed strings, that I believe are +sometimes unfixed in later implementations of `tar`. + +Although, upon consideration, maybe reusing the terminated code is not so bad. + +The reasoning going into this diary entry is that the fixed width means some +reasonable size and the data is always there, so you may as well read it all in, +then trim with buffer slice. This would allow for the same slice code in both +the incremental parser and synchronous parser. There ought not to be a protocol +that has an enormous fixed length, but commonly uses it for a handful of bytes. + +However, it occurs to me that the ugliness comes from duplicating the +termination code. My latest foray into this mess is to implement special +handling for buffers. Padded fixed termination is implemented as terminated, but +with checking to see if we've reached the end of the width of the field in addition +to checking for the terminator. This was basically copy and paste from the +terminator implementation. + +Rather than having the padding be a property of a fixed field, why not make the +limited length of the field a property of a terminated field. If the width is +not zero, then we adjust the generated terminator code to stop at a limit. + +Note that we do not want to use this limit to prevent starvation from a client +sending us a terminated field without a terminator. That sort of checking should +be external to the parser. You'll want to add that to the documentation. That is +maximum length terminated versus fixed length padded. We could add a counter +to the API externally counting how many bytes have been fed to a particular +parser. 
+ +So, the documentation and language will call this a fixed length padded, but the +AST calls it terminated and I can corral this mess into a single generator +function. The direction I was going was making helper functions that both +functions call. This was getting so very ugly. Actually routing everything to +the single terminator function would probably be both more performant, in +theory, cancelling a continuation of the parse, proceeding to skip. + +Seems like this should be done during expansion since when we are doing a +synchronous parse we can slice out the full buffer always, or else we'd already +gone incremental, and then slice out anything after the terminator. The existing +implementation works fine. + +Should note that you should test that you don't overrun by placing a field +afterward that has the terminator character in it. + ## Tue Jul 7 07:52:20 CDT 2020 ~ buffer, streaming `Buffer` based byte arrays might be nice, but then why not do `TypeArray`s as diff --git a/serialize.all.js b/serialize.all.js index 5cc22aa3..fe3c7770 100644 --- a/serialize.all.js +++ b/serialize.all.js @@ -1,5 +1,11 @@ +// Node.js API. +const assert = require('assert') + const map = require('./map') +// Convert numbers and arrays to numbers to literals with hex literals. +const hex = require('./hex') + // Generate integer packing. const pack = require('./pack') @@ -291,16 +297,55 @@ function generate (packet, { require = null, bff }) { } function fixed (path, field) { - $step += 2 - const i = `$i[${++$i}]` const element = field.fields[field.fields.length - 1] - if (element.type == 'buffer' && ! element.concat) { - console.log($(` - for (${i} = 0; i < ${field.length}; i++) { - } - `)) - throw new Error + if (element.type == 'buffer') { + $step += 2 + let source = '' + variables.register = true + // For whole buffers, we slice the buffer out of the underlying + // buffer. 
+ // + // **TODO** Create buffer as `null` in vivified object for bff parse + // because we're allocating memory we might not use if we end up + // failing toward incremental parse. Oh, no, better still, let's + // slice the incoming buffer. If this bothers the user, they can + // make a copy of the buffer on parse using an inline. + if (element.concat) { + source = $(` + $_ = $start + ${path}.copy($buffer, $start) + $start += ${path}.length + $_ += ${path}.length + `) + // If we're gathering the chunks, we push them onto an array of + // chunks. + } else { + const i = `$i[${++$i}]` + source = $(` + $_ = $start + for (${i} = 0; ${i} < ${path}.length; ${i}++) { + ${path}[${i}].copy($buffer, $start) + $start += ${path}[${i}].length + $_ += ${path}[${i}].length + } + `) + $i-- + } + if (field.pad.length == 0) { + return source + } + $step += 2 + const fill = field.pad.length > 1 ? `Buffer.from(${hex(field.pad)})` : hex(field.pad[0]) + return $(` + `, source, ` + + $_ = ${field.length} - $_ + $buffer.fill(${fill}, $start, $start + $_) + $start += $_ + `) } + $step += 2 + const i = `$i[${++$i}]` const looped = map(dispatch, `${path}[${i}]`, field.fields) const pad = field.pad.length == 0 ? null : $(` for (;;) { @@ -538,6 +583,8 @@ function generate (packet, { require = null, bff }) { const requires = required(require) + assert.equal($i, -1) + return $(` serializers.${bff ? 'bff' : 'all'}.${packet.name} = function () { `, requires, -1, ` diff --git a/serialize.inc.js b/serialize.inc.js index b6ef69a4..afb7b4bb 100644 --- a/serialize.inc.js +++ b/serialize.inc.js @@ -1,6 +1,9 @@ // Format source code maintaining indentation. const $ = require('programmatic') +// Convert numbers and arrays to numbers to literals with hex literals. +const hex = require('./hex') + // Generate integer packing. 
const pack = require('./pack') @@ -33,6 +36,8 @@ function generate (packet, { require = null }) { const variables = declare(packet) + const locals = {} + const accumulate = { accumulator: {}, accumulated: [], @@ -210,52 +215,210 @@ function generate (packet, { require = null }) { } function fixed (path, field) { + // We will be looping. surround = true - $i++ - const init = $step - const again = ++$step - const i = `$i[${$i}]` - const looped = join(field.fields.map(field => dispatch(`${path}[${i}]`, field))) - const done = $step - const pad = join(field.pad.map(bite => { + // Get the element type contained by the array. + const element = field.fields[field.fields.length - 1] + // Generate any buffered function calls to process the buffer if we + // reach the end of the buffer. + const buffered = accumulate.buffered.length != 0 + ? accumulate.buffered.map(buffered => buffered.source).join('\n') + : null + // The byte-by-byte implementation of pad is used by byte-by-byte, of + // course, and buffers when the terminator is multi-byte. + // + // **TODO** Seems like pad should use `fill` in both cases and use as + // `fill` as much as possible when multi-byte, track the offsets, etc. + // Would worry about it more if fixed buffers weren't such a goofball + // case. It would be slice remainder copy, fill, slice remainder copy + // each time. + // + // **TODO** Lengths seem off, array length and not byte length? I've + // added the multiplication, let's see if it breaks. + function pad (i) { + // First step of padding. + const redo = $step + // First step of next field. + const done = $step + field.pad.length + // Assign the padding byte to the buffer, break if we've reached the + // end of the buffer. 
+ const pad = join(field.pad.map(bite => { + return $(` + case ${$step++}: + + if ($start == $end) { + `, buffered, ` + return { start: $start, serialize } + } + + if ($_++ == ${field.bits / 8}) { + $step = ${done} + continue + } + + $buffer[$start++] = ${hex(bite)} + + $step = ${$step} + `) + })) + // Repeat the padding fill if we've not filled the buffer return $(` + `, pad, ` + + if ($_ != ${field.bits / 8}) { + $step = ${redo} + continue + } + `) + } + // + + // If a buffer, use `copy` and `fill`. + + // + if (element.type == 'buffer') { + // If we have an array of buffers, we need a loop index and a + // variable to track the offset in the specific buffer. + let i + if (!element.concat) { + locals['offset'] = 0 + locals['length'] = 0 + i = `$i[${++$i}]` + } + const source = element.concat + // Copy the single buffer using copy. + ? $(` case ${$step++}: - if ($start == $end) { - return { start: $start, serialize } - } + $_ = 0 + + $step = ${$step} + + case ${$step++}: { + + const length = Math.min($end - $start, ${path}.length - $_) + ${path}.copy($buffer, $start, $_, $_ + length) + $start += length + $_ += length + + if ($_ != ${path}.length) { + `, buffered, ` + return { start: $start, serialize } + } + + $step = ${$step} - if (${i}++ == ${field.length}) { - $step = ${done + field.pad.length} - continue } + `) + // Loop through an array of buffers copying to the serialization + // buffer using `Buffer.copy()`. Need to track the index of the + // current buffer in the array the offset in the current buffer. 
+ : $(` + case ${$step++}: - $buffer[$start++] = 0x${bite.toString(16)} + $_ = 0 + $offset = 0 + $length = ${path}.reduce((sum, buffer) => sum + buffer.length, 0) + ${i} = 0 + + $step = ${$step} + + case ${$step++}: { + + for (;;) { + const length = Math.min($end - $start, ${path}[${i}].length - $offset) + ${path}[${i}].copy($buffer, $start, $offset, $offset + length) + $offset += length + $start += length + $_ += length + + if ($offset == ${path}[${i}].length) { + ${i}++ + $offset = 0 + } + + if ($_ == $length) { + break + } + + `, buffered, ` + return { start: $start, serialize } + } $step = ${$step} + + } `) - })) + // If we have an array of buffers, we need to release the allocated + // array index. + if (!element.concat) { + i-- + } + // If there is no padding, we are done. + if (field.pad.length == 0) { + return source + } + // We can use `Buffer.fill()` for single-byte padding. + // TODO Unnecessary `$_` assignment. + if (field.pad.length == 1) { + return $(` + `, source, ` + + case ${$step++}: + + $_ = ${field.length} - $_ + + $step = ${$step} + + case ${$step++}: { + + const length = Math.min($end - $start, $_) + $buffer.fill(${hex(field.pad[0])}, $start, $start + length) + $start += length + $_ -= length + + if ($_ != 0) { + return { start: $start, serialize } + } + + $step = ${$step} + + } + `) + } + // We use bite-by-bite padfor multi-byte padding. + return $(` + `, source, ` + + `, pad(), ` + `) + } + // Obtain a next index from the index array. + const i = `$i[${++$i}]` + // Initialization step. + const init = $step++ + // Start of element fields, loop reset. + const redo = $step + // Put it all together. 
const source = $(` case ${init}: ${i} = 0 - $step = ${again} + $step = ${redo} - `, looped, ` + `, map(dispatch, `${path}[${i}]`, field.fields), ` if (++${i} != ${path}.length) { - $step = ${again} + $step = ${redo} continue } - $step = ${done} + $_ = ${i} * ${element.bits / 8} - `, pad, ` + $step = ${$step} - if (${i} != ${field.length}) { - $step = ${done} - continue - } + `, pad(), ` `) + // Release the array index from the array of indices. $i-- return source } @@ -522,9 +685,20 @@ function generate (packet, { require = null }) { $restart = true `) : null - const lets = [ '$bite', '$stop', '$_' ].concat( - variables.starts ? [ '$restart = false' ] : [] - ) + const declarations = { + register: '$_', + bite: '$bite', + starts: '$restart = false', + length: '$length = 0' + } + + variables.register = true + variables.bite = true + + const lets = Object.keys(declarations) + .filter(key => variables[key]) + .map(key => declarations[key]) + .concat(Object.keys(locals).map(name => `$${name} = ${locals[name]}`)) return $(` serializers.inc.${packet.name} = function () { diff --git a/test/cycle/cycle.js b/test/cycle/cycle.js index b387dd19..4172cc74 100644 --- a/test/cycle/cycle.js +++ b/test/cycle/cycle.js @@ -109,10 +109,30 @@ module.exports = function (okay, options) { } } + function concat (object) { + if (typeof object == 'object') { + if (Array.isArray(object)) { + for (const element of object) { + concat(element) + } + } else { + for (const key in object) { + if (Array.isArray(object[key]) && Buffer.isBuffer(object[key][0])) { + object[key] = [ Buffer.concat(object[key]) ] + } else { + concat(object[key]) + } + } + } + } + } + try { const object = packet.parsers.all.object(expected, 0) - okay(object, actual, `${name} whole parse`) fast(object) + concat(object) + concat(actual) + okay(object, actual, `${name} whole parse`) } catch (error) { console.log(packet.parsers.all.object.toString()) throw error @@ -185,6 +205,8 @@ module.exports = function (okay, options) { 
const slice = expected.slice(start, expected.length) ; ({ start, object, parse } = parse(slice, 0, expected.length - start)) } + concat(object) + concat(actual) okay({ start, partial, parse, object }, { start: i == 0 ? expected.length : expected.length - partial, partial: expected.length - i, @@ -270,6 +292,8 @@ module.exports = function (okay, options) { const slice = expected.slice(start, expected.length) ; ({ start, object, parse } = parse(slice, 0, expected.length - start)) } + concat(object) + concat(actual) okay({ start, partial, parse, object }, { start: i == 0 ? expected.length : expected.length - partial, partial: expected.length - i, diff --git a/test/cycle/fixed.t.js b/test/cycle/fixed.t.js index 2772dc7c..0756ac00 100644 --- a/test/cycle/fixed.t.js +++ b/test/cycle/fixed.t.js @@ -3,7 +3,7 @@ require('proof')(0, prove) function prove (okay) { const cycle = require('./cycle') cycle(okay, { - name: 'fixed/words', + name: 'fixed/words/unpadded', define: { object: { array: [ [ 4 ], [ 16 ] ], @@ -16,32 +16,115 @@ function prove (okay) { }] }) cycle(okay, { - name: 'fixed/padded/single', + name: 'fixed/words/single', define: { object: { array: [ [ 8 ], [ 8 ], 0x0 ], - sentry: 8 + sentry: [ [ 8 ], 0x0 ] } }, - objects: [{ array: [ 0xa, 0xb, 0xc, 0xd ], sentry: 0xaa }] + objects: [{ + array: [ 0xa, 0xb, 0xc, 0xd, 0xa, 0xb, 0xc, 0xd ], + sentry: [ 0xaa ] + }, { + array: [ 0xa, 0xb, 0xc, 0xd ], + sentry: [ 0xaa ] + }] }) + // **TODO** What do you with a multi-byte terminator when there is only + // partial room for it to fit? 
cycle(okay, { - name: 'fixed/padded/multi', + name: 'fixed/words/multi', define: { - object: { array: [ [ 16 ], [ 8 ], 0xd, 0xa ], sentry: 8 } + object: { + array: [ [ 16 ], [ 8 ], 0xd, 0xa ], + sentry: [ [ 8 ], 0x0 ] + } }, - objects: [{ array: Buffer.from('hello, world').toJSON().data, sentry: 0xaa }] + objects: [{ + array: Buffer.from('hello, world').toJSON().data, + sentry: [ 0xaa ] + }, { + array: Buffer.from('hello, world!!!!').toJSON().data, + sentry: [ 0xaa ] + }] }) - return - return cycle(okay, { - name: 'fixed/buffer/concat', + name: 'fixed/concat/unpadded', define: { object: { array: [ [ 8 ], [ Buffer ] ], + sentry: [ [ 8 ], 0x0 ] + } + }, + objects: [{ array: Buffer.from('abcdefgh'), sentry: [ 0xaa ] }] + }) + cycle(okay, { + name: 'fixed/concat/single', + define: { + object: { + array: [ [ 8 ], [ Buffer ], 0x0 ], + sentry: [ [ 8 ], 0x0 ] + } + }, + objects: [{ + array: Buffer.from('abcdefgh'), sentry: [ 0xaa ] + }, { + array: Buffer.from('abcd'), sentry: [ 0xaa ] + }] + }) + cycle(okay, { + name: 'fixed/concat/multiple', + define: { + object: { + array: [ [ 8 ], [ Buffer ], 0xa, 0xb ], + sentry: [ [ 8 ], 0x0 ] + } + }, + objects: [{ + array: Buffer.from('abcdefgh'), sentry: [ 0xaa ] + }, { + array: Buffer.from('abcd'), sentry: [ 0xaa ] + }] + }) + cycle(okay, { + name: 'fixed/chunked/unpadded', + define: { + object: { + array: [ [ 8 ], [[ Buffer ]] ], + sentry: [ [ 8 ], 0x0 ] + } + }, + objects: [{ + array: [ Buffer.from('abcdefgh') ], sentry: [ 0xaa ] + }] + }) + cycle(okay, { + name: 'fixed/chunked/single', + define: { + object: { + array: [ [ 8 ], [[ Buffer ]], 0x0 ], + sentry: [ [ 8 ], 0x0 ] + } + }, + objects: [{ + array: [ Buffer.from('abcd') ], sentry: [ 0xaa ] + }, { + array: [ Buffer.from('abcdefgh') ], sentry: [ 0xaa ] + }] + }) + cycle(okay, { + name: 'fixed/chunked/multiple', + define: { + object: { + array: [ [ 8 ], [[ Buffer ]], 0xd, 0xa ], sentry: 8 } }, - objects: [{ array: Buffer.from('abcdefgh'), sentry: 0xaa }] + objects: [{ + 
array: [ Buffer.from('abcd') ], sentry: 0xaa + }, { + array: [ Buffer.from('abcdefgh') ], sentry: 0xaa + }] }) } diff --git a/test/generated/accumulator/counted.parser.inc.js b/test/generated/accumulator/counted.parser.inc.js index d5197a3a..a08ce8e4 100644 --- a/test/generated/accumulator/counted.parser.inc.js +++ b/test/generated/accumulator/counted.parser.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ parsers }) { return function (object = {}, $step = 0, $i = [], $accumulator = [], $starts = []) { - let $_, $bite, $restart = false + let $_, $bite, $restart = false, $length = 0 return function parse ($buffer, $start, $end) { if ($restart) { @@ -79,15 +79,13 @@ module.exports = function ({ parsers }) { return { start: $start, parse } } - if ($buffer[$start] != 0x0) { - $step = 7 - parse([ ], 0, 0) + if ($buffer[$start] == 0x0) { + $start++ + $step = 11 continue } - $start++ - $step = 11 - continue + $step = 7 case 7: diff --git a/test/generated/accumulator/counted.serializer.inc.js b/test/generated/accumulator/counted.serializer.inc.js index 38253587..368f58d3 100644 --- a/test/generated/accumulator/counted.serializer.inc.js +++ b/test/generated/accumulator/counted.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = [], $accumulator = {}, $starts = []) { - let $bite, $stop, $_, $restart = false + let $_, $bite, $restart = false return function serialize ($buffer, $start, $end) { if ($restart) { diff --git a/test/generated/accumulator/function.serializer.inc.js b/test/generated/accumulator/function.serializer.inc.js index b93a584f..6fb3ee52 100644 --- a/test/generated/accumulator/function.serializer.inc.js +++ b/test/generated/accumulator/function.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { const assert = require('assert') return function (object, $step = 0, $$ = [], $accumulator = {}) { - let $bite, $stop, $_ + let $_, $bite return function serialize 
($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/accumulator/object.serializer.inc.js b/test/generated/accumulator/object.serializer.inc.js index ad8e5399..8d56f1d7 100644 --- a/test/generated/accumulator/object.serializer.inc.js +++ b/test/generated/accumulator/object.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { const assert = require('assert') return function (object, $step = 0, $$ = [], $accumulator = {}) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/accumulator/regex.serializer.inc.js b/test/generated/accumulator/regex.serializer.inc.js index 506e0744..d39c7420 100644 --- a/test/generated/accumulator/regex.serializer.inc.js +++ b/test/generated/accumulator/regex.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { const assert = require('assert') return function (object, $step = 0, $$ = [], $accumulator = {}) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/array/fixed.serializer.inc.js b/test/generated/array/fixed.serializer.inc.js index e6d8ab36..a5e53aca 100644 --- a/test/generated/array/fixed.serializer.inc.js +++ b/test/generated/array/fixed.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { diff --git a/test/generated/array/nested.serializer.inc.js b/test/generated/array/nested.serializer.inc.js index 5920ea99..121672c4 100644 --- a/test/generated/array/nested.serializer.inc.js +++ b/test/generated/array/nested.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, 
$end) { for (;;) { diff --git a/test/generated/array/variable.serializer.inc.js b/test/generated/array/variable.serializer.inc.js index 259187b7..46fe4b31 100644 --- a/test/generated/array/variable.serializer.inc.js +++ b/test/generated/array/variable.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { diff --git a/test/generated/array/words.serializer.inc.js b/test/generated/array/words.serializer.inc.js index 9e28c64b..8b7717df 100644 --- a/test/generated/array/words.serializer.inc.js +++ b/test/generated/array/words.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { diff --git a/test/generated/assert/dollarunder.serializer.inc.js b/test/generated/assert/dollarunder.serializer.inc.js index 9d32e8d7..2436ee46 100644 --- a/test/generated/assert/dollarunder.serializer.inc.js +++ b/test/generated/assert/dollarunder.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $$ = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/assert/named.serializer.inc.js b/test/generated/assert/named.serializer.inc.js index d227ccd0..89771f6a 100644 --- a/test/generated/assert/named.serializer.inc.js +++ b/test/generated/assert/named.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $$ = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/assert/positional.serializer.inc.js b/test/generated/assert/positional.serializer.inc.js index 
d80a2fd1..f12dce6f 100644 --- a/test/generated/assert/positional.serializer.inc.js +++ b/test/generated/assert/positional.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $$ = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/conditional/bidirectional.serializer.inc.js b/test/generated/conditional/bidirectional.serializer.inc.js index fe82cae5..c34c04e2 100644 --- a/test/generated/conditional/bidirectional.serializer.inc.js +++ b/test/generated/conditional/bidirectional.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { diff --git a/test/generated/conditional/mysql.serializer.inc.js b/test/generated/conditional/mysql.serializer.inc.js index 508711c7..2487061a 100644 --- a/test/generated/conditional/mysql.serializer.inc.js +++ b/test/generated/conditional/mysql.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { diff --git a/test/generated/conditional/packed.serializer.inc.js b/test/generated/conditional/packed.serializer.inc.js index 32994fd5..4ae5f0f4 100644 --- a/test/generated/conditional/packed.serializer.inc.js +++ b/test/generated/conditional/packed.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/conditional/sipless.serializer.inc.js b/test/generated/conditional/sipless.serializer.inc.js index 05066653..4a6d440e 100644 --- 
a/test/generated/conditional/sipless.serializer.inc.js +++ b/test/generated/conditional/sipless.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { diff --git a/test/generated/fixed/chunked/multiple.parser.all.js b/test/generated/fixed/chunked/multiple.parser.all.js new file mode 100644 index 00000000..2dda5208 --- /dev/null +++ b/test/generated/fixed/chunked/multiple.parser.all.js @@ -0,0 +1,28 @@ +module.exports = function ({ parsers }) { + parsers.all.object = function () { + + + return function ($buffer, $start) { + let $_, $i = [], $slice = null + + let object = { + array: [], + sentry: 0 + } + + $slice = $buffer.slice($start, 8) + $start += 8 + + $_ = $slice.indexOf(Buffer.from([ 13, 10 ])) + if (~$_) { + $slice = $buffer.slice(0, $_) + } + + object.array.push($slice) + + object.sentry = ($buffer[$start++]) + + return object + } + } () +} diff --git a/test/generated/fixed/chunked/multiple.parser.bff.js b/test/generated/fixed/chunked/multiple.parser.bff.js new file mode 100644 index 00000000..d63799dc --- /dev/null +++ b/test/generated/fixed/chunked/multiple.parser.bff.js @@ -0,0 +1,34 @@ +module.exports = function ({ parsers }) { + parsers.bff.object = function () { + + + return function () { + return function parse ($buffer, $start, $end) { + let $_, $i = [], $slice = null + + let object = { + array: [], + sentry: 0 + } + + if ($end - $start < 9) { + return parsers.inc.object(object, 1, $i)($buffer, $start, $end) + } + + $slice = $buffer.slice($start, 8) + $start += 8 + + $_ = $slice.indexOf(Buffer.from([ 13, 10 ])) + if (~$_) { + $slice = $buffer.slice(0, $_) + } + + object.array.push($slice) + + object.sentry = ($buffer[$start++]) + + return { start: $start, object: object, parse: null } + } + } () + } +} diff --git a/test/generated/fixed/chunked/multiple.parser.inc.js 
b/test/generated/fixed/chunked/multiple.parser.inc.js new file mode 100644 index 00000000..82a449c1 --- /dev/null +++ b/test/generated/fixed/chunked/multiple.parser.inc.js @@ -0,0 +1,123 @@ +module.exports = function ({ parsers }) { + parsers.inc.object = function () { + + + return function (object = {}, $step = 0, $i = []) { + let $_, $bite, $buffers = [], $length = 0 + + return function parse ($buffer, $start, $end) { + for (;;) { + switch ($step) { + case 0: + + object = { + array: [], + sentry: 0 + } + + $step = 1 + + case 1: + + $_ = 0 + + $step = 2 + + case 2: { + + const $index = $buffer.indexOf(0xd, $start) + if (~$index) { + if ($_ + $index > 8) { + const $length = 8 - $_ + $buffers.push($buffer.slice($start, $start + $length)) + $_ += $length + $start += $length + $step = 4 + continue + } else { + $buffers.push($buffer.slice($start, $index)) + $_ += ($index - $start) + 1 + $start = $index + 1 + $step = 3 + continue + } + } else if ($_ + ($end - $start) >= 8) { + const $length = 8 - $_ + $buffers.push($buffer.slice($start, $start + $length)) + $_ += $length + $start += $length + $step = 4 + continue + } else { + $_ += $end - $start + $buffers.push($buffer.slice($start)) + return { start: $end, parse } + } + + $step = 3 + + } + + + case 3: + + if ($start == $end) { + return { start: $start, parse } + } + + if ($buffer[$start++] != 0xa) { + $buffers.push(Buffer.from([ 13 ].concat($buffer[$start]))) + $step = 2 + continue + } + + $step = 4 + + case 4: + + $_ = 8 - Math.min($buffers.reduce((sum, buffer) => { + return sum + buffer.length + }, 2), 8) + + object.array = $buffers + $buffers = [] + + $step = 5 + + case 5: { + + const length = Math.min($_, $end - $start) + $start += length + $_ -= length + + if ($_ != 0) { + return { start: $start, parse } + } + + $step = 6 + + } + + case 6: + + $step = 7 + + case 7: + + if ($start == $end) { + return { start: $start, object: null, parse } + } + + object.sentry = $buffer[$start++] + + + case 8: + + return { start: 
$start, object: object, parse: null } + } + break + } + } + } + } () +} diff --git a/test/generated/fixed/chunked/multiple.serializer.all.js b/test/generated/fixed/chunked/multiple.serializer.all.js new file mode 100644 index 00000000..f5e38d5e --- /dev/null +++ b/test/generated/fixed/chunked/multiple.serializer.all.js @@ -0,0 +1,26 @@ +module.exports = function ({ serializers }) { + serializers.all.object = function () { + + + return function (object) { + return function ($buffer, $start, $end) { + let $_, $i = [] + + $_ = $start + for ($i[0] = 0; $i[0] < object.array.length; $i[0]++) { + object.array[$i[0]].copy($buffer, $start) + $start += object.array[$i[0]].length + $_ += object.array[$i[0]].length + } + + $_ = 8 - $_ + $buffer.fill(Buffer.from([ 0xd, 0xa ]), $start, $start + $_) + $start += $_ + + $buffer[$start++] = (object.sentry & 0xff) + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/chunked/multiple.serializer.bff.js b/test/generated/fixed/chunked/multiple.serializer.bff.js new file mode 100644 index 00000000..f8738973 --- /dev/null +++ b/test/generated/fixed/chunked/multiple.serializer.bff.js @@ -0,0 +1,30 @@ +module.exports = function ({ serializers }) { + serializers.bff.object = function () { + + + return function (object) { + return function ($buffer, $start, $end) { + let $_, $i = [] + + if ($end - $start < 9) { + return serializers.inc.object(object, 0, $i)($buffer, $start, $end) + } + + $_ = $start + for ($i[0] = 0; $i[0] < object.array.length; $i[0]++) { + object.array[$i[0]].copy($buffer, $start) + $start += object.array[$i[0]].length + $_ += object.array[$i[0]].length + } + + $_ = 8 - $_ + $buffer.fill(Buffer.from([ 0xd, 0xa ]), $start, $start + $_) + $start += $_ + + $buffer[$start++] = (object.sentry & 0xff) + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/chunked/multiple.serializer.inc.js 
b/test/generated/fixed/chunked/multiple.serializer.inc.js new file mode 100644 index 00000000..9709bd28 --- /dev/null +++ b/test/generated/fixed/chunked/multiple.serializer.inc.js @@ -0,0 +1,112 @@ +module.exports = function ({ serializers }) { + serializers.inc.object = function () { + + + return function (object, $step = 0, $i = []) { + let $_, $bite, $offset = 0, $length = 0 + + return function serialize ($buffer, $start, $end) { + for (;;) { + switch ($step) { + case 0: + + $_ = 0 + $offset = 0 + $length = object.array.reduce((sum, buffer) => sum + buffer.length, 0) + $i[0] = 0 + + $step = 1 + + case 1: { + + for (;;) { + const length = Math.min($end - $start, object.array[$i[0]].length - $offset) + object.array[$i[0]].copy($buffer, $start, $offset, $offset + length) + $offset += length + $start += length + $_ += length + + if ($offset == object.array[$i[0]].length) { + $i[0]++ + $offset = 0 + } + + if ($_ == $length) { + break + } + + return { start: $start, serialize } + } + + $step = 2 + + } + + case 2: + + if ($start == $end) { + return { start: $start, serialize } + } + + if ($_++ == 8) { + $step = 4 + continue + } + + $buffer[$start++] = 0xd + + $step = 3 + + case 3: + + if ($start == $end) { + return { start: $start, serialize } + } + + if ($_++ == 8) { + $step = 4 + continue + } + + $buffer[$start++] = 0xa + + $step = 4 + + if ($_ != 8) { + $step = 2 + continue + } + + case 4: + + $step = 5 + $bite = 0 + $_ = object.sentry + + case 5: + + while ($bite != -1) { + if ($start == $end) { + return { start: $start, serialize } + } + $buffer[$start++] = ($_ >>> $bite * 8 & 0xff) + $bite-- + } + + + $step = 6 + + case 6: + + break + + } + + break + } + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/padded/single.sizeof.js b/test/generated/fixed/chunked/multiple.sizeof.js similarity index 100% rename from test/generated/fixed/padded/single.sizeof.js rename to test/generated/fixed/chunked/multiple.sizeof.js diff 
--git a/test/generated/fixed/chunked/single.parser.all.js b/test/generated/fixed/chunked/single.parser.all.js new file mode 100644 index 00000000..f2d800e1 --- /dev/null +++ b/test/generated/fixed/chunked/single.parser.all.js @@ -0,0 +1,40 @@ +module.exports = function ({ parsers }) { + parsers.all.object = function () { + + + return function ($buffer, $start) { + let $_, $i = [], $slice = null + + let object = { + array: [], + sentry: [] + } + + $slice = $buffer.slice($start, 8) + $start += 8 + + $_ = $slice.indexOf(0) + if (~$_) { + $slice = $buffer.slice(0, $_) + } + + object.array.push($slice) + + $i[0] = 0 + for (;;) { + if ( + $buffer[$start] == 0x0 + ) { + $start += 1 + break + } + + object.sentry[$i[0]] = ($buffer[$start++]) + + $i[0]++ + } + + return object + } + } () +} diff --git a/test/generated/fixed/chunked/single.parser.bff.js b/test/generated/fixed/chunked/single.parser.bff.js new file mode 100644 index 00000000..ecd81354 --- /dev/null +++ b/test/generated/fixed/chunked/single.parser.bff.js @@ -0,0 +1,54 @@ +module.exports = function ({ parsers }) { + parsers.bff.object = function () { + + + return function () { + return function parse ($buffer, $start, $end) { + let $_, $i = [], $slice = null + + let object = { + array: [], + sentry: [] + } + + if ($end - $start < 8) { + return parsers.inc.object(object, 1, $i)($buffer, $start, $end) + } + + $slice = $buffer.slice($start, 8) + $start += 8 + + $_ = $slice.indexOf(0) + if (~$_) { + $slice = $buffer.slice(0, $_) + } + + object.array.push($slice) + + $i[0] = 0 + for (;;) { + if ($end - $start < 1) { + return parsers.inc.object(object, 6, $i)($buffer, $start, $end) + } + + if ( + $buffer[$start] == 0x0 + ) { + $start += 1 + break + } + + if ($end - $start < 1) { + return parsers.inc.object(object, 8, $i)($buffer, $start, $end) + } + + object.sentry[$i[0]] = ($buffer[$start++]) + + $i[0]++ + } + + return { start: $start, object: object, parse: null } + } + } () + } +} diff --git 
a/test/generated/fixed/chunked/single.parser.inc.js b/test/generated/fixed/chunked/single.parser.inc.js new file mode 100644 index 00000000..f31835ab --- /dev/null +++ b/test/generated/fixed/chunked/single.parser.inc.js @@ -0,0 +1,136 @@ +module.exports = function ({ parsers }) { + parsers.inc.object = function () { + + + return function (object = {}, $step = 0, $i = []) { + let $_, $bite, $buffers = [], $length = 0 + + return function parse ($buffer, $start, $end) { + for (;;) { + switch ($step) { + case 0: + + object = { + array: [], + sentry: [] + } + + $step = 1 + + case 1: + + $_ = 0 + + $step = 2 + + case 2: { + + const $index = $buffer.indexOf(0x0, $start) + if (~$index) { + if ($_ + $index > 8) { + const $length = 8 - $_ + $buffers.push($buffer.slice($start, $start + $length)) + $_ += $length + $start += $length + $step = 3 + continue + } else { + $buffers.push($buffer.slice($start, $index)) + $_ += ($index - $start) + 1 + $start = $index + 1 + $step = 3 + continue + } + } else if ($_ + ($end - $start) >= 8) { + const $length = 8 - $_ + $buffers.push($buffer.slice($start, $start + $length)) + $_ += $length + $start += $length + $step = 3 + continue + } else { + $_ += $end - $start + $buffers.push($buffer.slice($start)) + return { start: $end, parse } + } + + $step = 3 + + } + + + case 3: + + $_ = 8 - Math.min($buffers.reduce((sum, buffer) => { + return sum + buffer.length + }, 1), 8) + + object.array = $buffers + $buffers = [] + + $step = 4 + + case 4: { + + const length = Math.min($_, $end - $start) + $start += length + $_ -= length + + if ($_ != 0) { + return { start: $start, parse } + } + + $step = 5 + + } + + case 5: + + $i[0] = 0 + + case 6: + + if ($start == $end) { + return { start: $start, parse } + } + + if ($buffer[$start] == 0x0) { + $start++ + $step = 11 + continue + } + + $step = 7 + + case 7: + + + case 8: + + $step = 9 + + case 9: + + if ($start == $end) { + return { start: $start, object: null, parse } + } + + object.sentry[$i[0]] = 
$buffer[$start++] + + + case 10: + + $i[0]++ + $step = 6 + continue + + case 11: + + return { start: $start, object: object, parse: null } + } + break + } + } + } + } () +} diff --git a/test/generated/fixed/chunked/single.serializer.all.js b/test/generated/fixed/chunked/single.serializer.all.js new file mode 100644 index 00000000..978bdc97 --- /dev/null +++ b/test/generated/fixed/chunked/single.serializer.all.js @@ -0,0 +1,30 @@ +module.exports = function ({ serializers }) { + serializers.all.object = function () { + + + return function (object) { + return function ($buffer, $start, $end) { + let $_, $i = [] + + $_ = $start + for ($i[0] = 0; $i[0] < object.array.length; $i[0]++) { + object.array[$i[0]].copy($buffer, $start) + $start += object.array[$i[0]].length + $_ += object.array[$i[0]].length + } + + $_ = 8 - $_ + $buffer.fill(0x0, $start, $start + $_) + $start += $_ + + for ($i[0] = 0; $i[0] < object.sentry.length; $i[0]++) { + $buffer[$start++] = (object.sentry[$i[0]] & 0xff) + } + + $buffer[$start++] = 0x0 + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/chunked/single.serializer.bff.js b/test/generated/fixed/chunked/single.serializer.bff.js new file mode 100644 index 00000000..a849b166 --- /dev/null +++ b/test/generated/fixed/chunked/single.serializer.bff.js @@ -0,0 +1,34 @@ +module.exports = function ({ serializers }) { + serializers.bff.object = function () { + + + return function (object) { + return function ($buffer, $start, $end) { + let $_, $i = [] + + if ($end - $start < 9 + 1 * object.sentry.length) { + return serializers.inc.object(object, 0, $i)($buffer, $start, $end) + } + + $_ = $start + for ($i[0] = 0; $i[0] < object.array.length; $i[0]++) { + object.array[$i[0]].copy($buffer, $start) + $start += object.array[$i[0]].length + $_ += object.array[$i[0]].length + } + + $_ = 8 - $_ + $buffer.fill(0x0, $start, $start + $_) + $start += $_ + + for ($i[0] = 0; $i[0] < object.sentry.length; $i[0]++) { + 
$buffer[$start++] = (object.sentry[$i[0]] & 0xff) + } + + $buffer[$start++] = 0x0 + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/chunked/single.serializer.inc.js b/test/generated/fixed/chunked/single.serializer.inc.js new file mode 100644 index 00000000..cf3ab413 --- /dev/null +++ b/test/generated/fixed/chunked/single.serializer.inc.js @@ -0,0 +1,119 @@ +module.exports = function ({ serializers }) { + serializers.inc.object = function () { + + + return function (object, $step = 0, $i = []) { + let $_, $bite, $offset = 0, $length = 0 + + return function serialize ($buffer, $start, $end) { + for (;;) { + switch ($step) { + case 0: + + $_ = 0 + $offset = 0 + $length = object.array.reduce((sum, buffer) => sum + buffer.length, 0) + $i[0] = 0 + + $step = 1 + + case 1: { + + for (;;) { + const length = Math.min($end - $start, object.array[$i[0]].length - $offset) + object.array[$i[0]].copy($buffer, $start, $offset, $offset + length) + $offset += length + $start += length + $_ += length + + if ($offset == object.array[$i[0]].length) { + $i[0]++ + $offset = 0 + } + + if ($_ == $length) { + break + } + + return { start: $start, serialize } + } + + $step = 2 + + } + + case 2: + + $_ = 8 - $_ + + $step = 3 + + case 3: { + + const length = Math.min($end - $start, $_) + $buffer.fill(0x0, $start, $start + length) + $start += length + $_ -= length + + if ($_ != 0) { + return { start: $start, serialize } + } + + $step = 4 + + } + + case 4: + + $i[1] = 0 + $step = 5 + + case 5: + + $step = 6 + $bite = 0 + $_ = object.sentry[$i[1]] + + case 6: + + while ($bite != -1) { + if ($start == $end) { + return { start: $start, serialize } + } + $buffer[$start++] = ($_ >>> $bite * 8 & 0xff) + $bite-- + } + + if (++$i[1] != object.sentry.length) { + $step = 5 + continue + } + + $step = 7 + + case 7: + + if ($start == $end) { + return { start: $start, serialize } + } + + $buffer[$start++] = 0x0 + + $step = 8 + + $step = 8 + + case 8: + + break + 
+ } + + break + } + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/words.sizeof.js b/test/generated/fixed/chunked/single.sizeof.js similarity index 100% rename from test/generated/fixed/words.sizeof.js rename to test/generated/fixed/chunked/single.sizeof.js diff --git a/test/generated/fixed/padded/single.parser.all.js b/test/generated/fixed/chunked/unpadded.parser.all.js similarity index 63% rename from test/generated/fixed/padded/single.parser.all.js rename to test/generated/fixed/chunked/unpadded.parser.all.js index 2b9144b2..60e10622 100644 --- a/test/generated/fixed/padded/single.parser.all.js +++ b/test/generated/fixed/chunked/unpadded.parser.all.js @@ -3,13 +3,17 @@ module.exports = function ({ parsers }) { return function ($buffer, $start) { - let $i = [] + let $_, $i = [], $slice = null let object = { array: [], - sentry: 0 + sentry: [] } + $slice = $buffer.slice($start, 8) + $start += 8 + object.array.push($slice) + $i[0] = 0 for (;;) { if ( @@ -19,19 +23,11 @@ module.exports = function ({ parsers }) { break } - object.array[$i[0]] = ($buffer[$start++]) - $i[0]++ + object.sentry[$i[0]] = ($buffer[$start++]) - if ($i[0] == 8) { - break - } + $i[0]++ } - - $start += (8 - $i[0]) * 1 - 1 - - object.sentry = ($buffer[$start++]) - return object } } () diff --git a/test/generated/fixed/padded/single.parser.bff.js b/test/generated/fixed/chunked/unpadded.parser.bff.js similarity index 63% rename from test/generated/fixed/padded/single.parser.bff.js rename to test/generated/fixed/chunked/unpadded.parser.bff.js index 79c10592..413b546f 100644 --- a/test/generated/fixed/padded/single.parser.bff.js +++ b/test/generated/fixed/chunked/unpadded.parser.bff.js @@ -4,21 +4,25 @@ module.exports = function ({ parsers }) { return function () { return function parse ($buffer, $start, $end) { - let $i = [] + let $_, $i = [], $slice = null let object = { array: [], - sentry: 0 + sentry: [] } - if ($end - $start < 9) { + if ($end - 
$start < 8) { return parsers.inc.object(object, 1, $i)($buffer, $start, $end) } + $slice = $buffer.slice($start, 8) + $start += 8 + object.array.push($slice) + $i[0] = 0 for (;;) { if ($end - $start < 1) { - return parsers.inc.object(object, 2, $i)($buffer, $start, $end) + return parsers.inc.object(object, 4, $i)($buffer, $start, $end) } if ( @@ -28,18 +32,14 @@ module.exports = function ({ parsers }) { break } - object.array[$i[0]] = ($buffer[$start++]) - $i[0]++ - - if ($i[0] == 8) { - break + if ($end - $start < 1) { + return parsers.inc.object(object, 6, $i)($buffer, $start, $end) } - } - - $start += (8 - $i[0]) * 1 - 1 + object.sentry[$i[0]] = ($buffer[$start++]) - object.sentry = ($buffer[$start++]) + $i[0]++ + } return { start: $start, object: object, parse: null } } diff --git a/test/generated/fixed/chunked/unpadded.parser.inc.js b/test/generated/fixed/chunked/unpadded.parser.inc.js new file mode 100644 index 00000000..ea0efeb9 --- /dev/null +++ b/test/generated/fixed/chunked/unpadded.parser.inc.js @@ -0,0 +1,93 @@ +module.exports = function ({ parsers }) { + parsers.inc.object = function () { + + + return function (object = {}, $step = 0, $i = []) { + let $_, $bite, $buffers = [], $length = 0 + + return function parse ($buffer, $start, $end) { + for (;;) { + switch ($step) { + case 0: + + object = { + array: [], + sentry: [] + } + + $step = 1 + + case 1: + + $_ = 0 + + $step = 2 + + case 2: { + + const length = Math.min($end - $start, 8 - $_) + $buffers.push($buffer.slice($start, $start + length)) + $start += length + $_ += length + + if ($_ != 8) { + return { start: $start, parse } + } + + object.array = $buffers + $buffers = [] + + $step = 3 + + } + + case 3: + + $i[0] = 0 + + case 4: + + if ($start == $end) { + return { start: $start, parse } + } + + if ($buffer[$start] == 0x0) { + $start++ + $step = 9 + continue + } + + $step = 5 + + case 5: + + + case 6: + + $step = 7 + + case 7: + + if ($start == $end) { + return { start: $start, object: null, parse 
} + } + + object.sentry[$i[0]] = $buffer[$start++] + + + case 8: + + $i[0]++ + $step = 4 + continue + + case 9: + + return { start: $start, object: object, parse: null } + } + break + } + } + } + } () +} diff --git a/test/generated/fixed/chunked/unpadded.serializer.all.js b/test/generated/fixed/chunked/unpadded.serializer.all.js new file mode 100644 index 00000000..9e622b2c --- /dev/null +++ b/test/generated/fixed/chunked/unpadded.serializer.all.js @@ -0,0 +1,26 @@ +module.exports = function ({ serializers }) { + serializers.all.object = function () { + + + return function (object) { + return function ($buffer, $start, $end) { + let $_, $i = [] + + $_ = $start + for ($i[0] = 0; $i[0] < object.array.length; $i[0]++) { + object.array[$i[0]].copy($buffer, $start) + $start += object.array[$i[0]].length + $_ += object.array[$i[0]].length + } + + for ($i[0] = 0; $i[0] < object.sentry.length; $i[0]++) { + $buffer[$start++] = (object.sentry[$i[0]] & 0xff) + } + + $buffer[$start++] = 0x0 + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/chunked/unpadded.serializer.bff.js b/test/generated/fixed/chunked/unpadded.serializer.bff.js new file mode 100644 index 00000000..969e96e8 --- /dev/null +++ b/test/generated/fixed/chunked/unpadded.serializer.bff.js @@ -0,0 +1,30 @@ +module.exports = function ({ serializers }) { + serializers.bff.object = function () { + + + return function (object) { + return function ($buffer, $start, $end) { + let $_, $i = [] + + if ($end - $start < 9 + 1 * object.sentry.length) { + return serializers.inc.object(object, 0, $i)($buffer, $start, $end) + } + + $_ = $start + for ($i[0] = 0; $i[0] < object.array.length; $i[0]++) { + object.array[$i[0]].copy($buffer, $start) + $start += object.array[$i[0]].length + $_ += object.array[$i[0]].length + } + + for ($i[0] = 0; $i[0] < object.sentry.length; $i[0]++) { + $buffer[$start++] = (object.sentry[$i[0]] & 0xff) + } + + $buffer[$start++] = 0x0 + + return { start: 
$start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/chunked/unpadded.serializer.inc.js b/test/generated/fixed/chunked/unpadded.serializer.inc.js new file mode 100644 index 00000000..19c2142c --- /dev/null +++ b/test/generated/fixed/chunked/unpadded.serializer.inc.js @@ -0,0 +1,98 @@ +module.exports = function ({ serializers }) { + serializers.inc.object = function () { + + + return function (object, $step = 0, $i = []) { + let $_, $bite, $offset = 0, $length = 0 + + return function serialize ($buffer, $start, $end) { + for (;;) { + switch ($step) { + case 0: + + $_ = 0 + $offset = 0 + $length = object.array.reduce((sum, buffer) => sum + buffer.length, 0) + $i[0] = 0 + + $step = 1 + + case 1: { + + for (;;) { + const length = Math.min($end - $start, object.array[$i[0]].length - $offset) + object.array[$i[0]].copy($buffer, $start, $offset, $offset + length) + $offset += length + $start += length + $_ += length + + if ($offset == object.array[$i[0]].length) { + $i[0]++ + $offset = 0 + } + + if ($_ == $length) { + break + } + + return { start: $start, serialize } + } + + $step = 2 + + } + + case 2: + + $i[1] = 0 + $step = 3 + + case 3: + + $step = 4 + $bite = 0 + $_ = object.sentry[$i[1]] + + case 4: + + while ($bite != -1) { + if ($start == $end) { + return { start: $start, serialize } + } + $buffer[$start++] = ($_ >>> $bite * 8 & 0xff) + $bite-- + } + + if (++$i[1] != object.sentry.length) { + $step = 3 + continue + } + + $step = 5 + + case 5: + + if ($start == $end) { + return { start: $start, serialize } + } + + $buffer[$start++] = 0x0 + + $step = 6 + + $step = 6 + + case 6: + + break + + } + + break + } + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/padded/multi.sizeof.js b/test/generated/fixed/chunked/unpadded.sizeof.js similarity index 71% rename from test/generated/fixed/padded/multi.sizeof.js rename to test/generated/fixed/chunked/unpadded.sizeof.js index 33d6dc41..b4c4734d 100644 --- 
a/test/generated/fixed/padded/multi.sizeof.js +++ b/test/generated/fixed/chunked/unpadded.sizeof.js @@ -5,7 +5,9 @@ module.exports = function ({ sizeOf }) { return function (object) { let $start = 0 - $start += 17 + $start += 8 + + $start += 1 * object.sentry.length + 1 return $start } diff --git a/test/generated/fixed/concat/multiple.parser.all.js b/test/generated/fixed/concat/multiple.parser.all.js new file mode 100644 index 00000000..4e10cc0f --- /dev/null +++ b/test/generated/fixed/concat/multiple.parser.all.js @@ -0,0 +1,40 @@ +module.exports = function ({ parsers }) { + parsers.all.object = function () { + + + return function ($buffer, $start) { + let $_, $i = [], $slice = null + + let object = { + array: Buffer.alloc(8), + sentry: [] + } + + $slice = $buffer.slice($start, 8) + $start += 8 + + $_ = $slice.indexOf(Buffer.from([ 10, 11 ])) + if (~$_) { + $slice = $buffer.slice(0, $_) + } + + object.array = $slice + + $i[0] = 0 + for (;;) { + if ( + $buffer[$start] == 0x0 + ) { + $start += 1 + break + } + + object.sentry[$i[0]] = ($buffer[$start++]) + + $i[0]++ + } + + return object + } + } () +} diff --git a/test/generated/fixed/concat/multiple.parser.bff.js b/test/generated/fixed/concat/multiple.parser.bff.js new file mode 100644 index 00000000..524675c2 --- /dev/null +++ b/test/generated/fixed/concat/multiple.parser.bff.js @@ -0,0 +1,54 @@ +module.exports = function ({ parsers }) { + parsers.bff.object = function () { + + + return function () { + return function parse ($buffer, $start, $end) { + let $_, $i = [], $slice = null + + let object = { + array: Buffer.alloc(8), + sentry: [] + } + + if ($end - $start < 8) { + return parsers.inc.object(object, 1, $i)($buffer, $start, $end) + } + + $slice = $buffer.slice($start, 8) + $start += 8 + + $_ = $slice.indexOf(Buffer.from([ 10, 11 ])) + if (~$_) { + $slice = $buffer.slice(0, $_) + } + + object.array = $slice + + $i[0] = 0 + for (;;) { + if ($end - $start < 1) { + return parsers.inc.object(object, 7, 
$i)($buffer, $start, $end) + } + + if ( + $buffer[$start] == 0x0 + ) { + $start += 1 + break + } + + if ($end - $start < 1) { + return parsers.inc.object(object, 9, $i)($buffer, $start, $end) + } + + object.sentry[$i[0]] = ($buffer[$start++]) + + $i[0]++ + } + + return { start: $start, object: object, parse: null } + } + } () + } +} diff --git a/test/generated/fixed/concat/multiple.parser.inc.js b/test/generated/fixed/concat/multiple.parser.inc.js new file mode 100644 index 00000000..bebd6968 --- /dev/null +++ b/test/generated/fixed/concat/multiple.parser.inc.js @@ -0,0 +1,150 @@ +module.exports = function ({ parsers }) { + parsers.inc.object = function () { + + + return function (object = {}, $step = 0, $i = []) { + let $_, $bite, $buffers = [], $length = 0 + + return function parse ($buffer, $start, $end) { + for (;;) { + switch ($step) { + case 0: + + object = { + array: Buffer.alloc(8), + sentry: [] + } + + $step = 1 + + case 1: + + $_ = 0 + + $step = 2 + + case 2: { + + const $index = $buffer.indexOf(0xa, $start) + if (~$index) { + if ($_ + $index > 8) { + const $length = 8 - $_ + $buffers.push($buffer.slice($start, $start + $length)) + $_ += $length + $start += $length + $step = 4 + continue + } else { + $buffers.push($buffer.slice($start, $index)) + $_ += ($index - $start) + 1 + $start = $index + 1 + $step = 3 + continue + } + } else if ($_ + ($end - $start) >= 8) { + const $length = 8 - $_ + $buffers.push($buffer.slice($start, $start + $length)) + $_ += $length + $start += $length + $step = 4 + continue + } else { + $_ += $end - $start + $buffers.push($buffer.slice($start)) + return { start: $end, parse } + } + + $step = 3 + + } + + + case 3: + + if ($start == $end) { + return { start: $start, parse } + } + + if ($buffer[$start++] != 0xb) { + $buffers.push(Buffer.from([ 10 ].concat($buffer[$start]))) + $step = 2 + continue + } + + $step = 4 + + case 4: + + $_ = 8 - Math.min($buffers.reduce((sum, buffer) => { + return sum + buffer.length + }, 2), 8) + + 
object.array = $buffers.length == 1 ? $buffers[0] : Buffer.concat($buffers) + $buffers.length = 0 + + $step = 5 + + case 5: { + + const length = Math.min($_, $end - $start) + $start += length + $_ -= length + + if ($_ != 0) { + return { start: $start, parse } + } + + $step = 6 + + } + + case 6: + + $i[0] = 0 + + case 7: + + if ($start == $end) { + return { start: $start, parse } + } + + if ($buffer[$start] == 0x0) { + $start++ + $step = 12 + continue + } + + $step = 8 + + case 8: + + + case 9: + + $step = 10 + + case 10: + + if ($start == $end) { + return { start: $start, object: null, parse } + } + + object.sentry[$i[0]] = $buffer[$start++] + + + case 11: + + $i[0]++ + $step = 7 + continue + + case 12: + + return { start: $start, object: object, parse: null } + } + break + } + } + } + } () +} diff --git a/test/generated/fixed/concat/multiple.serializer.all.js b/test/generated/fixed/concat/multiple.serializer.all.js new file mode 100644 index 00000000..941b9968 --- /dev/null +++ b/test/generated/fixed/concat/multiple.serializer.all.js @@ -0,0 +1,28 @@ +module.exports = function ({ serializers }) { + serializers.all.object = function () { + + + return function (object) { + return function ($buffer, $start, $end) { + let $_, $i = [] + + $_ = $start + object.array.copy($buffer, $start) + $start += object.array.length + $_ += object.array.length + + $_ = 8 - $_ + $buffer.fill(Buffer.from([ 0xa, 0xb ]), $start, $start + $_) + $start += $_ + + for ($i[0] = 0; $i[0] < object.sentry.length; $i[0]++) { + $buffer[$start++] = (object.sentry[$i[0]] & 0xff) + } + + $buffer[$start++] = 0x0 + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/concat/multiple.serializer.bff.js b/test/generated/fixed/concat/multiple.serializer.bff.js new file mode 100644 index 00000000..bf1c4d05 --- /dev/null +++ b/test/generated/fixed/concat/multiple.serializer.bff.js @@ -0,0 +1,32 @@ +module.exports = function ({ serializers }) { + 
serializers.bff.object = function () { + + + return function (object) { + return function ($buffer, $start, $end) { + let $_, $i = [] + + if ($end - $start < 9 + 1 * object.sentry.length) { + return serializers.inc.object(object, 0, $i)($buffer, $start, $end) + } + + $_ = $start + object.array.copy($buffer, $start) + $start += object.array.length + $_ += object.array.length + + $_ = 8 - $_ + $buffer.fill(Buffer.from([ 0xa, 0xb ]), $start, $start + $_) + $start += $_ + + for ($i[0] = 0; $i[0] < object.sentry.length; $i[0]++) { + $buffer[$start++] = (object.sentry[$i[0]] & 0xff) + } + + $buffer[$start++] = 0x0 + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/concat/multiple.serializer.inc.js b/test/generated/fixed/concat/multiple.serializer.inc.js new file mode 100644 index 00000000..5dd89988 --- /dev/null +++ b/test/generated/fixed/concat/multiple.serializer.inc.js @@ -0,0 +1,120 @@ +module.exports = function ({ serializers }) { + serializers.inc.object = function () { + + + return function (object, $step = 0, $i = []) { + let $_, $bite + + return function serialize ($buffer, $start, $end) { + for (;;) { + switch ($step) { + case 0: + + $_ = 0 + + $step = 1 + + case 1: { + + const length = Math.min($end - $start, object.array.length - $_) + object.array.copy($buffer, $start, $_, $_ + length) + $start += length + $_ += length + + if ($_ != object.array.length) { + return { start: $start, serialize } + } + + $step = 2 + + } + + case 2: + + if ($start == $end) { + return { start: $start, serialize } + } + + if ($_++ == 8) { + $step = 4 + continue + } + + $buffer[$start++] = 0xa + + $step = 3 + + case 3: + + if ($start == $end) { + return { start: $start, serialize } + } + + if ($_++ == 8) { + $step = 4 + continue + } + + $buffer[$start++] = 0xb + + $step = 4 + + if ($_ != 8) { + $step = 2 + continue + } + + case 4: + + $i[0] = 0 + $step = 5 + + case 5: + + $step = 6 + $bite = 0 + $_ = object.sentry[$i[0]] + + case 6: + + 
while ($bite != -1) { + if ($start == $end) { + return { start: $start, serialize } + } + $buffer[$start++] = ($_ >>> $bite * 8 & 0xff) + $bite-- + } + + if (++$i[0] != object.sentry.length) { + $step = 5 + continue + } + + $step = 7 + + case 7: + + if ($start == $end) { + return { start: $start, serialize } + } + + $buffer[$start++] = 0x0 + + $step = 8 + + $step = 8 + + case 8: + + break + + } + + break + } + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/concat/multiple.sizeof.js b/test/generated/fixed/concat/multiple.sizeof.js new file mode 100644 index 00000000..b4c4734d --- /dev/null +++ b/test/generated/fixed/concat/multiple.sizeof.js @@ -0,0 +1,15 @@ +module.exports = function ({ sizeOf }) { + sizeOf.object = function () { + + + return function (object) { + let $start = 0 + + $start += 8 + + $start += 1 * object.sentry.length + 1 + + return $start + } + } () +} diff --git a/test/generated/fixed/concat/single.parser.all.js b/test/generated/fixed/concat/single.parser.all.js new file mode 100644 index 00000000..dbb98434 --- /dev/null +++ b/test/generated/fixed/concat/single.parser.all.js @@ -0,0 +1,40 @@ +module.exports = function ({ parsers }) { + parsers.all.object = function () { + + + return function ($buffer, $start) { + let $_, $i = [], $slice = null + + let object = { + array: Buffer.alloc(8), + sentry: [] + } + + $slice = $buffer.slice($start, 8) + $start += 8 + + $_ = $slice.indexOf(0) + if (~$_) { + $slice = $buffer.slice(0, $_) + } + + object.array = $slice + + $i[0] = 0 + for (;;) { + if ( + $buffer[$start] == 0x0 + ) { + $start += 1 + break + } + + object.sentry[$i[0]] = ($buffer[$start++]) + + $i[0]++ + } + + return object + } + } () +} diff --git a/test/generated/fixed/concat/single.parser.bff.js b/test/generated/fixed/concat/single.parser.bff.js new file mode 100644 index 00000000..48e2cdcb --- /dev/null +++ b/test/generated/fixed/concat/single.parser.bff.js @@ -0,0 +1,54 @@ +module.exports = 
function ({ parsers }) { + parsers.bff.object = function () { + + + return function () { + return function parse ($buffer, $start, $end) { + let $_, $i = [], $slice = null + + let object = { + array: Buffer.alloc(8), + sentry: [] + } + + if ($end - $start < 8) { + return parsers.inc.object(object, 1, $i)($buffer, $start, $end) + } + + $slice = $buffer.slice($start, 8) + $start += 8 + + $_ = $slice.indexOf(0) + if (~$_) { + $slice = $buffer.slice(0, $_) + } + + object.array = $slice + + $i[0] = 0 + for (;;) { + if ($end - $start < 1) { + return parsers.inc.object(object, 6, $i)($buffer, $start, $end) + } + + if ( + $buffer[$start] == 0x0 + ) { + $start += 1 + break + } + + if ($end - $start < 1) { + return parsers.inc.object(object, 8, $i)($buffer, $start, $end) + } + + object.sentry[$i[0]] = ($buffer[$start++]) + + $i[0]++ + } + + return { start: $start, object: object, parse: null } + } + } () + } +} diff --git a/test/generated/fixed/concat/single.parser.inc.js b/test/generated/fixed/concat/single.parser.inc.js new file mode 100644 index 00000000..1fedf0cb --- /dev/null +++ b/test/generated/fixed/concat/single.parser.inc.js @@ -0,0 +1,136 @@ +module.exports = function ({ parsers }) { + parsers.inc.object = function () { + + + return function (object = {}, $step = 0, $i = []) { + let $_, $bite, $buffers = [], $length = 0 + + return function parse ($buffer, $start, $end) { + for (;;) { + switch ($step) { + case 0: + + object = { + array: Buffer.alloc(8), + sentry: [] + } + + $step = 1 + + case 1: + + $_ = 0 + + $step = 2 + + case 2: { + + const $index = $buffer.indexOf(0x0, $start) + if (~$index) { + if ($_ + $index > 8) { + const $length = 8 - $_ + $buffers.push($buffer.slice($start, $start + $length)) + $_ += $length + $start += $length + $step = 3 + continue + } else { + $buffers.push($buffer.slice($start, $index)) + $_ += ($index - $start) + 1 + $start = $index + 1 + $step = 3 + continue + } + } else if ($_ + ($end - $start) >= 8) { + const $length = 8 - $_ + 
$buffers.push($buffer.slice($start, $start + $length)) + $_ += $length + $start += $length + $step = 3 + continue + } else { + $_ += $end - $start + $buffers.push($buffer.slice($start)) + return { start: $end, parse } + } + + $step = 3 + + } + + + case 3: + + $_ = 8 - Math.min($buffers.reduce((sum, buffer) => { + return sum + buffer.length + }, 1), 8) + + object.array = $buffers.length == 1 ? $buffers[0] : Buffer.concat($buffers) + $buffers.length = 0 + + $step = 4 + + case 4: { + + const length = Math.min($_, $end - $start) + $start += length + $_ -= length + + if ($_ != 0) { + return { start: $start, parse } + } + + $step = 5 + + } + + case 5: + + $i[0] = 0 + + case 6: + + if ($start == $end) { + return { start: $start, parse } + } + + if ($buffer[$start] == 0x0) { + $start++ + $step = 11 + continue + } + + $step = 7 + + case 7: + + + case 8: + + $step = 9 + + case 9: + + if ($start == $end) { + return { start: $start, object: null, parse } + } + + object.sentry[$i[0]] = $buffer[$start++] + + + case 10: + + $i[0]++ + $step = 6 + continue + + case 11: + + return { start: $start, object: object, parse: null } + } + break + } + } + } + } () +} diff --git a/test/generated/fixed/concat/single.serializer.all.js b/test/generated/fixed/concat/single.serializer.all.js new file mode 100644 index 00000000..3126bc2b --- /dev/null +++ b/test/generated/fixed/concat/single.serializer.all.js @@ -0,0 +1,28 @@ +module.exports = function ({ serializers }) { + serializers.all.object = function () { + + + return function (object) { + return function ($buffer, $start, $end) { + let $_, $i = [] + + $_ = $start + object.array.copy($buffer, $start) + $start += object.array.length + $_ += object.array.length + + $_ = 8 - $_ + $buffer.fill(0x0, $start, $start + $_) + $start += $_ + + for ($i[0] = 0; $i[0] < object.sentry.length; $i[0]++) { + $buffer[$start++] = (object.sentry[$i[0]] & 0xff) + } + + $buffer[$start++] = 0x0 + + return { start: $start, serialize: null } + } + } + } () +} diff 
--git a/test/generated/fixed/concat/single.serializer.bff.js b/test/generated/fixed/concat/single.serializer.bff.js new file mode 100644 index 00000000..8207283c --- /dev/null +++ b/test/generated/fixed/concat/single.serializer.bff.js @@ -0,0 +1,32 @@ +module.exports = function ({ serializers }) { + serializers.bff.object = function () { + + + return function (object) { + return function ($buffer, $start, $end) { + let $_, $i = [] + + if ($end - $start < 9 + 1 * object.sentry.length) { + return serializers.inc.object(object, 0, $i)($buffer, $start, $end) + } + + $_ = $start + object.array.copy($buffer, $start) + $start += object.array.length + $_ += object.array.length + + $_ = 8 - $_ + $buffer.fill(0x0, $start, $start + $_) + $start += $_ + + for ($i[0] = 0; $i[0] < object.sentry.length; $i[0]++) { + $buffer[$start++] = (object.sentry[$i[0]] & 0xff) + } + + $buffer[$start++] = 0x0 + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/concat/single.serializer.inc.js b/test/generated/fixed/concat/single.serializer.inc.js new file mode 100644 index 00000000..1710d55a --- /dev/null +++ b/test/generated/fixed/concat/single.serializer.inc.js @@ -0,0 +1,106 @@ +module.exports = function ({ serializers }) { + serializers.inc.object = function () { + + + return function (object, $step = 0, $i = []) { + let $_, $bite + + return function serialize ($buffer, $start, $end) { + for (;;) { + switch ($step) { + case 0: + + $_ = 0 + + $step = 1 + + case 1: { + + const length = Math.min($end - $start, object.array.length - $_) + object.array.copy($buffer, $start, $_, $_ + length) + $start += length + $_ += length + + if ($_ != object.array.length) { + return { start: $start, serialize } + } + + $step = 2 + + } + + case 2: + + $_ = 8 - $_ + + $step = 3 + + case 3: { + + const length = Math.min($end - $start, $_) + $buffer.fill(0x0, $start, $start + length) + $start += length + $_ -= length + + if ($_ != 0) { + return { start: $start, 
serialize } + } + + $step = 4 + + } + + case 4: + + $i[0] = 0 + $step = 5 + + case 5: + + $step = 6 + $bite = 0 + $_ = object.sentry[$i[0]] + + case 6: + + while ($bite != -1) { + if ($start == $end) { + return { start: $start, serialize } + } + $buffer[$start++] = ($_ >>> $bite * 8 & 0xff) + $bite-- + } + + if (++$i[0] != object.sentry.length) { + $step = 5 + continue + } + + $step = 7 + + case 7: + + if ($start == $end) { + return { start: $start, serialize } + } + + $buffer[$start++] = 0x0 + + $step = 8 + + $step = 8 + + case 8: + + break + + } + + break + } + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/concat/single.sizeof.js b/test/generated/fixed/concat/single.sizeof.js new file mode 100644 index 00000000..b4c4734d --- /dev/null +++ b/test/generated/fixed/concat/single.sizeof.js @@ -0,0 +1,15 @@ +module.exports = function ({ sizeOf }) { + sizeOf.object = function () { + + + return function (object) { + let $start = 0 + + $start += 8 + + $start += 1 * object.sentry.length + 1 + + return $start + } + } () +} diff --git a/test/generated/fixed/concat/unpadded.parser.all.js b/test/generated/fixed/concat/unpadded.parser.all.js new file mode 100644 index 00000000..00afbcc5 --- /dev/null +++ b/test/generated/fixed/concat/unpadded.parser.all.js @@ -0,0 +1,34 @@ +module.exports = function ({ parsers }) { + parsers.all.object = function () { + + + return function ($buffer, $start) { + let $_, $i = [], $slice = null + + let object = { + array: Buffer.alloc(8), + sentry: [] + } + + $slice = $buffer.slice($start, 8) + $start += 8 + object.array = $slice + + $i[0] = 0 + for (;;) { + if ( + $buffer[$start] == 0x0 + ) { + $start += 1 + break + } + + object.sentry[$i[0]] = ($buffer[$start++]) + + $i[0]++ + } + + return object + } + } () +} diff --git a/test/generated/fixed/concat/unpadded.parser.bff.js b/test/generated/fixed/concat/unpadded.parser.bff.js new file mode 100644 index 00000000..4c4fed43 --- /dev/null +++ 
b/test/generated/fixed/concat/unpadded.parser.bff.js @@ -0,0 +1,48 @@ +module.exports = function ({ parsers }) { + parsers.bff.object = function () { + + + return function () { + return function parse ($buffer, $start, $end) { + let $_, $i = [], $slice = null + + let object = { + array: Buffer.alloc(8), + sentry: [] + } + + if ($end - $start < 8) { + return parsers.inc.object(object, 1, $i)($buffer, $start, $end) + } + + $slice = $buffer.slice($start, 8) + $start += 8 + object.array = $slice + + $i[0] = 0 + for (;;) { + if ($end - $start < 1) { + return parsers.inc.object(object, 4, $i)($buffer, $start, $end) + } + + if ( + $buffer[$start] == 0x0 + ) { + $start += 1 + break + } + + if ($end - $start < 1) { + return parsers.inc.object(object, 6, $i)($buffer, $start, $end) + } + + object.sentry[$i[0]] = ($buffer[$start++]) + + $i[0]++ + } + + return { start: $start, object: object, parse: null } + } + } () + } +} diff --git a/test/generated/fixed/concat/unpadded.parser.inc.js b/test/generated/fixed/concat/unpadded.parser.inc.js new file mode 100644 index 00000000..1e370311 --- /dev/null +++ b/test/generated/fixed/concat/unpadded.parser.inc.js @@ -0,0 +1,90 @@ +module.exports = function ({ parsers }) { + parsers.inc.object = function () { + + + return function (object = {}, $step = 0, $i = []) { + let $_, $bite, $buffers = [], $length = 0 + + return function parse ($buffer, $start, $end) { + for (;;) { + switch ($step) { + case 0: + + object = { + array: Buffer.alloc(8), + sentry: [] + } + + $step = 1 + + case 1: + + $_ = 0 + + $step = 2 + + case 2: { + + const length = Math.min($end - $start, 8 - $_) + $buffer.copy(object.array, $_, $start, $start + length) + $start += length + $_ += length + + if ($_ != 8) { + return { start: $start, parse } + } + + $step = 3 + + } + + case 3: + + $i[0] = 0 + + case 4: + + if ($start == $end) { + return { start: $start, parse } + } + + if ($buffer[$start] == 0x0) { + $start++ + $step = 9 + continue + } + + $step = 5 + + case 5: + + 
+ case 6: + + $step = 7 + + case 7: + + if ($start == $end) { + return { start: $start, object: null, parse } + } + + object.sentry[$i[0]] = $buffer[$start++] + + + case 8: + + $i[0]++ + $step = 4 + continue + + case 9: + + return { start: $start, object: object, parse: null } + } + break + } + } + } + } () +} diff --git a/test/generated/fixed/concat/unpadded.serializer.all.js b/test/generated/fixed/concat/unpadded.serializer.all.js new file mode 100644 index 00000000..ddffcb64 --- /dev/null +++ b/test/generated/fixed/concat/unpadded.serializer.all.js @@ -0,0 +1,24 @@ +module.exports = function ({ serializers }) { + serializers.all.object = function () { + + + return function (object) { + return function ($buffer, $start, $end) { + let $_, $i = [] + + $_ = $start + object.array.copy($buffer, $start) + $start += object.array.length + $_ += object.array.length + + for ($i[0] = 0; $i[0] < object.sentry.length; $i[0]++) { + $buffer[$start++] = (object.sentry[$i[0]] & 0xff) + } + + $buffer[$start++] = 0x0 + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/concat/unpadded.serializer.bff.js b/test/generated/fixed/concat/unpadded.serializer.bff.js new file mode 100644 index 00000000..3e22e35d --- /dev/null +++ b/test/generated/fixed/concat/unpadded.serializer.bff.js @@ -0,0 +1,28 @@ +module.exports = function ({ serializers }) { + serializers.bff.object = function () { + + + return function (object) { + return function ($buffer, $start, $end) { + let $_, $i = [] + + if ($end - $start < 9 + 1 * object.sentry.length) { + return serializers.inc.object(object, 0, $i)($buffer, $start, $end) + } + + $_ = $start + object.array.copy($buffer, $start) + $start += object.array.length + $_ += object.array.length + + for ($i[0] = 0; $i[0] < object.sentry.length; $i[0]++) { + $buffer[$start++] = (object.sentry[$i[0]] & 0xff) + } + + $buffer[$start++] = 0x0 + + return { start: $start, serialize: null } + } + } + } () +} diff --git 
a/test/generated/fixed/concat/unpadded.serializer.inc.js b/test/generated/fixed/concat/unpadded.serializer.inc.js new file mode 100644 index 00000000..1d6a472d --- /dev/null +++ b/test/generated/fixed/concat/unpadded.serializer.inc.js @@ -0,0 +1,85 @@ +module.exports = function ({ serializers }) { + serializers.inc.object = function () { + + + return function (object, $step = 0, $i = []) { + let $_, $bite + + return function serialize ($buffer, $start, $end) { + for (;;) { + switch ($step) { + case 0: + + $_ = 0 + + $step = 1 + + case 1: { + + const length = Math.min($end - $start, object.array.length - $_) + object.array.copy($buffer, $start, $_, $_ + length) + $start += length + $_ += length + + if ($_ != object.array.length) { + return { start: $start, serialize } + } + + $step = 2 + + } + + case 2: + + $i[0] = 0 + $step = 3 + + case 3: + + $step = 4 + $bite = 0 + $_ = object.sentry[$i[0]] + + case 4: + + while ($bite != -1) { + if ($start == $end) { + return { start: $start, serialize } + } + $buffer[$start++] = ($_ >>> $bite * 8 & 0xff) + $bite-- + } + + if (++$i[0] != object.sentry.length) { + $step = 3 + continue + } + + $step = 5 + + case 5: + + if ($start == $end) { + return { start: $start, serialize } + } + + $buffer[$start++] = 0x0 + + $step = 6 + + $step = 6 + + case 6: + + break + + } + + break + } + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/fixed/concat/unpadded.sizeof.js b/test/generated/fixed/concat/unpadded.sizeof.js new file mode 100644 index 00000000..b4c4734d --- /dev/null +++ b/test/generated/fixed/concat/unpadded.sizeof.js @@ -0,0 +1,15 @@ +module.exports = function ({ sizeOf }) { + sizeOf.object = function () { + + + return function (object) { + let $start = 0 + + $start += 8 + + $start += 1 * object.sentry.length + 1 + + return $start + } + } () +} diff --git a/test/generated/fixed/padded/multi.parser.all.js b/test/generated/fixed/words/multi.parser.all.js similarity index 61% rename from 
test/generated/fixed/padded/multi.parser.all.js rename to test/generated/fixed/words/multi.parser.all.js index 2a2ace21..a9b9030e 100644 --- a/test/generated/fixed/padded/multi.parser.all.js +++ b/test/generated/fixed/words/multi.parser.all.js @@ -7,7 +7,7 @@ module.exports = function ({ parsers }) { let object = { array: [], - sentry: 0 + sentry: [] } $i[0] = 0 @@ -29,9 +29,23 @@ module.exports = function ({ parsers }) { } - $start += (16 - $i[0]) * 1 - 2 + $start += 16 != $i[0] + ? (16 - $i[0]) * 1 - 2 + : 0 - object.sentry = ($buffer[$start++]) + $i[0] = 0 + for (;;) { + if ( + $buffer[$start] == 0x0 + ) { + $start += 1 + break + } + + object.sentry[$i[0]] = ($buffer[$start++]) + + $i[0]++ + } return object } diff --git a/test/generated/fixed/padded/multi.parser.bff.js b/test/generated/fixed/words/multi.parser.bff.js similarity index 57% rename from test/generated/fixed/padded/multi.parser.bff.js rename to test/generated/fixed/words/multi.parser.bff.js index f1e3a998..d8413bd2 100644 --- a/test/generated/fixed/padded/multi.parser.bff.js +++ b/test/generated/fixed/words/multi.parser.bff.js @@ -8,10 +8,10 @@ module.exports = function ({ parsers }) { let object = { array: [], - sentry: 0 + sentry: [] } - if ($end - $start < 17) { + if ($end - $start < 16) { return parsers.inc.object(object, 1, $i)($buffer, $start, $end) } @@ -38,9 +38,31 @@ module.exports = function ({ parsers }) { } - $start += (16 - $i[0]) * 1 - 2 + $start += 16 != $i[0] + ? 
(16 - $i[0]) * 1 - 2 + : 0 - object.sentry = ($buffer[$start++]) + $i[0] = 0 + for (;;) { + if ($end - $start < 1) { + return parsers.inc.object(object, 11, $i)($buffer, $start, $end) + } + + if ( + $buffer[$start] == 0x0 + ) { + $start += 1 + break + } + + if ($end - $start < 1) { + return parsers.inc.object(object, 13, $i)($buffer, $start, $end) + } + + object.sentry[$i[0]] = ($buffer[$start++]) + + $i[0]++ + } return { start: $start, object: object, parse: null } } diff --git a/test/generated/fixed/padded/multi.parser.inc.js b/test/generated/fixed/words/multi.parser.inc.js similarity index 69% rename from test/generated/fixed/padded/multi.parser.inc.js rename to test/generated/fixed/words/multi.parser.inc.js index 9cccdc37..863d8fb2 100644 --- a/test/generated/fixed/padded/multi.parser.inc.js +++ b/test/generated/fixed/words/multi.parser.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ parsers }) { return function (object = {}, $step = 0, $i = []) { - let $_, $bite + let $_, $bite, $length = 0 return function parse ($buffer, $start, $end) { for (;;) { @@ -12,7 +12,7 @@ module.exports = function ({ parsers }) { object = { array: [], - sentry: 0 + sentry: [] } $step = 1 @@ -23,6 +23,11 @@ module.exports = function ({ parsers }) { case 2: + if ($i[0] == 16) { + $step = 8 + continue + } + if ($start == $end) { return { start: $start, parse } } @@ -53,6 +58,7 @@ module.exports = function ({ parsers }) { case 4: + case 5: $step = 6 @@ -69,47 +75,73 @@ module.exports = function ({ parsers }) { case 7: $i[0]++ - - if ($i[0] == 16) { - $step = 8 - continue - } - $step = 2 continue case 8: - $_ = (16 - $i[0]) * 1 - 2 + $_ = 16 != $i[0] + ? 
(16 - $i[0]) * 1 - 2 + : 0 + $step = 9 - case 9: + case 9: { - $bite = Math.min($end - $start, $_) - $_ -= $bite - $start += $bite + const length = Math.min($_, $end - $start) + $start += length + $_ -= length if ($_ != 0) { - return { start: $start, object: null, parse } + return { start: $start, parse } } $step = 10 + } + case 10: - $step = 11 + $i[0] = 0 case 11: if ($start == $end) { - return { start: $start, object: null, parse } + return { start: $start, parse } } - object.sentry = $buffer[$start++] + if ($buffer[$start] == 0x0) { + $start++ + $step = 16 + continue + } + $step = 12 case 12: + + case 13: + + $step = 14 + + case 14: + + if ($start == $end) { + return { start: $start, object: null, parse } + } + + object.sentry[$i[0]] = $buffer[$start++] + + + case 15: + + $i[0]++ + $step = 11 + continue + + case 16: + return { start: $start, object: object, parse: null } } break diff --git a/test/generated/fixed/padded/multi.serializer.all.js b/test/generated/fixed/words/multi.serializer.all.js similarity index 80% rename from test/generated/fixed/padded/multi.serializer.all.js rename to test/generated/fixed/words/multi.serializer.all.js index db88ce81..c9595d54 100644 --- a/test/generated/fixed/padded/multi.serializer.all.js +++ b/test/generated/fixed/words/multi.serializer.all.js @@ -24,7 +24,11 @@ module.exports = function ({ serializers }) { $i[0]++ } - $buffer[$start++] = (object.sentry & 0xff) + for ($i[0] = 0; $i[0] < object.sentry.length; $i[0]++) { + $buffer[$start++] = (object.sentry[$i[0]] & 0xff) + } + + $buffer[$start++] = 0x0 return { start: $start, serialize: null } } diff --git a/test/generated/fixed/padded/multi.serializer.bff.js b/test/generated/fixed/words/multi.serializer.bff.js similarity index 77% rename from test/generated/fixed/padded/multi.serializer.bff.js rename to test/generated/fixed/words/multi.serializer.bff.js index 25516843..1b305c42 100644 --- a/test/generated/fixed/padded/multi.serializer.bff.js +++ 
b/test/generated/fixed/words/multi.serializer.bff.js @@ -6,7 +6,7 @@ module.exports = function ({ serializers }) { return function ($buffer, $start, $end) { let $i = [] - if ($end - $start < 17) { + if ($end - $start < 17 + 1 * object.sentry.length) { return serializers.inc.object(object, 0, $i)($buffer, $start, $end) } @@ -28,7 +28,11 @@ module.exports = function ({ serializers }) { $i[0]++ } - $buffer[$start++] = (object.sentry & 0xff) + for ($i[0] = 0; $i[0] < object.sentry.length; $i[0]++) { + $buffer[$start++] = (object.sentry[$i[0]] & 0xff) + } + + $buffer[$start++] = 0x0 return { start: $start, serialize: null } } diff --git a/test/generated/fixed/padded/multi.serializer.inc.js b/test/generated/fixed/words/multi.serializer.inc.js similarity index 76% rename from test/generated/fixed/padded/multi.serializer.inc.js rename to test/generated/fixed/words/multi.serializer.inc.js index e3e3c202..c2aac3e7 100644 --- a/test/generated/fixed/padded/multi.serializer.inc.js +++ b/test/generated/fixed/words/multi.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { @@ -34,6 +34,8 @@ module.exports = function ({ serializers }) { continue } + $_ = $i[0] * 1 + $step = 3 case 3: @@ -42,7 +44,7 @@ module.exports = function ({ serializers }) { return { start: $start, serialize } } - if ($i[0]++ == 16) { + if ($_++ == 16) { $step = 5 continue } @@ -57,7 +59,7 @@ module.exports = function ({ serializers }) { return { start: $start, serialize } } - if ($i[0]++ == 16) { + if ($_++ == 16) { $step = 5 continue } @@ -66,19 +68,24 @@ module.exports = function ({ serializers }) { $step = 5 - if ($i[0] != 16) { - $step = 3 - continue - } + if ($_ != 16) { + $step = 3 + continue + } case 5: + $i[0] = 0 $step = 6 - $bite = 0 - $_ = object.sentry case 6: + $step = 7 + $bite = 0 + $_ = object.sentry[$i[0]] + + case 7: 
+ while ($bite != -1) { if ($start == $end) { return { start: $start, serialize } @@ -87,10 +94,26 @@ module.exports = function ({ serializers }) { $bite-- } + if (++$i[0] != object.sentry.length) { + $step = 6 + continue + } - $step = 7 + $step = 8 - case 7: + case 8: + + if ($start == $end) { + return { start: $start, serialize } + } + + $buffer[$start++] = 0x0 + + $step = 9 + + $step = 9 + + case 9: break diff --git a/test/generated/fixed/words/multi.sizeof.js b/test/generated/fixed/words/multi.sizeof.js new file mode 100644 index 00000000..53fa2bbe --- /dev/null +++ b/test/generated/fixed/words/multi.sizeof.js @@ -0,0 +1,15 @@ +module.exports = function ({ sizeOf }) { + sizeOf.object = function () { + + + return function (object) { + let $start = 0 + + $start += 16 + + $start += 1 * object.sentry.length + 1 + + return $start + } + } () +} diff --git a/test/generated/fixed/words/single.parser.all.js b/test/generated/fixed/words/single.parser.all.js new file mode 100644 index 00000000..22cdbf47 --- /dev/null +++ b/test/generated/fixed/words/single.parser.all.js @@ -0,0 +1,52 @@ +module.exports = function ({ parsers }) { + parsers.all.object = function () { + + + return function ($buffer, $start) { + let $i = [] + + let object = { + array: [], + sentry: [] + } + + $i[0] = 0 + for (;;) { + if ( + $buffer[$start] == 0x0 + ) { + $start += 1 + break + } + + object.array[$i[0]] = ($buffer[$start++]) + $i[0]++ + + if ($i[0] == 8) { + break + } + } + + + $start += 8 != $i[0] + ? 
(8 - $i[0]) * 1 - 1 + : 0 + + $i[0] = 0 + for (;;) { + if ( + $buffer[$start] == 0x0 + ) { + $start += 1 + break + } + + object.sentry[$i[0]] = ($buffer[$start++]) + + $i[0]++ + } + + return object + } + } () +} diff --git a/test/generated/fixed/words/single.parser.bff.js b/test/generated/fixed/words/single.parser.bff.js new file mode 100644 index 00000000..1116e325 --- /dev/null +++ b/test/generated/fixed/words/single.parser.bff.js @@ -0,0 +1,70 @@ +module.exports = function ({ parsers }) { + parsers.bff.object = function () { + + + return function () { + return function parse ($buffer, $start, $end) { + let $i = [] + + let object = { + array: [], + sentry: [] + } + + if ($end - $start < 8) { + return parsers.inc.object(object, 1, $i)($buffer, $start, $end) + } + + $i[0] = 0 + for (;;) { + if ($end - $start < 1) { + return parsers.inc.object(object, 2, $i)($buffer, $start, $end) + } + + if ( + $buffer[$start] == 0x0 + ) { + $start += 1 + break + } + + object.array[$i[0]] = ($buffer[$start++]) + $i[0]++ + + if ($i[0] == 8) { + break + } + } + + + $start += 8 != $i[0] + ? 
(8 - $i[0]) * 1 - 1 + : 0 + + $i[0] = 0 + for (;;) { + if ($end - $start < 1) { + return parsers.inc.object(object, 10, $i)($buffer, $start, $end) + } + + if ( + $buffer[$start] == 0x0 + ) { + $start += 1 + break + } + + if ($end - $start < 1) { + return parsers.inc.object(object, 12, $i)($buffer, $start, $end) + } + + object.sentry[$i[0]] = ($buffer[$start++]) + + $i[0]++ + } + + return { start: $start, object: object, parse: null } + } + } () + } +} diff --git a/test/generated/fixed/padded/single.parser.inc.js b/test/generated/fixed/words/single.parser.inc.js similarity index 63% rename from test/generated/fixed/padded/single.parser.inc.js rename to test/generated/fixed/words/single.parser.inc.js index d79f00e4..7a333e2b 100644 --- a/test/generated/fixed/padded/single.parser.inc.js +++ b/test/generated/fixed/words/single.parser.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ parsers }) { return function (object = {}, $step = 0, $i = []) { - let $_, $bite + let $_, $bite, $length = 0 return function parse ($buffer, $start, $end) { for (;;) { @@ -12,7 +12,7 @@ module.exports = function ({ parsers }) { object = { array: [], - sentry: 0 + sentry: [] } $step = 1 @@ -23,6 +23,11 @@ module.exports = function ({ parsers }) { case 2: + if ($i[0] == 8) { + $step = 7 + continue + } + if ($start == $end) { return { start: $start, parse } } @@ -33,10 +38,11 @@ module.exports = function ({ parsers }) { continue } - $step = 6 + $step = 3 case 3: + case 4: $step = 5 @@ -53,47 +59,73 @@ module.exports = function ({ parsers }) { case 6: $i[0]++ - - if ($i[0] == 8) { - $step = 7 - continue - } - $step = 2 continue case 7: - $_ = (8 - $i[0]) * 1 - 1 + $_ = 8 != $i[0] + ? 
(8 - $i[0]) * 1 - 1 + : 0 + $step = 8 - case 8: + case 8: { - $bite = Math.min($end - $start, $_) - $_ -= $bite - $start += $bite + const length = Math.min($_, $end - $start) + $start += length + $_ -= length if ($_ != 0) { - return { start: $start, object: null, parse } + return { start: $start, parse } } $step = 9 + } + case 9: - $step = 10 + $i[0] = 0 case 10: if ($start == $end) { - return { start: $start, object: null, parse } + return { start: $start, parse } } - object.sentry = $buffer[$start++] + if ($buffer[$start] == 0x0) { + $start++ + $step = 15 + continue + } + $step = 11 case 11: + + case 12: + + $step = 13 + + case 13: + + if ($start == $end) { + return { start: $start, object: null, parse } + } + + object.sentry[$i[0]] = $buffer[$start++] + + + case 14: + + $i[0]++ + $step = 10 + continue + + case 15: + return { start: $start, object: object, parse: null } } break diff --git a/test/generated/fixed/padded/single.serializer.all.js b/test/generated/fixed/words/single.serializer.all.js similarity index 76% rename from test/generated/fixed/padded/single.serializer.all.js rename to test/generated/fixed/words/single.serializer.all.js index e2af1215..18a766dc 100644 --- a/test/generated/fixed/padded/single.serializer.all.js +++ b/test/generated/fixed/words/single.serializer.all.js @@ -18,7 +18,11 @@ module.exports = function ({ serializers }) { $i[0]++ } - $buffer[$start++] = (object.sentry & 0xff) + for ($i[0] = 0; $i[0] < object.sentry.length; $i[0]++) { + $buffer[$start++] = (object.sentry[$i[0]] & 0xff) + } + + $buffer[$start++] = 0x0 return { start: $start, serialize: null } } diff --git a/test/generated/fixed/padded/single.serializer.bff.js b/test/generated/fixed/words/single.serializer.bff.js similarity index 74% rename from test/generated/fixed/padded/single.serializer.bff.js rename to test/generated/fixed/words/single.serializer.bff.js index b6ccc7cd..53fa54d3 100644 --- a/test/generated/fixed/padded/single.serializer.bff.js +++ 
b/test/generated/fixed/words/single.serializer.bff.js @@ -6,7 +6,7 @@ module.exports = function ({ serializers }) { return function ($buffer, $start, $end) { let $i = [] - if ($end - $start < 9) { + if ($end - $start < 9 + 1 * object.sentry.length) { return serializers.inc.object(object, 0, $i)($buffer, $start, $end) } @@ -22,7 +22,11 @@ module.exports = function ({ serializers }) { $i[0]++ } - $buffer[$start++] = (object.sentry & 0xff) + for ($i[0] = 0; $i[0] < object.sentry.length; $i[0]++) { + $buffer[$start++] = (object.sentry[$i[0]] & 0xff) + } + + $buffer[$start++] = 0x0 return { start: $start, serialize: null } } diff --git a/test/generated/fixed/padded/single.serializer.inc.js b/test/generated/fixed/words/single.serializer.inc.js similarity index 74% rename from test/generated/fixed/padded/single.serializer.inc.js rename to test/generated/fixed/words/single.serializer.inc.js index a4d46171..5892de9d 100644 --- a/test/generated/fixed/padded/single.serializer.inc.js +++ b/test/generated/fixed/words/single.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { @@ -34,6 +34,8 @@ module.exports = function ({ serializers }) { continue } + $_ = $i[0] * 1 + $step = 3 case 3: @@ -42,7 +44,7 @@ module.exports = function ({ serializers }) { return { start: $start, serialize } } - if ($i[0]++ == 8) { + if ($_++ == 8) { $step = 4 continue } @@ -51,19 +53,24 @@ module.exports = function ({ serializers }) { $step = 4 - if ($i[0] != 8) { - $step = 3 - continue - } + if ($_ != 8) { + $step = 3 + continue + } case 4: + $i[0] = 0 $step = 5 - $bite = 0 - $_ = object.sentry case 5: + $step = 6 + $bite = 0 + $_ = object.sentry[$i[0]] + + case 6: + while ($bite != -1) { if ($start == $end) { return { start: $start, serialize } @@ -72,10 +79,26 @@ module.exports = function ({ serializers }) { $bite-- } + if 
(++$i[0] != object.sentry.length) { + $step = 5 + continue + } - $step = 6 + $step = 7 - case 6: + case 7: + + if ($start == $end) { + return { start: $start, serialize } + } + + $buffer[$start++] = 0x0 + + $step = 8 + + $step = 8 + + case 8: break diff --git a/test/generated/fixed/words/single.sizeof.js b/test/generated/fixed/words/single.sizeof.js new file mode 100644 index 00000000..b4c4734d --- /dev/null +++ b/test/generated/fixed/words/single.sizeof.js @@ -0,0 +1,15 @@ +module.exports = function ({ sizeOf }) { + sizeOf.object = function () { + + + return function (object) { + let $start = 0 + + $start += 8 + + $start += 1 * object.sentry.length + 1 + + return $start + } + } () +} diff --git a/test/generated/fixed/words.parser.all.js b/test/generated/fixed/words/unpadded.parser.all.js similarity index 100% rename from test/generated/fixed/words.parser.all.js rename to test/generated/fixed/words/unpadded.parser.all.js diff --git a/test/generated/fixed/words.parser.bff.js b/test/generated/fixed/words/unpadded.parser.bff.js similarity index 94% rename from test/generated/fixed/words.parser.bff.js rename to test/generated/fixed/words/unpadded.parser.bff.js index 664d4a0c..b9a1a31e 100644 --- a/test/generated/fixed/words.parser.bff.js +++ b/test/generated/fixed/words/unpadded.parser.bff.js @@ -31,7 +31,7 @@ module.exports = function ({ parsers }) { $i[0] = 0 for (;;) { if ($end - $start < 1) { - return parsers.inc.object(object, 9, $i)($buffer, $start, $end) + return parsers.inc.object(object, 7, $i)($buffer, $start, $end) } if ( @@ -42,7 +42,7 @@ module.exports = function ({ parsers }) { } if ($end - $start < 1) { - return parsers.inc.object(object, 11, $i)($buffer, $start, $end) + return parsers.inc.object(object, 9, $i)($buffer, $start, $end) } object.sentry[$i[0]] = ($buffer[$start++]) diff --git a/test/generated/fixed/words.parser.inc.js b/test/generated/fixed/words/unpadded.parser.inc.js similarity index 84% rename from test/generated/fixed/words.parser.inc.js 
rename to test/generated/fixed/words/unpadded.parser.inc.js index 720f052a..897567e3 100644 --- a/test/generated/fixed/words.parser.inc.js +++ b/test/generated/fixed/words/unpadded.parser.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ parsers }) { return function (object = {}, $step = 0, $i = []) { - let $_, $bite + let $_, $bite, $length = 0 return function parse ($buffer, $start, $end) { for (;;) { @@ -46,20 +46,15 @@ module.exports = function ({ parsers }) { $i[0]++ - if ($i[0] == 4) { - $step = 6 + if ($i[0] != 4) { + $step = 3 continue } - $step = 2 - continue - - case 6: - $_ = (4 - $i[0]) * 2 - 0 - $step = 7 + $step = 6 - case 7: + case 6: $bite = Math.min($end - $start, $_) $_ -= $bite @@ -69,36 +64,34 @@ module.exports = function ({ parsers }) { return { start: $start, object: null, parse } } - $step = 8 + $step = 7 - case 8: + case 7: $i[0] = 0 - case 9: + case 8: if ($start == $end) { return { start: $start, parse } } - if ($buffer[$start] != 0x0) { - $step = 10 - parse([ ], 0, 0) + if ($buffer[$start] == 0x0) { + $start++ + $step = 13 continue } - $start++ - $step = 14 - continue + $step = 9 - case 10: + case 9: - case 11: + case 10: - $step = 12 + $step = 11 - case 12: + case 11: if ($start == $end) { return { start: $start, object: null, parse } @@ -107,13 +100,13 @@ module.exports = function ({ parsers }) { object.sentry[$i[0]] = $buffer[$start++] - case 13: + case 12: $i[0]++ - $step = 9 + $step = 8 continue - case 14: + case 13: return { start: $start, object: object, parse: null } } diff --git a/test/generated/fixed/words.serializer.all.js b/test/generated/fixed/words/unpadded.serializer.all.js similarity index 100% rename from test/generated/fixed/words.serializer.all.js rename to test/generated/fixed/words/unpadded.serializer.all.js diff --git a/test/generated/fixed/words.serializer.bff.js b/test/generated/fixed/words/unpadded.serializer.bff.js similarity index 100% rename from test/generated/fixed/words.serializer.bff.js rename to 
test/generated/fixed/words/unpadded.serializer.bff.js diff --git a/test/generated/fixed/words.serializer.inc.js b/test/generated/fixed/words/unpadded.serializer.inc.js similarity index 92% rename from test/generated/fixed/words.serializer.inc.js rename to test/generated/fixed/words/unpadded.serializer.inc.js index 58785cb0..fe308207 100644 --- a/test/generated/fixed/words.serializer.inc.js +++ b/test/generated/fixed/words/unpadded.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { @@ -34,13 +34,15 @@ module.exports = function ({ serializers }) { continue } + $_ = $i[0] * 2 + $step = 3 - if ($i[0] != 4) { - $step = 3 - continue - } + if ($_ != 8) { + $step = 3 + continue + } case 3: diff --git a/test/generated/fixed/words/unpadded.sizeof.js b/test/generated/fixed/words/unpadded.sizeof.js new file mode 100644 index 00000000..b4c4734d --- /dev/null +++ b/test/generated/fixed/words/unpadded.sizeof.js @@ -0,0 +1,15 @@ +module.exports = function ({ sizeOf }) { + sizeOf.object = function () { + + + return function (object) { + let $start = 0 + + $start += 8 + + $start += 1 * object.sentry.length + 1 + + return $start + } + } () +} diff --git a/test/generated/ieee/be/double.parser.inc.js b/test/generated/ieee/be/double.parser.inc.js index 4982f5b9..e32b06ee 100644 --- a/test/generated/ieee/be/double.parser.inc.js +++ b/test/generated/ieee/be/double.parser.inc.js @@ -40,20 +40,15 @@ module.exports = function ({ parsers }) { $i[0]++ - if ($i[0] == 8) { - $step = 6 + if ($i[0] != 8) { + $step = 3 continue } - $step = 2 - continue - - case 6: - $_ = (8 - $i[0]) * 1 - 0 - $step = 7 + $step = 6 - case 7: + case 6: $bite = Math.min($end - $start, $_) $_ -= $bite @@ -63,16 +58,16 @@ module.exports = function ({ parsers }) { return { start: $start, object: null, parse } } - $step = 8 + $step = 7 
object.value = (function (value) { return Buffer.from(value).readDoubleBE() })(object.value) - case 8: + case 7: - $step = 9 + $step = 8 - case 9: + case 8: if ($start == $end) { return { start: $start, object: null, parse } @@ -81,7 +76,7 @@ module.exports = function ({ parsers }) { object.sentry = $buffer[$start++] - case 10: + case 9: return { start: $start, object: object, parse: null } } diff --git a/test/generated/ieee/be/double.serializer.inc.js b/test/generated/ieee/be/double.serializer.inc.js index 2f4f8a0c..6c461dbe 100644 --- a/test/generated/ieee/be/double.serializer.inc.js +++ b/test/generated/ieee/be/double.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = [], $$ = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { @@ -42,13 +42,15 @@ module.exports = function ({ serializers }) { continue } + $_ = $i[0] * 1 + $step = 4 - if ($i[0] != 8) { - $step = 4 - continue - } + if ($_ != 8) { + $step = 4 + continue + } case 4: diff --git a/test/generated/ieee/be/float.parser.inc.js b/test/generated/ieee/be/float.parser.inc.js index df950338..6c0b29cd 100644 --- a/test/generated/ieee/be/float.parser.inc.js +++ b/test/generated/ieee/be/float.parser.inc.js @@ -40,20 +40,15 @@ module.exports = function ({ parsers }) { $i[0]++ - if ($i[0] == 4) { - $step = 6 + if ($i[0] != 4) { + $step = 3 continue } - $step = 2 - continue - - case 6: - $_ = (4 - $i[0]) * 1 - 0 - $step = 7 + $step = 6 - case 7: + case 6: $bite = Math.min($end - $start, $_) $_ -= $bite @@ -63,16 +58,16 @@ module.exports = function ({ parsers }) { return { start: $start, object: null, parse } } - $step = 8 + $step = 7 object.value = (function (value) { return Buffer.from(value).readFloatBE() })(object.value) - case 8: + case 7: - $step = 9 + $step = 8 - case 9: + case 8: if ($start == $end) { return { start: $start, object: null, parse } @@ -81,7 +76,7 @@ module.exports = 
function ({ parsers }) { object.sentry = $buffer[$start++] - case 10: + case 9: return { start: $start, object: object, parse: null } } diff --git a/test/generated/ieee/be/float.serializer.inc.js b/test/generated/ieee/be/float.serializer.inc.js index 2bcd55b0..c85b455c 100644 --- a/test/generated/ieee/be/float.serializer.inc.js +++ b/test/generated/ieee/be/float.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = [], $$ = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { @@ -42,13 +42,15 @@ module.exports = function ({ serializers }) { continue } + $_ = $i[0] * 1 + $step = 4 - if ($i[0] != 4) { - $step = 4 - continue - } + if ($_ != 4) { + $step = 4 + continue + } case 4: diff --git a/test/generated/ieee/le/double.parser.inc.js b/test/generated/ieee/le/double.parser.inc.js index 4982f5b9..e32b06ee 100644 --- a/test/generated/ieee/le/double.parser.inc.js +++ b/test/generated/ieee/le/double.parser.inc.js @@ -40,20 +40,15 @@ module.exports = function ({ parsers }) { $i[0]++ - if ($i[0] == 8) { - $step = 6 + if ($i[0] != 8) { + $step = 3 continue } - $step = 2 - continue - - case 6: - $_ = (8 - $i[0]) * 1 - 0 - $step = 7 + $step = 6 - case 7: + case 6: $bite = Math.min($end - $start, $_) $_ -= $bite @@ -63,16 +58,16 @@ module.exports = function ({ parsers }) { return { start: $start, object: null, parse } } - $step = 8 + $step = 7 object.value = (function (value) { return Buffer.from(value).readDoubleBE() })(object.value) - case 8: + case 7: - $step = 9 + $step = 8 - case 9: + case 8: if ($start == $end) { return { start: $start, object: null, parse } @@ -81,7 +76,7 @@ module.exports = function ({ parsers }) { object.sentry = $buffer[$start++] - case 10: + case 9: return { start: $start, object: object, parse: null } } diff --git a/test/generated/ieee/le/double.serializer.inc.js b/test/generated/ieee/le/double.serializer.inc.js index 
2f4f8a0c..6c461dbe 100644 --- a/test/generated/ieee/le/double.serializer.inc.js +++ b/test/generated/ieee/le/double.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = [], $$ = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { @@ -42,13 +42,15 @@ module.exports = function ({ serializers }) { continue } + $_ = $i[0] * 1 + $step = 4 - if ($i[0] != 8) { - $step = 4 - continue - } + if ($_ != 8) { + $step = 4 + continue + } case 4: diff --git a/test/generated/ieee/le/float.parser.inc.js b/test/generated/ieee/le/float.parser.inc.js index df950338..6c0b29cd 100644 --- a/test/generated/ieee/le/float.parser.inc.js +++ b/test/generated/ieee/le/float.parser.inc.js @@ -40,20 +40,15 @@ module.exports = function ({ parsers }) { $i[0]++ - if ($i[0] == 4) { - $step = 6 + if ($i[0] != 4) { + $step = 3 continue } - $step = 2 - continue - - case 6: - $_ = (4 - $i[0]) * 1 - 0 - $step = 7 + $step = 6 - case 7: + case 6: $bite = Math.min($end - $start, $_) $_ -= $bite @@ -63,16 +58,16 @@ module.exports = function ({ parsers }) { return { start: $start, object: null, parse } } - $step = 8 + $step = 7 object.value = (function (value) { return Buffer.from(value).readFloatBE() })(object.value) - case 8: + case 7: - $step = 9 + $step = 8 - case 9: + case 8: if ($start == $end) { return { start: $start, object: null, parse } @@ -81,7 +76,7 @@ module.exports = function ({ parsers }) { object.sentry = $buffer[$start++] - case 10: + case 9: return { start: $start, object: object, parse: null } } diff --git a/test/generated/ieee/le/float.serializer.inc.js b/test/generated/ieee/le/float.serializer.inc.js index 2bcd55b0..c85b455c 100644 --- a/test/generated/ieee/le/float.serializer.inc.js +++ b/test/generated/ieee/le/float.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = [], $$ = []) { - let $bite, 
$stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { @@ -42,13 +42,15 @@ module.exports = function ({ serializers }) { continue } + $_ = $i[0] * 1 + $step = 4 - if ($i[0] != 4) { - $step = 4 - continue - } + if ($_ != 4) { + $step = 4 + continue + } case 4: diff --git a/test/generated/ieee/specified/be/double.parser.inc.js b/test/generated/ieee/specified/be/double.parser.inc.js index 4982f5b9..e32b06ee 100644 --- a/test/generated/ieee/specified/be/double.parser.inc.js +++ b/test/generated/ieee/specified/be/double.parser.inc.js @@ -40,20 +40,15 @@ module.exports = function ({ parsers }) { $i[0]++ - if ($i[0] == 8) { - $step = 6 + if ($i[0] != 8) { + $step = 3 continue } - $step = 2 - continue - - case 6: - $_ = (8 - $i[0]) * 1 - 0 - $step = 7 + $step = 6 - case 7: + case 6: $bite = Math.min($end - $start, $_) $_ -= $bite @@ -63,16 +58,16 @@ module.exports = function ({ parsers }) { return { start: $start, object: null, parse } } - $step = 8 + $step = 7 object.value = (function (value) { return Buffer.from(value).readDoubleBE() })(object.value) - case 8: + case 7: - $step = 9 + $step = 8 - case 9: + case 8: if ($start == $end) { return { start: $start, object: null, parse } @@ -81,7 +76,7 @@ module.exports = function ({ parsers }) { object.sentry = $buffer[$start++] - case 10: + case 9: return { start: $start, object: object, parse: null } } diff --git a/test/generated/ieee/specified/be/double.serializer.inc.js b/test/generated/ieee/specified/be/double.serializer.inc.js index 2f4f8a0c..6c461dbe 100644 --- a/test/generated/ieee/specified/be/double.serializer.inc.js +++ b/test/generated/ieee/specified/be/double.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = [], $$ = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { @@ -42,13 +42,15 @@ module.exports = function ({ serializers }) { continue } + $_ = $i[0] 
* 1 + $step = 4 - if ($i[0] != 8) { - $step = 4 - continue - } + if ($_ != 8) { + $step = 4 + continue + } case 4: diff --git a/test/generated/ieee/specified/be/float.parser.inc.js b/test/generated/ieee/specified/be/float.parser.inc.js index df950338..6c0b29cd 100644 --- a/test/generated/ieee/specified/be/float.parser.inc.js +++ b/test/generated/ieee/specified/be/float.parser.inc.js @@ -40,20 +40,15 @@ module.exports = function ({ parsers }) { $i[0]++ - if ($i[0] == 4) { - $step = 6 + if ($i[0] != 4) { + $step = 3 continue } - $step = 2 - continue - - case 6: - $_ = (4 - $i[0]) * 1 - 0 - $step = 7 + $step = 6 - case 7: + case 6: $bite = Math.min($end - $start, $_) $_ -= $bite @@ -63,16 +58,16 @@ module.exports = function ({ parsers }) { return { start: $start, object: null, parse } } - $step = 8 + $step = 7 object.value = (function (value) { return Buffer.from(value).readFloatBE() })(object.value) - case 8: + case 7: - $step = 9 + $step = 8 - case 9: + case 8: if ($start == $end) { return { start: $start, object: null, parse } @@ -81,7 +76,7 @@ module.exports = function ({ parsers }) { object.sentry = $buffer[$start++] - case 10: + case 9: return { start: $start, object: object, parse: null } } diff --git a/test/generated/ieee/specified/be/float.serializer.inc.js b/test/generated/ieee/specified/be/float.serializer.inc.js index 2bcd55b0..c85b455c 100644 --- a/test/generated/ieee/specified/be/float.serializer.inc.js +++ b/test/generated/ieee/specified/be/float.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = [], $$ = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { @@ -42,13 +42,15 @@ module.exports = function ({ serializers }) { continue } + $_ = $i[0] * 1 + $step = 4 - if ($i[0] != 4) { - $step = 4 - continue - } + if ($_ != 4) { + $step = 4 + continue + } case 4: diff --git a/test/generated/ieee/specified/le/double.parser.inc.js 
b/test/generated/ieee/specified/le/double.parser.inc.js index f60ae3dc..4b979a19 100644 --- a/test/generated/ieee/specified/le/double.parser.inc.js +++ b/test/generated/ieee/specified/le/double.parser.inc.js @@ -40,20 +40,15 @@ module.exports = function ({ parsers }) { $i[0]++ - if ($i[0] == 8) { - $step = 6 + if ($i[0] != 8) { + $step = 3 continue } - $step = 2 - continue - - case 6: - $_ = (8 - $i[0]) * 1 - 0 - $step = 7 + $step = 6 - case 7: + case 6: $bite = Math.min($end - $start, $_) $_ -= $bite @@ -63,16 +58,16 @@ module.exports = function ({ parsers }) { return { start: $start, object: null, parse } } - $step = 8 + $step = 7 object.value = (function (value) { return Buffer.from(value).readDoubleLE() })(object.value) - case 8: + case 7: - $step = 9 + $step = 8 - case 9: + case 8: if ($start == $end) { return { start: $start, object: null, parse } @@ -81,7 +76,7 @@ module.exports = function ({ parsers }) { object.sentry = $buffer[$start++] - case 10: + case 9: return { start: $start, object: object, parse: null } } diff --git a/test/generated/ieee/specified/le/double.serializer.inc.js b/test/generated/ieee/specified/le/double.serializer.inc.js index b3e509e9..03de0332 100644 --- a/test/generated/ieee/specified/le/double.serializer.inc.js +++ b/test/generated/ieee/specified/le/double.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = [], $$ = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { @@ -42,13 +42,15 @@ module.exports = function ({ serializers }) { continue } + $_ = $i[0] * 1 + $step = 4 - if ($i[0] != 8) { - $step = 4 - continue - } + if ($_ != 8) { + $step = 4 + continue + } case 4: diff --git a/test/generated/ieee/specified/le/float.parser.inc.js b/test/generated/ieee/specified/le/float.parser.inc.js index bccec5a5..93215f52 100644 --- a/test/generated/ieee/specified/le/float.parser.inc.js +++ 
b/test/generated/ieee/specified/le/float.parser.inc.js @@ -40,20 +40,15 @@ module.exports = function ({ parsers }) { $i[0]++ - if ($i[0] == 4) { - $step = 6 + if ($i[0] != 4) { + $step = 3 continue } - $step = 2 - continue - - case 6: - $_ = (4 - $i[0]) * 1 - 0 - $step = 7 + $step = 6 - case 7: + case 6: $bite = Math.min($end - $start, $_) $_ -= $bite @@ -63,16 +58,16 @@ module.exports = function ({ parsers }) { return { start: $start, object: null, parse } } - $step = 8 + $step = 7 object.value = (function (value) { return Buffer.from(value).readFloatLE() })(object.value) - case 8: + case 7: - $step = 9 + $step = 8 - case 9: + case 8: if ($start == $end) { return { start: $start, object: null, parse } @@ -81,7 +76,7 @@ module.exports = function ({ parsers }) { object.sentry = $buffer[$start++] - case 10: + case 9: return { start: $start, object: object, parse: null } } diff --git a/test/generated/ieee/specified/le/float.serializer.inc.js b/test/generated/ieee/specified/le/float.serializer.inc.js index 865223c1..2329c5e1 100644 --- a/test/generated/ieee/specified/le/float.serializer.inc.js +++ b/test/generated/ieee/specified/le/float.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = [], $$ = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { @@ -42,13 +42,15 @@ module.exports = function ({ serializers }) { continue } + $_ = $i[0] * 1 + $step = 4 - if ($i[0] != 4) { - $step = 4 - continue - } + if ($_ != 4) { + $step = 4 + continue + } case 4: diff --git a/test/generated/inline/after.serializer.inc.js b/test/generated/inline/after.serializer.inc.js index 28aad080..7507912e 100644 --- a/test/generated/inline/after.serializer.inc.js +++ b/test/generated/inline/after.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function 
serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/inline/before.serializer.inc.js b/test/generated/inline/before.serializer.inc.js index 904b766c..917782af 100644 --- a/test/generated/inline/before.serializer.inc.js +++ b/test/generated/inline/before.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $$ = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/inline/both.serializer.inc.js b/test/generated/inline/both.serializer.inc.js index 39426a94..a21dda79 100644 --- a/test/generated/inline/both.serializer.inc.js +++ b/test/generated/inline/both.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $$ = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/inline/mirrored.serializer.inc.js b/test/generated/inline/mirrored.serializer.inc.js index a387b23c..3583959b 100644 --- a/test/generated/inline/mirrored.serializer.inc.js +++ b/test/generated/inline/mirrored.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $$ = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/inline/named.serializer.inc.js b/test/generated/inline/named.serializer.inc.js index 693d6821..11360298 100644 --- a/test/generated/inline/named.serializer.inc.js +++ b/test/generated/inline/named.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $$ = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/integer/be/compliment/int.serializer.inc.js 
b/test/generated/integer/be/compliment/int.serializer.inc.js index e60a8798..14a578fe 100644 --- a/test/generated/integer/be/compliment/int.serializer.inc.js +++ b/test/generated/integer/be/compliment/int.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/integer/be/compliment/long.serializer.inc.js b/test/generated/integer/be/compliment/long.serializer.inc.js index 43df5629..716750ef 100644 --- a/test/generated/integer/be/compliment/long.serializer.inc.js +++ b/test/generated/integer/be/compliment/long.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/integer/be/compliment/short.serializer.inc.js b/test/generated/integer/be/compliment/short.serializer.inc.js index c941c5c8..a6e35f3a 100644 --- a/test/generated/integer/be/compliment/short.serializer.inc.js +++ b/test/generated/integer/be/compliment/short.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/integer/be/word/int.serializer.inc.js b/test/generated/integer/be/word/int.serializer.inc.js index e60a8798..14a578fe 100644 --- a/test/generated/integer/be/word/int.serializer.inc.js +++ b/test/generated/integer/be/word/int.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/integer/be/word/long.serializer.inc.js 
b/test/generated/integer/be/word/long.serializer.inc.js index 43df5629..716750ef 100644 --- a/test/generated/integer/be/word/long.serializer.inc.js +++ b/test/generated/integer/be/word/long.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/integer/be/word/short.serializer.inc.js b/test/generated/integer/be/word/short.serializer.inc.js index c941c5c8..a6e35f3a 100644 --- a/test/generated/integer/be/word/short.serializer.inc.js +++ b/test/generated/integer/be/word/short.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/integer/byte/whole.serializer.inc.js b/test/generated/integer/byte/whole.serializer.inc.js index b6e7984b..1bfd6eab 100644 --- a/test/generated/integer/byte/whole.serializer.inc.js +++ b/test/generated/integer/byte/whole.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/integer/le/compliment/int.serializer.inc.js b/test/generated/integer/le/compliment/int.serializer.inc.js index 50bf5706..f0d6881d 100644 --- a/test/generated/integer/le/compliment/int.serializer.inc.js +++ b/test/generated/integer/le/compliment/int.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/integer/le/compliment/long.serializer.inc.js 
b/test/generated/integer/le/compliment/long.serializer.inc.js index c066ffcf..775a3a7b 100644 --- a/test/generated/integer/le/compliment/long.serializer.inc.js +++ b/test/generated/integer/le/compliment/long.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/integer/le/compliment/short.serializer.inc.js b/test/generated/integer/le/compliment/short.serializer.inc.js index 4ffc916a..6135ea0e 100644 --- a/test/generated/integer/le/compliment/short.serializer.inc.js +++ b/test/generated/integer/le/compliment/short.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/integer/le/word/int.serializer.inc.js b/test/generated/integer/le/word/int.serializer.inc.js index 50bf5706..f0d6881d 100644 --- a/test/generated/integer/le/word/int.serializer.inc.js +++ b/test/generated/integer/le/word/int.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/integer/le/word/long.serializer.inc.js b/test/generated/integer/le/word/long.serializer.inc.js index c066ffcf..775a3a7b 100644 --- a/test/generated/integer/le/word/long.serializer.inc.js +++ b/test/generated/integer/le/word/long.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/integer/le/word/short.serializer.inc.js 
b/test/generated/integer/le/word/short.serializer.inc.js index 4ffc916a..6135ea0e 100644 --- a/test/generated/integer/le/word/short.serializer.inc.js +++ b/test/generated/integer/le/word/short.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/literal.serializer.inc.js b/test/generated/literal.serializer.inc.js index 3a51b29a..5d88de23 100644 --- a/test/generated/literal.serializer.inc.js +++ b/test/generated/literal.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/little.serializer.inc.js b/test/generated/little.serializer.inc.js index 8ed8a465..24f96f98 100644 --- a/test/generated/little.serializer.inc.js +++ b/test/generated/little.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/lookup.serializer.inc.js b/test/generated/lookup.serializer.inc.js index 5f5ebcd6..32f5e997 100644 --- a/test/generated/lookup.serializer.inc.js +++ b/test/generated/lookup.serializer.inc.js @@ -12,7 +12,7 @@ module.exports = function ({ serializers }) { } return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/require/module.serializer.inc.js b/test/generated/require/module.serializer.inc.js index 53cda1f5..06e0db2b 100644 --- a/test/generated/require/module.serializer.inc.js +++ b/test/generated/require/module.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = 
function ({ serializers }) { const twiddle = require('../../../test/cycle/twiddle') return function (object, $step = 0, $$ = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/skip.serializer.inc.js b/test/generated/skip.serializer.inc.js index 6cb8862a..447c86c5 100644 --- a/test/generated/skip.serializer.inc.js +++ b/test/generated/skip.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { diff --git a/test/generated/structure.serializer.inc.js b/test/generated/structure.serializer.inc.js index da1f604e..4e9361e4 100644 --- a/test/generated/structure.serializer.inc.js +++ b/test/generated/structure.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/switched/packed/strings.serializer.inc.js b/test/generated/switched/packed/strings.serializer.inc.js index 0763752e..1a887710 100644 --- a/test/generated/switched/packed/strings.serializer.inc.js +++ b/test/generated/switched/packed/strings.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/switched/packed/variant.serializer.inc.js b/test/generated/switched/packed/variant.serializer.inc.js index 10324c1e..d909d78c 100644 --- a/test/generated/switched/packed/variant.serializer.inc.js +++ b/test/generated/switched/packed/variant.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, 
$stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { switch ($step) { diff --git a/test/generated/switched/strings.serializer.inc.js b/test/generated/switched/strings.serializer.inc.js index d30d542b..e27c9afd 100644 --- a/test/generated/switched/strings.serializer.inc.js +++ b/test/generated/switched/strings.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { diff --git a/test/generated/switched/variant.serializer.inc.js b/test/generated/switched/variant.serializer.inc.js index e1d2e9f8..0698b3a6 100644 --- a/test/generated/switched/variant.serializer.inc.js +++ b/test/generated/switched/variant.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { diff --git a/test/generated/terminated/concat.parser.inc.js b/test/generated/terminated/concat.parser.inc.js index b2619391..066d46c7 100644 --- a/test/generated/terminated/concat.parser.inc.js +++ b/test/generated/terminated/concat.parser.inc.js @@ -17,12 +17,12 @@ module.exports = function ({ parsers }) { $step = 1 - case 1: + case 1: { - $_ = $buffer.indexOf(13, $start) - if (~$_) { - $buffers.push($buffer.slice($start, $_)) - $start = $_ + 1 + const $index = $buffer.indexOf(0xd, $start) + if (~$index) { + $buffers.push($buffer.slice($start, $index)) + $start = $index + 1 $step = 2 continue } else { @@ -32,6 +32,7 @@ module.exports = function ({ parsers }) { $step = 2 + } case 2: @@ -39,7 +40,7 @@ module.exports = function ({ parsers }) { return { start: $start, parse } } - if ($buffer[$start++] != 10) { + if ($buffer[$start++] != 0xa) { $buffers.push(Buffer.from([ 13 ].concat($buffer[$start]))) $step = 2 continue @@ -49,10 +50,11 @@ module.exports = function ({ 
parsers }) { case 3: + object.array = $buffers.length == 1 ? $buffers[0] : Buffer.concat($buffers) $buffers.length = 0 - $step = 5 + $step = 4 case 4: diff --git a/test/generated/terminated/concat.serializer.inc.js b/test/generated/terminated/concat.serializer.inc.js index 230f8ce3..138402d5 100644 --- a/test/generated/terminated/concat.serializer.inc.js +++ b/test/generated/terminated/concat.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { diff --git a/test/generated/terminated/nested.parser.inc.js b/test/generated/terminated/nested.parser.inc.js index a406c758..748ce8c7 100644 --- a/test/generated/terminated/nested.parser.inc.js +++ b/test/generated/terminated/nested.parser.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ parsers }) { return function (object = {}, $step = 0, $i = []) { - let $_, $bite + let $_, $bite, $length = 0 return function parse ($buffer, $start, $end) { for (;;) { @@ -43,7 +43,7 @@ module.exports = function ({ parsers }) { if ($buffer[$start] != 0x0) { $step = 4 - parse([ 0x0 ], 0, 1) + parse(Buffer.from([ 0x0 ]), 0, 1) continue } $start++ @@ -81,7 +81,7 @@ module.exports = function ({ parsers }) { if ($buffer[$start] != 0x0) { $step = 8 - parse([ 0x0 ], 0, 1) + parse(Buffer.from([ 0x0 ]), 0, 1) continue } $start++ diff --git a/test/generated/terminated/nested.serializer.inc.js b/test/generated/terminated/nested.serializer.inc.js index b65f299f..383de809 100644 --- a/test/generated/terminated/nested.serializer.inc.js +++ b/test/generated/terminated/nested.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) { diff --git a/test/generated/terminated/words.parser.inc.js 
b/test/generated/terminated/words.parser.inc.js index d264a6a9..1fbdcf51 100644 --- a/test/generated/terminated/words.parser.inc.js +++ b/test/generated/terminated/words.parser.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ parsers }) { return function (object = {}, $step = 0, $i = []) { - let $_, $bite + let $_, $bite, $length = 0 return function parse ($buffer, $start, $end) { for (;;) { @@ -43,7 +43,7 @@ module.exports = function ({ parsers }) { if ($buffer[$start] != 0x0) { $step = 4 - parse([ 0x0 ], 0, 1) + parse(Buffer.from([ 0x0 ]), 0, 1) continue } $start++ diff --git a/test/generated/terminated/words.serializer.inc.js b/test/generated/terminated/words.serializer.inc.js index 5da86c6e..bc5e6bd6 100644 --- a/test/generated/terminated/words.serializer.inc.js +++ b/test/generated/terminated/words.serializer.inc.js @@ -3,7 +3,7 @@ module.exports = function ({ serializers }) { return function (object, $step = 0, $i = []) { - let $bite, $stop, $_ + let $_, $bite return function serialize ($buffer, $start, $end) { for (;;) {