diff --git a/parse.all.js b/parse.all.js index f55a9fba..12bf91c5 100644 --- a/parse.all.js +++ b/parse.all.js @@ -116,16 +116,21 @@ function inquisition (fields, $I = 0) { checked.push(field) break case 'terminated': - field.fields = inquisition(field.fields) + if (field.fields[field.fields.length - 1].type != 'buffer') { + field.fields = inquisition(field.fields) + } checked.push(field) break case 'terminator': checked.push({ type: 'checkpoint', - lengths: [ field.body.terminator.length ], + lengths: [ 0 ], vivify: null, rewind: 0 }) + if (field.body.fields[field.body.fields.length - 1].type != 'buffer') { + checked[checked.length - 1].lengths[0] += field.body.terminator.length + } checked.push(field) break case 'lengthEncoding': @@ -248,6 +253,7 @@ function generate (packet, { require, bff, chk }) { checked.push(field) checkpoint.lengths[0] += field.body.terminator.length break + // TODO Checkpoint invocation on fields in two places? case 'repeated': // TODO If the terminator is greater than or equal to the size // of the repeated part, we do not have to perform the @@ -257,7 +263,10 @@ function generate (packet, { require, bff, chk }) { break case 'terminated': checked.push(field) - field.fields = checkpoints(path + `${field.dotted}[$i[${index}]]`, field.fields, index + 1) + const element = field.fields.slice().pop().fields.slice().pop() + if (element.type != 'buffer') { + field.fields = checkpoints(path + `${field.dotted}[$i[${index}]]`, field.fields, index + 1) + } checked.push(checkpoint = { type: 'checkpoint', lengths: [ 0 ], vivify: null, rewind: 0 }) @@ -495,11 +504,14 @@ function generate (packet, { require, bff, chk }) { const element = field.fields.slice().pop().fields.slice().pop() if (element.type == 'buffer') { const terminator = field.terminator + const assign = element.concat + ? 
`${path} = $buffer.slice($start, $_)` + : `${path} = [ $buffer.slice($start, $_) ]` if (bff || chk) { const source = $(` $_ = $buffer.indexOf(Buffer.from(${util.inspect(terminator)}), $start) if (~$_) { - ${path} = $buffer.slice($start, $_) + `, assign, ` $start = $_ + ${terminator.length} } else { return parsers.inc.${packet.name}(${signature().join(', ')})($buffer, $start, $end) @@ -508,10 +520,11 @@ function generate (packet, { require, bff, chk }) { $step += 2 + terminator.length return source } + // TODO What if you don't find? Here we create zero buffer. return $(` $_ = $buffer.indexOf(Buffer.from(${util.inspect(terminator)}), $start) $_ = ~$_ ? $_ : $start - ${path} = $buffer.slice($start, $_) + `, assign, ` $start = $_ + ${terminator.length} `) } diff --git a/serialize.all.js b/serialize.all.js index 49e61d6a..1d8f8783 100644 --- a/serialize.all.js +++ b/serialize.all.js @@ -139,8 +139,14 @@ function checkpoints (path, fields, index = 0) { // checked.push(field) if (field.fields[0].fixed) { - // *Division in string templates upsets Docco JavaScript parser.* - checkpoint.lengths.push(`${path + field.dotted}.length * ${field.fields[0].bits >>> 3}`) + if (field.fields[0].type == 'buffer' && !field.fields[0].concat) { + checkpoint.lengths.push($(` + ${path + field.dotted}.reduce((sum, buffer) => sum + buffer.length, 0) + `)) + } else { + // *Division in string templates upsets Docco JavaScript parser.* + checkpoint.lengths.push(`${path + field.dotted}.length * ${field.fields[0].bits >>> 3}`) + } } else { field.fields = checkpoints(`${path}${field.dotted}[$i[${index}]]`, field.fields, index + 1) checked.push(checkpoint = { type: 'checkpoint', lengths: [ 0 ] }) @@ -251,11 +257,23 @@ function inquisition (path, fields) { checked.push(field) break case 'terminated': - field.fields = inquisition(path + field.dotted, field.fields) + if (field.fields[field.fields.length - 1].type == 'buffer') { + checked.push({ type: 'checkpoint', lengths: [ 0, + 
field.fields[field.fields.length - 1].concat + ? `${path + field.dotted}.length` + : `${path + field.dotted}.reduce((sum, buffer) => sum + buffer.length, 0)` + ]}) + } else { + field.fields = inquisition(path + field.dotted, field.fields) + } checked.push(field) break case 'terminator': - checked.push({ type: 'checkpoint', lengths: [ field.body.terminator.length ]}) + if (field.body.fields[field.body.fields.length - 1].type == 'buffer') { + checked[checked.length - 2].lengths[0] += field.body.terminator.length + } else { + checked.push({ type: 'checkpoint', lengths: [ field.body.terminator.length ]}) + } checked.push(field) break case 'lengthEncoding': @@ -457,6 +475,25 @@ function generate (packet, { require = null, bff, chk }) { } function terminated (path, field) { + const element = field.fields[field.fields.length - 1] + if (element.type == 'buffer') { + $step += 2 + if (element.concat) { + return $(` + ${path}.copy($buffer, $start, 0, ${path}.length) + $start += ${path}.length + `) + } + variables.register = true + return $(` + $_ = 0 + for (let $index = 0; $index < ${path}.length; $index++) { + ${path}[$index].copy($buffer, $start) + $start += ${path}[$index].length + $_ += ${path}[$index].length + } + `) + } $step += 1 const i = `$i[${++$i}]` const looped = join(field.fields.map(field => dispatch(`${path}[${i}]`, field))) @@ -471,9 +508,12 @@ function generate (packet, { require = null, bff, chk }) { function terminator (field) { const terminator = [] - $step += field.body.terminator.length + 1 + $step += field.body.terminator.length + if (field.body.fields[field.body.fields.length - 1].type != 'buffer') { + $step++ + } for (const bite of field.body.terminator) { - terminator.push(`$buffer[$start++] = 0x${bite.toString(16)}`) + terminator.push(`$buffer[$start++] = ${hex(bite)}`) } return terminator.join('\n') } diff --git a/serialize.inc.js b/serialize.inc.js index a3f1d605..8463cb07 100644 --- a/serialize.inc.js +++ b/serialize.inc.js @@ -242,34 +242,123 @@ 
function generate (packet, { require = null }) { return source } + function copy (path, element, buffered) { + let i + if (!element.concat) { + locals['offset'] = 0 + locals['length'] = 0 + i = `$i[${++$i}]` + } + const source = element.concat + // Copy the single buffer using `Buffer.copy()`. + ? $(` + case ${$step++}: + + $_ = 0 + + case ${$step++}: { + + $step = ${$step - 1} + + const length = Math.min($end - $start, ${path}.length - $_) + ${path}.copy($buffer, $start, $_, $_ + length) + $start += length + $_ += length + + if ($_ != ${path}.length) { + `, buffered, ` + return { start: $start, serialize: $serialize } + } + + $step = ${$step} + + } + `) + // Loop through an array of buffers copying to the serialization + // buffer using `Buffer.copy()`. Need to track the index of the + // current buffer in the array and the offset in the current buffer. + : $(` + case ${$step++}: + + $_ = 0 + $offset = 0 + $length = ${path}.reduce((sum, buffer) => sum + buffer.length, 0) + ${i} = 0 + + case ${$step++}: { + + $step = ${$step - 1} + + for (;;) { + const length = Math.min($end - $start, ${path}[${i}].length - $offset) + ${path}[${i}].copy($buffer, $start, $offset, $offset + length) + $offset += length + $start += length + $_ += length + + if ($offset == ${path}[${i}].length) { + ${i}++ + $offset = 0 + } + + if ($_ == $length) { + break + } + + `, buffered, ` + return { start: $start, serialize: $serialize } + } + + $step = ${$step} + + } + `) + if (element.concat) { + i-- + } + return source + } + function terminated (path, field) { surround = true + const buffered = accumulate.buffered.length != 0 + ?
accumulate.buffered.map(buffered => { + return $(` + `, buffered.source, ` + $starts[${buffered.start}] = $start + `) + }).join('\n') : null + function terminate () { + return join(field.terminator.map(bite => { + return $(` + case ${$step++}: + + if ($start == $end) { + `, buffered, ` + return { start: $start, serialize: $serialize } + } + + $buffer[$start++] = ${hex(bite)} + + $step = ${$step} + `) + })) + } + const element = field.fields[0] + if (element.type == 'buffer') { + const source = $(` + `, copy(path, element, buffered), ` + + `, terminate(), ` + `) + return source + } $i++ const init = $step const again = ++$step const i = `$i[${$i}]` const looped = join(field.fields.map(field => dispatch(`${path}[${i}]`, field))) const done = $step - const buffered = accumulate.buffered.map(buffered => { - return $(` - `, buffered.source, ` - $starts[${buffered.start}] = $start - `) - }) - const terminator = join(field.terminator.map(bite => { - return $(` - case ${$step++}: - - if ($start == $end) { - `, buffered.length != 0 ? buffered.join('\n') : null, ` - return { start: $start, serialize: $serialize } - } - - $buffer[$start++] = 0x${bite.toString(16)} - - $step = ${$step} - `) - })) const source = $(` case ${init}: @@ -284,7 +373,7 @@ function generate (packet, { require = null }) { $step = ${done} - `, terminator, ` + `, terminate(), ` case ${$step++}: `) diff --git a/sizeof.js b/sizeof.js index 61b2097a..a1a920ff 100644 --- a/sizeof.js +++ b/sizeof.js @@ -113,9 +113,14 @@ function generate (packet, { require = null }) { return source } case 'terminated': { + // TODO Use AST rollup `fixed`. if (field.fields.filter(field => !field.fixed).length == 0) { const bits = field.fields.reduce((sum, field) => sum + field.bits, 0) - return $(` + return field.fields[0].type == 'buffer' && !field.fields[0].concat + ? 
$(` + $start += ${path}.reduce((sum, buffer) => sum + buffer.length, 0) + + ${field.terminator.length} + `) : $(` $start += ${bits >>> 3} * ${path}.length + ${field.terminator.length} `) } diff --git a/test/cycle/terminated.t.js b/test/cycle/terminated.t.js index 6b3a2427..828a3e9f 100644 --- a/test/cycle/terminated.t.js +++ b/test/cycle/terminated.t.js @@ -42,4 +42,18 @@ function prove (okay) { nudge: 0xaa, array: Buffer.from('abcdefghij'), sentry: 0xaa }] }) + cycle(okay, { + name: 'terminated/chunked', + define: { + object: { + nudge: 8, + array: [ [[ Buffer ]], 0xd, 0xa ], + sentry: 8 + } + }, + objects: [{ + nudge: 0xaa, array: [ Buffer.from('abcdefghij') ], sentry: 0xaa + }], + stopAt: 'serialize.chk' + }) } diff --git a/test/generated/terminated/chunked.lookup.js b/test/generated/terminated/chunked.lookup.js new file mode 100644 index 00000000..5830592d --- /dev/null +++ b/test/generated/terminated/chunked.lookup.js @@ -0,0 +1,3 @@ +module.exports = function ({ $lookup }) { + $lookup.push.apply($lookup, []) +} diff --git a/test/generated/terminated/chunked.parser.all.js b/test/generated/terminated/chunked.parser.all.js new file mode 100644 index 00000000..4271f939 --- /dev/null +++ b/test/generated/terminated/chunked.parser.all.js @@ -0,0 +1,24 @@ +module.exports = function ({ parsers, $lookup }) { + parsers.all.object = function () { + return function ($buffer, $start) { + let $_, $i = [] + + let object = { + nudge: 0, + array: null, + sentry: 0 + } + + object.nudge = ($buffer[$start++]) + + $_ = $buffer.indexOf(Buffer.from([ 13, 10 ]), $start) + $_ = ~$_ ? 
$_ : $start + object.array = [ $buffer.slice($start, $_) ] + $start = $_ + 2 + + object.sentry = ($buffer[$start++]) + + return object + } + } () +} diff --git a/test/generated/terminated/chunked.parser.bff.js b/test/generated/terminated/chunked.parser.bff.js new file mode 100644 index 00000000..148f15bb --- /dev/null +++ b/test/generated/terminated/chunked.parser.bff.js @@ -0,0 +1,37 @@ +module.exports = function ({ parsers, $lookup }) { + parsers.bff.object = function () { + return function () { + return function ($buffer, $start, $end) { + let $_, $i = [] + + let object = { + nudge: 0, + array: null, + sentry: 0 + } + + if ($end - $start < 1) { + return parsers.inc.object(object, 1, $i)($buffer, $start, $end) + } + + object.nudge = ($buffer[$start++]) + + $_ = $buffer.indexOf(Buffer.from([ 13, 10 ]), $start) + if (~$_) { + object.array = [ $buffer.slice($start, $_) ] + $start = $_ + 2 + } else { + return parsers.inc.object(object, 3, $i)($buffer, $start, $end) + } + + if ($end - $start < 1) { + return parsers.inc.object(object, 7, $i)($buffer, $start, $end) + } + + object.sentry = ($buffer[$start++]) + + return { start: $start, object: object, parse: null } + } + } () + } +} diff --git a/test/generated/terminated/chunked.parser.chk.js b/test/generated/terminated/chunked.parser.chk.js new file mode 100644 index 00000000..9946824e --- /dev/null +++ b/test/generated/terminated/chunked.parser.chk.js @@ -0,0 +1,37 @@ +module.exports = function ({ parsers, $lookup }) { + parsers.chk.object = function () { + return function () { + return function ($buffer, $start, $end) { + let $_, $i = [] + + let object = { + nudge: 0, + array: null, + sentry: 0 + } + + if ($end - $start < 1) { + return parsers.inc.object(object, 1, $i)($buffer, $start, $end) + } + + object.nudge = ($buffer[$start++]) + + $_ = $buffer.indexOf(Buffer.from([ 13, 10 ]), $start) + if (~$_) { + object.array = $buffer.slice($start, $_) + $start = $_ + 2 + } else { + return parsers.inc.object(object, 3, 
$i)($buffer, $start, $end) + } + + if ($end - $start < 1) { + return parsers.inc.object(object, 7, $i)($buffer, $start, $end) + } + + object.sentry = ($buffer[$start++]) + + return { start: $start, object: object, parse: null } + } + } () + } +} diff --git a/test/generated/terminated/chunked.parser.inc.js b/test/generated/terminated/chunked.parser.inc.js new file mode 100644 index 00000000..58ba1290 --- /dev/null +++ b/test/generated/terminated/chunked.parser.inc.js @@ -0,0 +1,100 @@ +module.exports = function ({ parsers, $lookup }) { + parsers.inc.object = function () { + return function (object, $step = 0, $i = []) { + let $_, $bite, $buffers = [] + + return function $parse ($buffer, $start, $end) { + for (;;) { + switch ($step) { + case 0: + + object = { + nudge: 0, + array: null, + sentry: 0 + } + + $step = 1 + + case 1: + + $step = 2 + + case 2: + + if ($start == $end) { + return { start: $start, object: null, parse: $parse } + } + + object.nudge = $buffer[$start++] + + + // TODO Here we set the step upon entry, which is why we don't + // always have to set the step for an integer. Usually we have + // some sort of preamble that sets the step. We should eliminate + // steps where we can (why not?) and close the door behind us + // when we enter a step. 
+ case 3: { + + $step = 3 + + const $index = $buffer.indexOf(0xd, $start) + if (~$index) { + $buffers.push($buffer.slice($start, $index)) + $start = $index + 1 + $step = 4 + continue + } else { + $buffers.push($buffer.slice($start)) + return { start: $end, object: null, parse: $parse } + } + + $step = 4 + + } + + case 4: + + if ($start == $end) { + return { start: $start, object: null, parse: $parse } + } + + if ($buffer[$start++] != 0xa) { + $buffers.push(Buffer.from([ 13 ].concat($buffer[$start]))) + $step = 4 + continue + } + + $step = 5 + + case 5: + + + object.array = $buffers + $buffers = [] + + $step = 6 + + case 6: + + $step = 7 + + case 7: + + if ($start == $end) { + return { start: $start, object: null, parse: $parse } + } + + object.sentry = $buffer[$start++] + + + case 8: + + return { start: $start, object: object, parse: null } + } + break + } + } + } + } () +} diff --git a/test/generated/terminated/chunked.serializer.all.js b/test/generated/terminated/chunked.serializer.all.js new file mode 100644 index 00000000..d87e0aed --- /dev/null +++ b/test/generated/terminated/chunked.serializer.all.js @@ -0,0 +1,25 @@ +module.exports = function ({ serializers, $lookup }) { + serializers.all.object = function () { + return function (object) { + return function ($buffer, $start, $end) { + let $_, $i = [] + + $buffer[$start++] = (object.nudge & 0xff) + + $_ = 0 + for (let $index = 0; $index < object.array.length; $index++) { + object.array[$index].copy($buffer, $start) + $start += object.array[$index].length + $_ += object.array[$index].length + } + + $buffer[$start++] = 0xd + $buffer[$start++] = 0xa + + $buffer[$start++] = (object.sentry & 0xff) + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/terminated/chunked.serializer.bff.js b/test/generated/terminated/chunked.serializer.bff.js new file mode 100644 index 00000000..9c2226c2 --- /dev/null +++ b/test/generated/terminated/chunked.serializer.bff.js @@ -0,0 +1,29 @@ 
+module.exports = function ({ serializers, $lookup }) { + serializers.bff.object = function () { + return function (object) { + return function ($buffer, $start, $end) { + let $_, $i = [] + + if ($end - $start < 4 + object.array.reduce((sum, buffer) => sum + buffer.length, 0)) { + return serializers.inc.object(object, 0, $i)($buffer, $start, $end) + } + + $buffer[$start++] = (object.nudge & 0xff) + + $_ = 0 + for (let $index = 0; $index < object.array.length; $index++) { + object.array[$index].copy($buffer, $start) + $start += object.array[$index].length + $_ += object.array[$index].length + } + + $buffer[$start++] = 0xd + $buffer[$start++] = 0xa + + $buffer[$start++] = (object.sentry & 0xff) + + return { start: $start, serialize: null } + } + } + } () +} diff --git a/test/generated/terminated/chunked.serializer.chk.js b/test/generated/terminated/chunked.serializer.chk.js new file mode 100644 index 00000000..580014ef --- /dev/null +++ b/test/generated/terminated/chunked.serializer.chk.js @@ -0,0 +1,37 @@ +module.exports = function ({ serializers, $lookup }) { + serializers.chk.object = function () { + return function (object) { + return function ($buffer, $start, $end) { + let $_, $i = [] + + if ($end - $start < 1) { + return serializers.inc.object(object, 0, $i)($buffer, $start, $end) + } + + $buffer[$start++] = (object.nudge & 0xff) + + if ($end - $start < 2 + object.array.reduce((sum, buffer) => sum + buffer.length, 0)) { + return serializers.inc.object(object, 2, $i)($buffer, $start, $end) + } + + $_ = 0 + for (let $index = 0; $index < object.array.length; $index++) { + object.array[$index].copy($buffer, $start) + $start += object.array[$index].length + $_ += object.array[$index].length + } + + $buffer[$start++] = 0xd + $buffer[$start++] = 0xa + + if ($end - $start < 1) { + return serializers.inc.object(object, 6, $i)($buffer, $start, $end) + } + + $buffer[$start++] = (object.sentry & 0xff) + + return { start: $start, serialize: null } + } + } + } () +} diff 
--git a/test/generated/terminated/chunked.serializer.inc.js b/test/generated/terminated/chunked.serializer.inc.js new file mode 100644 index 00000000..30e6c985 --- /dev/null +++ b/test/generated/terminated/chunked.serializer.inc.js @@ -0,0 +1,112 @@ +module.exports = function ({ serializers, $lookup }) { + serializers.inc.object = function () { + return function (object, $step = 0, $i = []) { + let $_, $bite, $offset = 0, $length = 0 + + return function $serialize ($buffer, $start, $end) { + for (;;) { + switch ($step) { + case 0: + + $step = 1 + $bite = 0 + $_ = object.nudge + + case 1: + + while ($bite != -1) { + if ($start == $end) { + return { start: $start, serialize: $serialize } + } + $buffer[$start++] = ($_ >>> $bite * 8 & 0xff) + $bite-- + } + + + case 2: + + $_ = 0 + $offset = 0 + $length = object.array.reduce((sum, buffer) => sum + buffer.length, 0) + $i[0] = 0 + + case 3: { + + $step = 3 + + for (;;) { + const length = Math.min($end - $start, object.array[$i[0]].length - $offset) + object.array[$i[0]].copy($buffer, $start, $offset, $offset + length) + $offset += length + $start += length + $_ += length + + if ($offset == object.array[$i[0]].length) { + $i[0]++ + $offset = 0 + } + + if ($_ == $length) { + break + } + + return { start: $start, serialize: $serialize } + } + + $step = 4 + + } + + case 4: + + if ($start == $end) { + return { start: $start, serialize: $serialize } + } + + $buffer[$start++] = 0xd + + $step = 5 + + case 5: + + if ($start == $end) { + return { start: $start, serialize: $serialize } + } + + $buffer[$start++] = 0xa + + $step = 6 + + case 6: + + $step = 7 + $bite = 0 + $_ = object.sentry + + case 7: + + while ($bite != -1) { + if ($start == $end) { + return { start: $start, serialize: $serialize } + } + $buffer[$start++] = ($_ >>> $bite * 8 & 0xff) + $bite-- + } + + + $step = 8 + + case 8: + + break + + } + + break + } + + return { start: $start, serialize: null } + } + } + } () +} diff --git 
a/test/generated/terminated/chunked.sizeof.js b/test/generated/terminated/chunked.sizeof.js new file mode 100644 index 00000000..fdc2975a --- /dev/null +++ b/test/generated/terminated/chunked.sizeof.js @@ -0,0 +1,16 @@ +module.exports = function ({ sizeOf }) { + sizeOf.object = function () { + return function (object) { + let $start = 0 + + $start += 1 + + $start += object.array.reduce((sum, buffer) => sum + buffer.length, 0) + + 2 + + $start += 1 + + return $start + } + } () +} diff --git a/test/generated/terminated/concat.serializer.all.js b/test/generated/terminated/concat.serializer.all.js index cce99507..c6f7be16 100644 --- a/test/generated/terminated/concat.serializer.all.js +++ b/test/generated/terminated/concat.serializer.all.js @@ -6,9 +6,8 @@ module.exports = function ({ serializers, $lookup }) { $buffer[$start++] = (object.nudge & 0xff) - for ($i[0] = 0; $i[0] < object.array.length; $i[0]++) { - $buffer[$start++] = (object.array[$i[0]] & 0xff) - } + object.array.copy($buffer, $start, 0, object.array.length) + $start += object.array.length $buffer[$start++] = 0xd $buffer[$start++] = 0xa diff --git a/test/generated/terminated/concat.serializer.bff.js b/test/generated/terminated/concat.serializer.bff.js index 6b23678b..5d113824 100644 --- a/test/generated/terminated/concat.serializer.bff.js +++ b/test/generated/terminated/concat.serializer.bff.js @@ -10,9 +10,8 @@ module.exports = function ({ serializers, $lookup }) { $buffer[$start++] = (object.nudge & 0xff) - for ($i[0] = 0; $i[0] < object.array.length; $i[0]++) { - $buffer[$start++] = (object.array[$i[0]] & 0xff) - } + object.array.copy($buffer, $start, 0, object.array.length) + $start += object.array.length $buffer[$start++] = 0xd $buffer[$start++] = 0xa diff --git a/test/generated/terminated/concat.serializer.chk.js b/test/generated/terminated/concat.serializer.chk.js index b7fce7d0..1f8faab4 100644 --- a/test/generated/terminated/concat.serializer.chk.js +++ 
b/test/generated/terminated/concat.serializer.chk.js @@ -10,23 +10,18 @@ module.exports = function ({ serializers, $lookup }) { $buffer[$start++] = (object.nudge & 0xff) - for ($i[0] = 0; $i[0] < object.array.length; $i[0]++) { - if ($end - $start < 1) { - return serializers.inc.object(object, 3, $i)($buffer, $start, $end) - } - - $buffer[$start++] = (object.array[$i[0]] & 0xff) + if ($end - $start < 2 + object.array.length) { + return serializers.inc.object(object, 2, $i)($buffer, $start, $end) } - if ($end - $start < 2) { - return serializers.inc.object(object, 5, $i)($buffer, $start, $end) - } + object.array.copy($buffer, $start, 0, object.array.length) + $start += object.array.length $buffer[$start++] = 0xd $buffer[$start++] = 0xa if ($end - $start < 1) { - return serializers.inc.object(object, 8, $i)($buffer, $start, $end) + return serializers.inc.object(object, 6, $i)($buffer, $start, $end) } $buffer[$start++] = (object.sentry & 0xff) diff --git a/test/generated/terminated/concat.serializer.inc.js b/test/generated/terminated/concat.serializer.inc.js index e9e379dc..2a1d2057 100644 --- a/test/generated/terminated/concat.serializer.inc.js +++ b/test/generated/terminated/concat.serializer.inc.js @@ -25,33 +25,26 @@ module.exports = function ({ serializers, $lookup }) { case 2: - $i[0] = 0 - $step = 3 + $_ = 0 - case 3: + case 3: { - $step = 4 - $bite = 0 - $_ = object.array[$i[0]] + $step = 3 - case 4: + const length = Math.min($end - $start, object.array.length - $_) + object.array.copy($buffer, $start, $_, $_ + length) + $start += length + $_ += length - while ($bite != -1) { - if ($start == $end) { + if ($_ != object.array.length) { return { start: $start, serialize: $serialize } } - $buffer[$start++] = ($_ >>> $bite * 8 & 0xff) - $bite-- - } - if (++$i[0] != object.array.length) { - $step = 3 - continue - } + $step = 4 - $step = 5 + } - case 5: + case 4: if ($start == $end) { return { start: $start, serialize: $serialize } @@ -59,9 +52,9 @@ module.exports = 
function ({ serializers, $lookup }) { $buffer[$start++] = 0xd - $step = 6 + $step = 5 - case 6: + case 5: if ($start == $end) { return { start: $start, serialize: $serialize } @@ -69,17 +62,15 @@ module.exports = function ({ serializers, $lookup }) { $buffer[$start++] = 0xa - $step = 7 - - case 7: + $step = 6 - case 8: + case 6: - $step = 9 + $step = 7 $bite = 0 $_ = object.sentry - case 9: + case 7: while ($bite != -1) { if ($start == $end) { @@ -90,9 +81,9 @@ module.exports = function ({ serializers, $lookup }) { } - $step = 10 + $step = 8 - case 10: + case 8: break