Implement terminated buffers.
Closes #509.
Closes #508.
flatheadmill committed Jul 17, 2020
1 parent e9f66ca commit 85ef6a4
Showing 19 changed files with 642 additions and 77 deletions.
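
For orientation, here is a minimal sketch of the kind of definition this commit enables, mirroring the terminated/chunked test added in test/cycle/terminated.t.js below. Reading the double-bracket `[[ Buffer ]]` element as a non-concatenated (chunked) buffer is an assumption drawn from that test and from the `concat` checks in the generators.

// A sketch, not part of the commit: a field whose element is a raw Buffer,
// terminated by 0xd 0xa. The non-concatenated form is assumed to round-trip
// as an array of buffer chunks, as in the test object below.
const define = {
    object: {
        nudge: 8,                           // one-byte integer before the buffer
        array: [ [[ Buffer ]], 0xd, 0xa ],  // terminated buffer, 0xd 0xa terminated
        sentry: 8                           // one-byte integer after the buffer
    }
}
// The object such a definition serializes and parses, per the new test:
const object = { nudge: 0xaa, array: [ Buffer.from('abcdefghij') ], sentry: 0xaa }
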
23 changes: 18 additions & 5 deletions parse.all.js
@@ -116,16 +116,21 @@ function inquisition (fields, $I = 0) {
checked.push(field)
break
case 'terminated':
field.fields = inquisition(field.fields)
if (field.fields[field.fields.length - 1].type != 'buffer') {
field.fields = inquisition(field.fields)
}
checked.push(field)
break
case 'terminator':
checked.push({
type: 'checkpoint',
lengths: [ field.body.terminator.length ],
lengths: [ 0 ],
vivify: null,
rewind: 0
})
if (field.body.fields[field.body.fields.length - 1].type != 'buffer') {
checked[checked.length - 1].lengths[0] += field.body.terminator.length
}
checked.push(field)
break
case 'lengthEncoding':
@@ -248,6 +253,7 @@ function generate (packet, { require, bff, chk }) {
checked.push(field)
checkpoint.lengths[0] += field.body.terminator.length
break
// TODO Checkpoint invocation on fields in two places?
case 'repeated':
// TODO If the terminator is greater than or equal to the size
// of the repeated part, we do not have to perform the
@@ -257,7 +263,10 @@
break
case 'terminated':
checked.push(field)
field.fields = checkpoints(path + `${field.dotted}[$i[${index}]]`, field.fields, index + 1)
const element = field.fields.slice().pop().fields.slice().pop()
if (element.type != 'buffer') {
field.fields = checkpoints(path + `${field.dotted}[$i[${index}]]`, field.fields, index + 1)
}
checked.push(checkpoint = {
type: 'checkpoint', lengths: [ 0 ], vivify: null, rewind: 0
})
@@ -495,11 +504,14 @@ function generate (packet, { require, bff, chk }) {
const element = field.fields.slice().pop().fields.slice().pop()
if (element.type == 'buffer') {
const terminator = field.terminator
const assign = element.concat
? `${path} = $buffer.slice($start, $_)`
: `${path} = [ $buffer.slice($start, $_) ]`
if (bff || chk) {
const source = $(`
$_ = $buffer.indexOf(Buffer.from(${util.inspect(terminator)}), $start)
if (~$_) {
${path} = $buffer.slice($start, $_)
`, assign, `
$start = $_ + ${terminator.length}
} else {
return parsers.inc.${packet.name}(${signature().join(', ')})($buffer, $start, $end)
@@ -508,10 +520,11 @@
$step += 2 + terminator.length
return source
}
// TODO What if the terminator is not found? Here we create a zero-length buffer.
return $(`
$_ = $buffer.indexOf(Buffer.from(${util.inspect(terminator)}), $start)
$_ = ~$_ ? $_ : $start
${path} = $buffer.slice($start, $_)
`, assign, `
$start = $_ + ${terminator.length}
`)
}
52 changes: 46 additions & 6 deletions serialize.all.js
@@ -139,8 +139,14 @@ function checkpoints (path, fields, index = 0) {
//
checked.push(field)
if (field.fields[0].fixed) {
// *Division in string templates upsets Docco JavaScript parser.*
checkpoint.lengths.push(`${path + field.dotted}.length * ${field.fields[0].bits >>> 3}`)
if (field.fields[0].type == 'buffer' && !field.fields[0].concat) {
checkpoint.lengths.push($(`
${path + field.dotted}.reduce((sum, buffer) => sum + buffer.length, 0)
`))
} else {
// *Division in string templates upsets Docco JavaScript parser.*
checkpoint.lengths.push(`${path + field.dotted}.length * ${field.fields[0].bits >>> 3}`)
}
} else {
field.fields = checkpoints(`${path}${field.dotted}[$i[${index}]]`, field.fields, index + 1)
checked.push(checkpoint = { type: 'checkpoint', lengths: [ 0 ] })
@@ -251,11 +257,23 @@ function inquisition (path, fields) {
checked.push(field)
break
case 'terminated':
field.fields = inquisition(path + field.dotted, field.fields)
if (field.fields[field.fields.length - 1].type == 'buffer') {
checked.push({ type: 'checkpoint', lengths: [ 0,
field.fields[field.fields.length - 1].concat
? `${path + field.dotted}.length`
: `${path + field.dotted}.reduce((sum, buffer) => sum + buffer.length, 0)`
]})
} else {
field.fields = inquisition(path + field.dotted, field.fields)
}
checked.push(field)
break
case 'terminator':
checked.push({ type: 'checkpoint', lengths: [ field.body.terminator.length ]})
if (field.body.fields[field.body.fields.length - 1].type == 'buffer') {
checked[checked.length - 2].lengths[0] += field.body.terminator.length
} else {
checked.push({ type: 'checkpoint', lengths: [ field.body.terminator.length ]})
}
checked.push(field)
break
case 'lengthEncoding':
@@ -457,6 +475,25 @@ function generate (packet, { require = null, bff, chk }) {
}

function terminated (path, field) {
const element = field.fields[field.fields.length - 1]
if (element.type == 'buffer') {
$step += 2
if (element.concat) {
return $(`
${path}.copy($buffer, $start, 0, ${path}.length)
$start += ${path}.length
`)
}
variables.register = true
return $(`
$_ = 0
for (let $index = 0; $index < ${path}.length; $index++) {
${path}[$index].copy($buffer, $start)
$start += ${path}[$index].length
$_ += ${path}[$index].length
}
`)
}
$step += 1
const i = `$i[${++$i}]`
const looped = join(field.fields.map(field => dispatch(`${path}[${i}]`, field)))
@@ -471,9 +508,12 @@

function terminator (field) {
const terminator = []
$step += field.body.terminator.length + 1
$step += field.body.terminator.length
if (field.body.fields[field.body.fields.length - 1].type != 'buffer') {
$step++
}
for (const bite of field.body.terminator) {
terminator.push(`$buffer[$start++] = 0x${bite.toString(16)}`)
terminator.push(`$buffer[$start++] = ${hex(bite)}`)
}
return terminator.join('\n')
}
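
Expanded by hand, the non-concatenated branch of terminated above, followed by terminator, would serialize the chunked test object roughly as follows. This is a sketch under the assumption that the whole-buffer serializer lays the fields out back to back; the generated fixture for it is not shown in this diff.

const object = { nudge: 0xaa, array: [ Buffer.from('abcdefghij') ], sentry: 0xaa }
const $buffer = Buffer.alloc(14)
let $start = 0, $_ = 0

$buffer[$start++] = object.nudge                // nudge: 8
for (let $index = 0; $index < object.array.length; $index++) {
    object.array[$index].copy($buffer, $start)  // copy one chunk
    $start += object.array[$index].length
    $_ += object.array[$index].length           // running total of chunk bytes
}
$buffer[$start++] = 0xd                         // terminator bytes
$buffer[$start++] = 0xa
$buffer[$start++] = object.sentry               // sentry: 8
// $buffer is now <aa 61 62 63 64 65 66 67 68 69 6a 0d 0a aa>
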
131 changes: 110 additions & 21 deletions serialize.inc.js
@@ -242,34 +242,123 @@ function generate (packet, { require = null }) {
return source
}

function copy (path, element, buffered) {
let i
if (!element.concat) {
locals['offset'] = 0
locals['length'] = 0
i = `$i[${++$i}]`
}
const source = element.concat
// Copy the single buffer using copy.
? $(`
case ${$step++}:
$_ = 0
case ${$step++}: {
$step = ${$step - 1}
const length = Math.min($end - $start, ${path}.length - $_)
${path}.copy($buffer, $start, $_, $_ + length)
$start += length
$_ += length
if ($_ != ${path}.length) {
`, buffered, `
return { start: $start, serialize: $serialize }
}
$step = ${$step}
}
`)
// Loop through an array of buffers copying to the serialization
// buffer using `Buffer.copy()`. Need to track the index of the
// current buffer in the array and the offset in the current buffer.
: $(`
case ${$step++}:
$_ = 0
$offset = 0
$length = ${path}.reduce((sum, buffer) => sum + buffer.length, 0)
${i} = 0
case ${$step++}: {
$step = ${$step - 1}
for (;;) {
const length = Math.min($end - $start, ${path}[${i}].length - $offset)
${path}[${i}].copy($buffer, $start, $offset, $offset + length)
$offset += length
$start += length
$_ += length
if ($offset == ${path}[${i}].length) {
${i}++
$offset = 0
}
if ($_ == $length) {
break
}
`, buffered, `
return { start: $start, serialize: $serialize }
}
$step = ${$step}
}
`)
if (element.concat) {
i--
}
return source
}

function terminated (path, field) {
surround = true
const buffered = accumulate.buffered.length != 0
? accumulate.buffered.map(buffered => {
return $(`
`, buffered.source, `
$starts[${buffered.start}] = $start
`)
}).join('\n') : null
function terminate () {
return join(field.terminator.map(bite => {
return $(`
case ${$step++}:
if ($start == $end) {
`, buffered, `
return { start: $start, serialize: $serialize }
}
$buffer[$start++] = ${hex(bite)}
$step = ${$step}
`)
}))
}
const element = field.fields[0]
if (element.type == 'buffer') {
const source = $(`
`, copy(path, element, buffered), `
`, terminate(), `
`)
return source
}
$i++
const init = $step
const again = ++$step
const i = `$i[${$i}]`
const looped = join(field.fields.map(field => dispatch(`${path}[${i}]`, field)))
const done = $step
const buffered = accumulate.buffered.map(buffered => {
return $(`
`, buffered.source, `
$starts[${buffered.start}] = $start
`)
})
const terminator = join(field.terminator.map(bite => {
return $(`
case ${$step++}:
if ($start == $end) {
`, buffered.length != 0 ? buffered.join('\n') : null, `
return { start: $start, serialize: $serialize }
}
$buffer[$start++] = 0x${bite.toString(16)}
$step = ${$step}
`)
}))
const source = $(`
case ${init}:
@@ -284,7 +373,7 @@ function generate (packet, { require = null }) {
$step = ${done}
`, terminator, `
`, terminate(), `
case ${$step++}:
`)
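
The non-concatenated branch of copy above is a resumable copy: it tracks the chunk index in `$i`, the offset within the current chunk in `$offset`, and the running total in `$_`, and returns to the caller whenever the destination fills. A standalone illustration of the same idea, independent of the generated `$step` machinery (assumed names, sketch only):

// Copy an array of chunks into a bounded destination, handing back a resumable
// state when the destination fills, much as the incremental serializer does.
function copyChunks (chunks, destination, state = { index: 0, offset: 0, copied: 0 }) {
    let start = 0
    while (state.index < chunks.length) {
        const chunk = chunks[state.index]
        const length = Math.min(destination.length - start, chunk.length - state.offset)
        chunk.copy(destination, start, state.offset, state.offset + length)
        start += length
        state.offset += length
        state.copied += length
        if (state.offset == chunk.length) {
            state.index++
            state.offset = 0
        }
        if (start == destination.length) {
            break
        }
    }
    return { written: start, done: state.index == chunks.length, state }
}

// Drive it across two small destination buffers.
const chunks = [ Buffer.from('abcde'), Buffer.from('fghij') ]
const resumed = copyChunks(chunks, Buffer.alloc(7))     // copies 7 bytes, not done
copyChunks(chunks, Buffer.alloc(7), resumed.state)      // copies the remaining 3
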
7 changes: 6 additions & 1 deletion sizeof.js
@@ -113,9 +113,14 @@ function generate (packet, { require = null }) {
return source
}
case 'terminated': {
// TODO Use AST rollup `fixed`.
if (field.fields.filter(field => !field.fixed).length == 0) {
const bits = field.fields.reduce((sum, field) => sum + field.bits, 0)
return $(`
return field.fields[0].type == 'buffer' && !field.fields[0].concat
? $(`
$start += ${path}.reduce((sum, buffer) => sum + buffer.length, 0) +
${field.terminator.length}
`) : $(`
$start += ${bits >>> 3} * ${path}.length + ${field.terminator.length}
`)
}
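
A quick worked check of the new chunked branch in sizeof.js above, using the terminated/chunked test object from the next file: the buffer contributes the sum of its chunk lengths plus the two-byte terminator.

const object = { nudge: 0xaa, array: [ Buffer.from('abcdefghij') ], sentry: 0xaa }
const size = 1                                                                  // nudge: 8
           + object.array.reduce((sum, buffer) => sum + buffer.length, 0) + 2   // chunks + 0xd 0xa
           + 1                                                                  // sentry: 8
console.log(size)                                                               // 14
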
14 changes: 14 additions & 0 deletions test/cycle/terminated.t.js
@@ -42,4 +42,18 @@ function prove (okay) {
nudge: 0xaa, array: Buffer.from('abcdefghij'), sentry: 0xaa
}]
})
cycle(okay, {
name: 'terminated/chunked',
define: {
object: {
nudge: 8,
array: [ [[ Buffer ]], 0xd, 0xa ],
sentry: 8
}
},
objects: [{
nudge: 0xaa, array: [ Buffer.from('abcdefghij') ], sentry: 0xaa
}],
stopAt: 'serialize.chk'
})
}
3 changes: 3 additions & 0 deletions test/generated/terminated/chunked.lookup.js
@@ -0,0 +1,3 @@
module.exports = function ({ $lookup }) {
$lookup.push.apply($lookup, [])
}
24 changes: 24 additions & 0 deletions test/generated/terminated/chunked.parser.all.js
@@ -0,0 +1,24 @@
module.exports = function ({ parsers, $lookup }) {
parsers.all.object = function () {
return function ($buffer, $start) {
let $_, $i = []

let object = {
nudge: 0,
array: null,
sentry: 0
}

object.nudge = ($buffer[$start++])

$_ = $buffer.indexOf(Buffer.from([ 13, 10 ]), $start)
$_ = ~$_ ? $_ : $start
object.array = [ $buffer.slice($start, $_) ]
$start = $_ + 2

object.sentry = ($buffer[$start++])

return object
}
} ()
}
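
A hedged usage sketch for the generated whole-buffer parser above. The `parsers` registry shape and the require path are assumptions taken from this fixture's module signature and location; the expected values come from the terminated/chunked test.

// Sketch only: wire up and exercise the generated parser.
const parsers = { all: {} }
require('./test/generated/terminated/chunked.parser.all.js')({ parsers, $lookup: [] })

const buffer = Buffer.concat([
    Buffer.from([ 0xaa ]),          // nudge
    Buffer.from('abcdefghij'),      // terminated buffer body
    Buffer.from([ 0xd, 0xa ]),      // terminator
    Buffer.from([ 0xaa ])           // sentry
])

const object = parsers.all.object(buffer, 0)
// object.array should be an array containing a single chunk, the ten bytes
// before the terminator, with object.nudge == object.sentry == 0xaa.
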