Serialize and parse fixed byte arrays as buffers.
Implement serialization and parsing of buffers, both fixed and terminated.
Incremental parsing of padded fixed arrays is not yet redirected to the
terminated parser implementation, which will add checks for reaching the
maximum fixed array length. Incremental terminated buffer parsing is
implemented, but serialization and synchronous operation are not.

Buffers are either catenated, meaning that during an incremental parse,
multiple buffers are gathered, then catenated into a single buffer, or else
they are left as an array of buffers which are themselves slices of the
underlying buffer.

Closes #506.
Closes #513.
Closes #517.
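
The distinction can be sketched in plain Node.js; a minimal illustration, not
the library's actual API (`chunks`, `onChunk`, `catenated`, and `sliced` are
hypothetical names):

    // During an incremental parse, the bytes of one logical buffer field
    // may arrive split across several underlying buffers.
    const chunks = []

    function onChunk (buffer, start, end) {
        // Each gathered piece is a slice that shares memory with its source.
        chunks.push(buffer.slice(start, end))
    }

    // Catenated: copy the gathered slices into a single contiguous buffer.
    function catenated () {
        return Buffer.concat(chunks)
    }

    // Not catenated: return the array of slices as-is, avoiding the copy.
    function sliced () {
        return chunks
    }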
flatheadmill committed Jul 11, 2020
1 parent f60ef24 commit 7eed865
Showing 137 changed files with 3,681 additions and 543 deletions.
10 changes: 10 additions & 0 deletions hex.js
@@ -0,0 +1,10 @@
function hex (value) {
    switch (typeof value) {
    case 'number':
        return `0x${value.toString(16)}`
    case 'object':
        return `[ ${value.map(value => hex(value)).join(', ')} ]`
    }
}

module.exports = hex
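
The helper formats a number as a hexadecimal literal and recurses into arrays
(the `object` case), for example:

    hex(255)           // => '0xff'
    hex([ 13, 10 ])    // => '[ 0xd, 0xa ]'
    hex([ 1, [ 2 ] ])  // => '[ 0x1, [ 0x2 ] ]'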
53 changes: 48 additions & 5 deletions parse.all.js
@@ -387,17 +387,57 @@ function generate (packet, { require, bff }) {
    }

    function fixed (path, field) {
        // Fetch the type of element.
        const element = field.fields[field.fields.length - 1]
        //

        // Buffers can use `indexOf`, `fill` and `copy` and will be much faster
        // than operating byte-by-byte.

        //
        if (element.type == 'buffer') {
            variables.register = true
            variables.slice = true
            // Advance past buffer read to padding skip.
            $step += field.pad.length == 0 ? 2 : 3
            const slice = $(`
                $slice = $buffer.slice($start, ${field.length})
                $start += ${field.length}
            `)
            const assign = element.concat ? `${path} = $slice` : `${path}.push($slice)`
            if (field.pad.length != 0) {
                $step += field.pad.length
                const pad = field.pad.length > 1
                    ? `Buffer.from(${util.format(field.pad)})`
                    : field.pad[0]
                return ($(`
                    `, slice, `
                    $_ = $slice.indexOf(${pad})
                    if (~$_) {
                        $slice = $buffer.slice(0, $_)
                    }
                    `, assign, `
                `))
            }
            // See: https://marcradziwill.com/blog/mastering-javascript-high-performance/
            return ($(`
                `, slice, `
                `, assign, `
            `))
        }
        variables.i = true
        const i = `$i[${++$i}]`
        $step += 1
        const check = bff && field.pad.length != 0
            ? checkpoint({ lengths: [ field.pad.length ] })
            : null
-       $step += 1
-       $step += field.pad.length
+       // Advance past initialization and terminator tests.
+       $step += 1 + field.pad.length
        const looped = join(field.fields.map(field => dispatch(path + `[${i}]`, field)))
-       $step += field.pad.length
-       $step += 3 // Skip termination test and fill.
+       // Advance past end-of-loop test and fill skip.
+       $step += 1 + (field.pad.length != 0 ? 2 : 0)
        const terminator = field.pad.map((bite, index) => {
            if (index == 0) {
                return `$buffer[$start] == 0x${bite.toString(16)}`
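
Expanded by hand, the source the buffer branch above generates for, say, a
16-byte field padded with 0x0 would look roughly like this (an illustrative
sketch, with `object.field` standing in for `${path}`; not the generator's
verbatim output):

    let $slice = $buffer.slice($start, $start + 16)
    $start += 16
    const $_ = $slice.indexOf(0x0)
    // indexOf returns -1 when no pad byte is found, and ~(-1) is 0,
    // so the trim only runs when padding is actually present.
    if (~$_) {
        $slice = $slice.slice(0, $_)
    }
    object.field = $slice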
@@ -438,7 +478,9 @@ function generate (packet, { require, bff }) {
        return $(`
            `, source, `
-           $start += (${field.length} - ${i}) * ${field.bits / field.length / 8} - ${field.pad.length}
+           $start += ${field.length} != ${i}
+               ? (${field.length} - ${i}) * ${field.bits / field.length / 8} - ${field.pad.length}
+               : 0
        `)
    }
    return source
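
The guard this hunk adds matters when the fixed array is full. A worked
example with illustrative numbers (8 one-byte elements, a 1-byte pad):

    const length = 8, bytesPerElement = 1, padLength = 1
    function skip (i) {
        // Only skip the unread remainder when the loop stopped early.
        return length != i ? (length - i) * bytesPerElement - padLength : 0
    }
    skip(3)   // 4: five unread slots, less the pad byte already consumed
    skip(8)   // 0: array was full; the unguarded expression would give -1

Without the conditional, a full array would rewind `$start` by the pad
length, which appears to be the off-by-one this change corrects.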
@@ -663,6 +705,7 @@ function generate (packet, { require, bff }) {
        i: '$i = []',
        I: '$I = []',
        sip: '$sip = []',
+       slice: '$slice = null',
        accumulator: '$accumulator = {}',
        starts: '$starts = []'
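
Presumably these entries are consumed when the generated function's preamble
is assembled; a hedged sketch, since the consuming code is outside this hunk
(`inits` and the assembly below are assumptions, not the file's actual code):

    const inits = { i: '$i = []', slice: '$slice = null' /* ... */ }
    // A branch registers a need (for example, `variables.slice = true` in
    // the buffer branch) and only registered variables are declared.
    const preamble = Object.keys(variables)
        .filter(name => variables[name])
        .map(name => `let ${inits[name]}`)
        .join('\n')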
