diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml
new file mode 100644
index 0000000..7789a02
--- /dev/null
+++ b/.github/dependabot.yaml
@@ -0,0 +1,11 @@
+version: 2
+updates:
+  - package-ecosystem: github-actions
+    directory: /
+    schedule:
+      interval: daily
+  - package-ecosystem: npm
+    directory: /
+    schedule:
+      interval: daily
+    versioning-strategy: increase-if-necessary
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
deleted file mode 100644
index b480ea2..0000000
--- a/.github/workflows/ci.yaml
+++ /dev/null
@@ -1,17 +0,0 @@
-name: CI
-
-on: push
-
-jobs:
-  test:
-    runs-on: ubuntu-20.04
-    strategy:
-      matrix:
-        node: [ '10', '12', '14' ]
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-node@v2
-        with:
-          node-version: ${{ matrix.node }}
-      - run: npm install
-      - run: npm test
diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
new file mode 100644
index 0000000..1e2e5f3
--- /dev/null
+++ b/.github/workflows/test.yaml
@@ -0,0 +1,21 @@
+name: Test
+on:
+  - pull_request
+  - push
+jobs:
+  test:
+    runs-on: ubuntu-24.04
+    strategy:
+      matrix:
+        node:
+          - '18'
+          - '20'
+          - '22'
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-node@v4
+        with:
+          node-version: ${{ matrix.node }}
+      - run: npm install
+      - run: npm test
+      - uses: codecov/codecov-action@v4
diff --git a/.gitignore b/.gitignore
index 9cfc2e3..3a2cff1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,4 @@
-.idea
 coverage
 node_modules
-.DS_Store
+package-lock.json
 test/spec
diff --git a/.npmrc b/.npmrc
deleted file mode 100644
index 43c97e7..0000000
--- a/.npmrc
+++ /dev/null
@@ -1 +0,0 @@
-package-lock=false
diff --git a/LICENSE.md b/LICENSE.md
index 847a610..d319c08 100644
--- a/LICENSE.md
+++ b/LICENSE.md
@@ -1,9 +1,21 @@
-# License
-The MIT License (MIT)
-Copyright © 2018–2019 Zazuko GmbH
+MIT License
 
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+Copyright (c) 2024 Thomas Bergwinkl
 
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
 
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
index 3869e86..7caebe6 100644
--- a/README.md
+++ b/README.md
@@ -1,11 +1,14 @@
 # rdf-parser-csvw
 
-A CSV on the Web parser with [RDFJS Stream interface](https://github.com/rdfjs/representation-task-force/).
+[![build status](https://img.shields.io/github/actions/workflow/status/rdf-ext/rdf-parser-csvw/test.yaml?branch=master)](https://github.com/rdf-ext/rdf-parser-csvw/actions/workflows/test.yaml)
+[![npm version](https://img.shields.io/npm/v/rdf-parser-csvw.svg)](https://www.npmjs.com/package/rdf-parser-csvw)
+
+A CSV on the Web parser with [RDF/JS Stream interface](https://github.com/rdfjs/representation-task-force/).
 
 ## Usage
 
 The package exports the parser as a class, so an instance must be created before it can be used.
-The `.import` method, as defined in the [RDFJS specification](http://rdf.js.org/#sink-interface), must be called to do the actual parsing.
+The `.import` method, as defined in the [RDF/JS specification](http://rdf.js.org/#sink-interface), must be called to do the actual parsing.
 It expects a stream of strings.
 The method will return a stream which emits the parsed quads.
 
@@ -17,7 +20,7 @@ The constructor accepts an `options` object with the following optional keys:
 - `baseIRI`: Use the IRI to create Named Nodes.
   The value must be a String.
   This option is required.
-- `factory`: Use an alternative RDFJS data factory.
+- `factory`: Use an alternative RDF/JS data factory.
   By default the [reference implementation](https://github.com/rdfjs/data-model/) is used.
 - `timezone`: Use an alternative timezone to parse date and time values.
   The value must be given as a String as defined in the [Luxon documentation](https://moment.github.io/luxon/docs/manual/zones.html#specifying-a-zone).
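The README above describes the constructor options and the `.import` method, but neither the README nor the diff contains an end-to-end example. For review, here is a minimal usage sketch of the modernized ESM API, mirroring what the updated tests in this PR do. The file names `example.csv` / `example.csv-metadata.json` are hypothetical, and `rdf.io.dataset.fromURL` / `rdf.dataset().import` come from `rdf-ext` (a devDependency added by this PR), not from this package.

```js
// Usage sketch — assumes the two example.* files exist locally and that
// rdf-ext is installed alongside rdf-parser-csvw.
import { createReadStream } from 'node:fs'
import rdf from 'rdf-ext'
import CsvwParser from 'rdf-parser-csvw'

// the CSVW metadata is handed to the parser as an RDF/JS dataset
const metadata = await rdf.io.dataset.fromURL('example.csv-metadata.json')

const parser = new CsvwParser({
  factory: rdf,
  baseIRI: 'example.csv',
  metadata,
  timezone: 'UTC'
})

// .import() consumes a stream of strings and returns a stream of quads
const quadStream = parser.import(createReadStream('example.csv'))
const dataset = await rdf.dataset().import(quadStream)

console.log(dataset.toCanonical())
```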
diff --git a/bin/csvw-metadata.js b/bin/csvw-metadata.js
index cdb405a..ed8316c 100755
--- a/bin/csvw-metadata.js
+++ b/bin/csvw-metadata.js
@@ -1,8 +1,7 @@
 #!/usr/bin/env node
 
-const MetadataBuilder = require('../lib/MetadataBuilder')
-
-const program = require('commander')
+import program from 'commander'
+import MetadataBuilder from '../lib/MetadataBuilder.js'
 
 program
   .arguments('')
diff --git a/index.js b/index.js
index 1635ba9..a94cc40 100644
--- a/index.js
+++ b/index.js
@@ -1,7 +1,7 @@
-const CsvParser = require('./lib/CsvParser')
-const parseMetadata = require('./lib/metadata')
-const rdf = require('@rdfjs/data-model')
-const ObjectParserTransform = require('./lib/ObjectParserTransform')
+import rdf from '@rdfjs/data-model'
+import CsvParser from './lib/CsvParser.js'
+import parseMetadata from './lib/metadata/index.js'
+import ObjectParserTransform from './lib/ObjectParserTransform.js'
 
 class Parser {
   constructor ({ metadata, baseIRI = '', factory = rdf, timezone, relaxColumnCount, skipLinesWithError } = {}) {
@@ -43,7 +43,7 @@
       output.destroy(err)
     })
 
-    input.on('error', (err) => {
+    input.on('error', err => {
       output.destroy(err)
     })
 
@@ -57,4 +57,4 @@
   }
 }
 
-module.exports = Parser
+export default Parser
diff --git a/lib/CsvParser.js b/lib/CsvParser.js
index 554294e..0b43f92 100644
--- a/lib/CsvParser.js
+++ b/lib/CsvParser.js
@@ -1,5 +1,5 @@
-const { Parser } = require('csv-parse')
-const { Transform } = require('readable-stream')
+import { Parser } from 'csv-parse'
+import { Transform } from 'readable-stream'
 
 class CsvParser extends Transform {
   constructor ({ delimiter, lineTerminators, quoteChar, relaxColumnCount, skipLinesWithError } = {}) {
@@ -43,4 +43,4 @@
   }
 }
 
-module.exports = CsvParser
+export default CsvParser
diff --git a/lib/MetadataBuilder.js b/lib/MetadataBuilder.js
index a41534a..fdc55e6 100644
--- a/lib/MetadataBuilder.js
+++ b/lib/MetadataBuilder.js
@@ -1,4 +1,4 @@
-const fs = require('fs')
+import fs from 'fs'
 
 class MetadataBuilder {
   static readFirstLine (filename) {
@@ -83,4 +83,4 @@
   }
 }
 
-module.exports = MetadataBuilder
+export default MetadataBuilder
diff --git a/lib/ObjectParserTransform.js b/lib/ObjectParserTransform.js
index 8402c0a..47c519b 100644
--- a/lib/ObjectParserTransform.js
+++ b/lib/ObjectParserTransform.js
@@ -1,7 +1,7 @@
-const parseMetadata = require('./metadata')
-const namespace = require('./namespace')
-const rdf = require('@rdfjs/data-model')
-const { Transform } = require('readable-stream')
+import rdf from '@rdfjs/data-model'
+import { Transform } from 'readable-stream'
+import parseMetadata from './metadata/index.js'
+import namespace from './namespace.js'
 
 class ObjectParserTransform extends Transform {
   constructor ({ baseIRI = '', factory = rdf, metadata, tableSchema, timezone } = {}) {
@@ -60,7 +60,7 @@
       const urlQuad = [...this.parsedMetadata.dataset.match(null, this.ns.url)][0]
 
      if (urlQuad) {
-        this.copySubgraph([...this.parsedMetadata.dataset.match(urlQuad.subject)].filter((quad) => {
+        this.copySubgraph([...this.parsedMetadata.dataset.match(urlQuad.subject)].filter(quad => {
          return quad.predicate.value.slice(0, 26) !== 'http://www.w3.org/ns/csvw#'
        }), this.tableNode)
      }
@@ -100,7 +100,7 @@
       describesNode
     ))
 
-    this.tableSchema.columns({ contentLine: this.contentLine, row: rowData }).forEach((column) => {
+    this.tableSchema.columns({ contentLine: this.contentLine, row: rowData }).forEach(column => {
       this.push(this.factory.quad(
         column.subject || describesNode,
         column.property,
@@ -130,7 +130,7 @@
   }
 
   copySubgraph (quads, subject) {
-    quads.forEach((quad) => {
+    quads.forEach(quad => {
       this.push(this.factory.quad(
         subject || quad.subject,
         quad.predicate,
@@ -144,4 +144,4 @@
   }
 }
 
-module.exports = ObjectParserTransform
+export default ObjectParserTransform
diff --git a/lib/metadata/Metadata.js b/lib/metadata/Metadata.js
index 43e508a..00f2dd6 100644
--- a/lib/metadata/Metadata.js
+++ b/lib/metadata/Metadata.js
@@ -1,5 +1,5 @@
-const namespace = require('../namespace')
-const TableSchema = require('./TableSchema')
+import namespace from '../namespace.js'
+import TableSchema from './TableSchema.js'
 
 class Metadata {
   constructor (dataset, { baseIRI, factory, timezone } = {}) {
@@ -61,4 +61,4 @@
   }
 }
 
-module.exports = Metadata
+export default Metadata
diff --git a/lib/metadata/RdfUtils.js b/lib/metadata/RdfUtils.js
index 878b203..60a2807 100644
--- a/lib/metadata/RdfUtils.js
+++ b/lib/metadata/RdfUtils.js
@@ -1,4 +1,4 @@
-const rdf = require('@rdfjs/data-model')
+import rdf from '@rdfjs/data-model'
 
 const ns = {
   first: rdf.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#first'),
@@ -52,4 +52,4 @@
   }
 }
 
-module.exports = RdfUtils
+export default RdfUtils
diff --git a/lib/metadata/TableSchema.js b/lib/metadata/TableSchema.js
index e42cf7e..e0769e1 100644
--- a/lib/metadata/TableSchema.js
+++ b/lib/metadata/TableSchema.js
@@ -1,9 +1,9 @@
-const difference = require('lodash/difference')
-const namespace = require('../namespace')
-const parseDateTime = require('../parseDateTime')
-const uriTemplate = require('uri-templates')
-const URL = require('url')
-const RdfUtils = require('./RdfUtils')
+import url from 'node:url'
+import difference from 'lodash/difference.js'
+import uriTemplate from 'uri-templates'
+import namespace from '../namespace.js'
+import parseDateTime from '../parseDateTime.js'
+import RdfUtils from './RdfUtils.js'
 
 const defaultColumnNames = new Set(['_column', '_sourceColumn', '_row', '_sourceRow', '_name'])
 
@@ -39,8 +39,9 @@
     const aboutUrlTemplate = uriTemplate(aboutUrl)
 
-    return (row) => {
-      return this.factory.namedNode(URL.resolve(this.baseIRI, aboutUrlTemplate.fill(row))) // eslint-disable-line node/no-deprecated-api
+    return row => {
+      // eslint-disable-next-line
+      return this.factory.namedNode(url.resolve(this.baseIRI, aboutUrlTemplate.fill(row))) // eslint-disable-line node/no-deprecated-api
     }
   }
 
@@ -57,7 +58,7 @@
   parseColumns () {
     const columnNode = RdfUtils.findNode(this.dataset, this.root, this.ns.column)
 
-    this.parsedColumns = RdfUtils.parseArray(this.dataset, columnNode).map((node) => {
+    this.parsedColumns = RdfUtils.parseArray(this.dataset, columnNode).map(node => {
       const titles = RdfUtils.findValues(this.dataset, node, this.ns.title)
       const name = RdfUtils.findValue(this.dataset, node, this.ns.name) || titles[0]
       const aboutUrl = RdfUtils.findValue(this.dataset, node, this.ns.aboutUrl)
@@ -101,7 +102,7 @@
 
     return {
       base: this.factory.namedNode('http://www.w3.org/2001/XMLSchema#' + (base || 'string')),
-      format: format
+      format
     }
   }
 
@@ -111,7 +112,7 @@
       this.createAllColumns(row)
     }
 
-      return this.allColumns.map((column) => {
+      return this.allColumns.map(column => {
        const cellData = { ...row, _name: column.name }
 
        return {
@@ -119,7 +120,7 @@
          property: this.property(column, cellData),
          value: this.value(column, cellData)
        }
-      }).filter((column) => {
+      }).filter(column => {
        return column.value !== undefined
      })
    } catch (cause) {
@@ -136,7 +137,8 @@
       return null
     }
 
-    return this.factory.namedNode(URL.resolve(this.baseIRI, column.aboutUrl.fill(row))) // eslint-disable-line node/no-deprecated-api
+    // eslint-disable-next-line
+    return this.factory.namedNode(url.resolve(this.baseIRI, column.aboutUrl.fill(row))) // eslint-disable-line node/no-deprecated-api
   }
 
   value (column, row) {
@@ -169,7 +171,9 @@
     }
 
     if (column.datatype.base) {
-      return this.factory.literal(value, (column.language && column.language.fill(row).toLowerCase()) || column.datatype.base)
+      const language = column.language && column.language.fill(row).toLowerCase()
+
+      return this.factory.literal(value, language || this.factory.namedNode(column.datatype.base))
     }
   }
 
@@ -183,7 +187,9 @@
     }, [])
 
     const undefinedColumns = difference(Object.keys(row), titles).reduce((titles, title) => {
-      if (defaultColumnNames.has(title)) return titles
+      if (defaultColumnNames.has(title)) {
+        return titles
+      }
 
       return [...titles, {
         name: title,
@@ -209,4 +215,4 @@
   }
 }
 
-module.exports = TableSchema
+export default TableSchema
diff --git a/lib/metadata/index.js b/lib/metadata/index.js
index 24d1207..fd0fe2b 100644
--- a/lib/metadata/index.js
+++ b/lib/metadata/index.js
@@ -1,4 +1,4 @@
-const Metadata = require('./Metadata')
+import Metadata from './Metadata.js'
 
 function metadata (input, { baseIRI, factory, timezone } = {}) {
   if (!input || typeof input.match === 'function') {
@@ -8,4 +8,4 @@
   return input
 }
 
-module.exports = metadata
+export default metadata
diff --git a/lib/namespace.js b/lib/namespace.js
index 406c427..3cad4a9 100644
--- a/lib/namespace.js
+++ b/lib/namespace.js
@@ -1,4 +1,4 @@
-const rdf = require('@rdfjs/data-model')
+import rdf from '@rdfjs/data-model'
 
 function namespace (factory) {
   factory = factory || rdf
@@ -40,4 +40,4 @@
   }
 }
 
-module.exports = namespace
+export default namespace
diff --git a/lib/parseDateTime.js b/lib/parseDateTime.js
index 529d2cf..dce339b 100644
--- a/lib/parseDateTime.js
+++ b/lib/parseDateTime.js
@@ -1,4 +1,4 @@
-const { DateTime } = require('luxon')
+import { DateTime } from 'luxon'
 
 function parseDateTime (value, format, timezone) {
   if (format) {
@@ -9,4 +9,4 @@
     DateTime.fromRFC2822(value, { zone: timezone })
 }
 
-module.exports = parseDateTime
+export default parseDateTime
diff --git a/package.json b/package.json
index 712922e..c7247fc 100644
--- a/package.json
+++ b/package.json
@@ -2,16 +2,17 @@
   "name": "rdf-parser-csvw",
   "version": "0.15.3",
   "description": "CSV on the Web parser",
+  "type": "module",
   "main": "index.js",
   "scripts": {
-    "test": "standard && mocha"
+    "test": "stricter-standard && c8 --reporter=lcov --reporter=text-summary mocha"
   },
   "bin": {
     "csvw-metadata": "./bin/csvw-metadata.js"
   },
   "repository": {
     "type": "git",
-    "url": "git://github.com/rdf-ext/rdf-parser-csvw.git"
+    "url": "https://github.com/rdf-ext/rdf-parser-csvw.git"
   },
   "keywords": [
     "rdf",
@@ -26,22 +27,21 @@
   },
   "homepage": "https://github.com/rdf-ext/rdf-parser-csvw",
   "dependencies": {
-    "@rdfjs/data-model": "^1.1.2",
-    "commander": "^3.0.1",
-    "csv-parse": "^5.3.1",
-    "lodash": "^4.17.15",
-    "luxon": "^1.17.3",
-    "readable-stream": "^3.4.0",
+    "@rdfjs/data-model": "^2.0.2",
+    "commander": "^12.1.0",
+    "csv-parse": "^5.5.6",
+    "lodash": "^4.17.21",
+    "luxon": "^3.4.4",
+    "readable-stream": "^4.5.2",
     "uri-templates": "^0.2.0"
   },
   "devDependencies": {
-    "@rdfjs/dataset": "^1.1.1",
-    "@rdfjs/parser-jsonld": "^1.1.2",
-    "@rdfjs/parser-n3": "^1.1.3",
-    "get-stream": "^6.0.0",
-    "glob": "^7.1.4",
-    "mocha": "^6.2.0",
-    "rdf-dataset-ext": "^1.0.0",
-    "standard": "^14.3.0"
+    "c8": "^9.1.0",
+    "glob": "^10.4.1",
+    "mocha": "^10.4.0",
+    "rdf-ext": "^2.5.1",
+    "rdf-test": "^0.1.0",
+    "stream-chunks": "^1.0.0",
+    "stricter-standard": "^0.3.1"
   }
 }
diff --git a/test/CsvParser.test.js b/test/CsvParser.test.js
index e7c91d2..2b0532b 100644
--- a/test/CsvParser.test.js
+++ b/test/CsvParser.test.js
@@ -1,24 +1,22 @@
-/* global describe, it */
-
-const assert = require('assert')
-const getStream = require('get-stream')
-const CsvParser = require('../lib/CsvParser')
-const { PassThrough } = require('readable-stream')
-const waitFor = require('./support/waitFor')
+import { deepStrictEqual, rejects, strictEqual } from 'node:assert'
+import { describe, it } from 'mocha'
+import { PassThrough } from 'readable-stream'
+import chunks from 'stream-chunks/chunks.js'
+import CsvParser from '../lib/CsvParser.js'
 
 describe('csvParser', () => {
   it('should be a function', () => {
-    assert.strictEqual(typeof CsvParser, 'function')
+    strictEqual(typeof CsvParser, 'function')
   })
 
   it('should return a Transform', () => {
     const parser = new CsvParser()
 
-    assert.strictEqual(parser.readable, true)
-    assert.strictEqual(parser.writable, true)
+    strictEqual(parser.readable, true)
+    strictEqual(parser.writable, true)
   })
 
-  it('should parse CSV with header', () => {
+  it('should parse CSV with header', async () => {
     const input = new PassThrough()
     const parser = new CsvParser()
 
@@ -30,7 +28,7 @@
     input.write('value0,value1\n')
     input.end()
 
-    return waitFor(parser)
+    await chunks(parser)
   })
 
   it('should parse CSV with BOM', async () => {
@@ -48,7 +46,7 @@
       }
     }]
 
-    parser.on('data', (data) => {
+    parser.on('data', data => {
       output.push(data)
     })
 
@@ -56,12 +54,12 @@
     input.write('value0,value1\n')
     input.end()
 
-    await waitFor(parser)
+    await chunks(parser)
 
-    assert.deepStrictEqual(output, expected)
+    deepStrictEqual(output, expected)
   })
 
-  it('should output objects with line number and row data', () => {
+  it('should output objects with line number and row data', async () => {
     const input = new PassThrough()
     const parser = new CsvParser()
 
@@ -76,7 +74,7 @@
       }
     }]
 
-    parser.on('data', (data) => {
+    parser.on('data', data => {
      output.push(data)
    })
 
@@ -84,12 +82,12 @@
     input.write('value0,value1\n')
     input.end()
 
-    return waitFor(parser).then(() => {
-      assert.deepStrictEqual(output, expected)
-    })
+    await chunks(parser)
+
+    deepStrictEqual(output, expected)
   })
 
-  it('should parse lines with alternative delimiter', () => {
+  it('should parse lines with alternative delimiter', async () => {
     const input = new PassThrough()
     const parser = new CsvParser({ delimiter: ';' })
 
@@ -104,7 +102,7 @@
       }
     }]
 
-    parser.on('data', (data) => {
+    parser.on('data', data => {
       output.push(data)
     })
 
@@ -112,12 +110,12 @@
     input.write('value0;value1\n')
     input.end()
 
-    return waitFor(parser).then(() => {
-      assert.deepStrictEqual(output, expected)
-    })
+    await chunks(parser)
+
+    deepStrictEqual(output, expected)
   })
 
-  it('should parse lines with alternative lineTerminator and UTF16LE encoding', () => {
+  it('should parse lines with alternative lineTerminator and UTF16LE encoding', async () => {
     const input = new PassThrough()
     const parser = new CsvParser({ lineTerminators: ['\r\n'] })
 
@@ -132,7 +130,7 @@
       }
     }]
 
-    parser.on('data', (data) => {
+    parser.on('data', data => {
       output.push(data)
     })
 
@@ -141,9 +139,9 @@
     input.write('value0,value1\r\n', 'utf16le')
     input.end()
 
-    return waitFor(parser).then(() => {
-      assert.deepStrictEqual(output, expected)
-    })
+    await chunks(parser)
+
+    deepStrictEqual(output, expected)
   })
 
   it('should handle errors', async () => {
@@ -156,8 +154,8 @@
     input.write('value1_2,value2_2\n')
     input.end()
 
-    await assert.rejects(async () => {
-      await getStream.array(parser)
+    await rejects(async () => {
+      await chunks(parser)
     })
   })
 })
diff --git a/test/ObjectParserTransform.test.js b/test/ObjectParserTransform.test.js
index 4804aec..3a9a0e3 100644
--- a/test/ObjectParserTransform.test.js
+++ b/test/ObjectParserTransform.test.js
@@ -1,12 +1,10 @@
-/* global describe, it */
-
-const assert = require('assert')
-const fromStream = require('rdf-dataset-ext/fromStream')
-const toCanonical = require('rdf-dataset-ext/toCanonical')
-const rdf = require('./support/factory')
-const ObjectParserTransform = require('../lib/ObjectParserTransform')
-const { PassThrough } = require('readable-stream')
-const waitFor = require('./support/waitFor')
+import { strictEqual } from 'node:assert'
+import { describe, it } from 'mocha'
+import rdf from 'rdf-ext'
+import { datasetEqual } from 'rdf-test/assert.js'
+import { PassThrough } from 'readable-stream'
+import chunks from 'stream-chunks/chunks.js'
+import ObjectParserTransform from '../lib/ObjectParserTransform.js'
 
 const ns = {
   csvw: {
@@ -29,17 +27,17 @@
 
 describe('ObjectParserTransform', () => {
   it('should be a constructor', () => {
-    assert.strictEqual(typeof ObjectParserTransform, 'function')
+    strictEqual(typeof ObjectParserTransform, 'function')
   })
 
   it('should have a Transform interface', () => {
     const parser = new ObjectParserTransform()
 
-    assert.strictEqual(parser.readable, true)
-    assert.strictEqual(parser.writable, true)
+    strictEqual(parser.readable, true)
+    strictEqual(parser.writable, true)
   })
 
-  it('should parse object', () => {
+  it('should parse object', async () => {
     const input = new PassThrough({ objectMode: true })
     const parser = new ObjectParserTransform()
 
@@ -57,10 +55,10 @@
 
     input.end()
 
-    return waitFor(parser)
+    await chunks(parser)
   })
 
-  it('should output RDF objects', () => {
+  it('should output RDF objects', async () => {
     const input = new PassThrough({ objectMode: true })
     const parser = new ObjectParserTransform({ factory: rdf })
 
@@ -98,8 +96,8 @@
 
     input.end()
 
-    return fromStream(rdf.dataset(), parser).then((actual) => {
-      assert.strictEqual(toCanonical(actual), toCanonical(expected))
-    })
+    const actual = await rdf.dataset().import(parser)
+
+    datasetEqual(actual, expected)
   })
 })
diff --git a/test/parseDateTime.test.js b/test/parseDateTime.test.js
index 5155132..7cc5628 100644
--- a/test/parseDateTime.test.js
+++ b/test/parseDateTime.test.js
@@ -1,28 +1,27 @@
-/* global describe, it */
-
-const assert = require('assert')
-const parseDateTime = require('../lib/parseDateTime')
+import { strictEqual } from 'node:assert'
+import { describe, it } from 'mocha'
+import parseDateTime from '../lib/parseDateTime.js'
 
 describe('parseDateTime', () => {
   it('should be a function', () => {
-    assert.strictEqual(typeof parseDateTime, 'function')
+    strictEqual(typeof parseDateTime, 'function')
   })
 
   it('should parse a date time string', () => {
     const dateTime = parseDateTime('2018-01-01T01:00:00.000+0100')
 
-    assert.strictEqual(dateTime.toUTC().toISO(), '2018-01-01T00:00:00.000Z')
+    strictEqual(dateTime.toUTC().toISO(), '2018-01-01T00:00:00.000Z')
   })
 
   it('should parse a date time string and set the given timezone', () => {
     const dateTime = parseDateTime('2018-01-01T01:00:00', null, 'Europe/Berlin')
 
-    assert.strictEqual(dateTime.toUTC().toISO(), '2018-01-01T00:00:00.000Z')
+    strictEqual(dateTime.toUTC().toISO(), '2018-01-01T00:00:00.000Z')
   })
 
   it('should parse a date time string using the format argument', () => {
     const dateTime = parseDateTime('20180101 000000', 'yyyyMMdd HHmmss', 'UTC')
 
-    assert.strictEqual(dateTime.toUTC().toISO(), '2018-01-01T00:00:00.000Z')
+    strictEqual(dateTime.toUTC().toISO(), '2018-01-01T00:00:00.000Z')
   })
 })
diff --git a/test/spec.test.js b/test/spec.test.js
index dbe398d..4ff0157 100644
--- a/test/spec.test.js
+++ b/test/spec.test.js
@@ -1,16 +1,13 @@
-/* global describe, it */
-
-const assert = require('assert')
-const fs = require('fs')
-const path = require('path')
-const fromStream = require('rdf-dataset-ext/fromStream')
-const toCanonical = require('rdf-dataset-ext/toCanonical')
-const rdf = require('./support/factory')
-const CsvwParser = require('..')
-const JsonLdParser = require('@rdfjs/parser-jsonld')
-const N3Parser = require('@rdfjs/parser-n3')
-
-const blackList = [
+import { createReadStream } from 'node:fs'
+import { readFile } from 'node:fs/promises'
+import { basename, extname } from 'node:path'
+import { describe, it } from 'mocha'
+import rdf from 'rdf-ext'
+import { datasetEqual } from 'rdf-test/assert.js'
+import CsvwParser from '../index.js'
+import * as ns from './support/namespaces.js'
+
+const blackList = new Set([
   'manifest-rdf#test016',
   'manifest-rdf#test023',
   'manifest-rdf#test027',
@@ -57,117 +54,70 @@
   'manifest-rdf#test305',
   'manifest-rdf#test306',
   'manifest-rdf#test307'
-]
+])
 
-function datasetFromN3Fs (filename) {
-  const parser = new N3Parser({ baseIRI: new String('') }) // eslint-disable-line no-new-wrappers
+async function loadTest (testPtr) {
+  if (blackList.has(testPtr.value)) {
+    return
+  }
 
-  return fromStream(rdf.dataset(), parser.import(fs.createReadStream(filename), { factory: rdf }))
-}
+  const name = testPtr.out(ns.dawg.name).value
+  const action = testPtr.out(ns.dawg.action).value
+  const result = testPtr.out(ns.dawg.result).value
+  const implicit = testPtr.out(ns.csvwt.implicit).values[0]
+  const label = name + '<' + testPtr.value + '>'
+  const input = extname(action) === '.csv' ? action : implicit
+  const metadataUrl = input === action ? implicit : action
+  let metadata
+
+  if (metadataUrl && extname(metadataUrl) === '.json') {
+    metadata = await rdf.io.dataset.fromURL(`test/spec/${metadataUrl}`)
+  }
+
+  return () => {
+    it(label, async () => {
+      const parser = new CsvwParser({ factory: rdf })
+      const inputStream = createReadStream(`test/spec/${input}`)
+      const outputStream = parser.import(inputStream, {
+        baseIRI: basename(input),
+        metadata
+      })
 
-function datasetFromJsonLdFs (filename) {
-  const parser = new JsonLdParser()
+      const expected = await rdf.io.dataset.fromURL(`test/spec/${result}`)
+      const actual = await rdf.dataset().import(outputStream)
 
-  return fromStream(rdf.dataset(), parser.import(fs.createReadStream(filename), { factory: rdf }))
+      datasetEqual(actual, expected)
+    })
+  }
 }
 
-function loadTests () {
+async function loadTests () {
   const manifestFile = 'test/spec/manifest-rdf.ttl'
+  const tests = []
 
   try {
-    fs.readFileSync(manifestFile)
+    await readFile(manifestFile)
   } catch (err) {
     return Promise.resolve([])
   }
 
-  return datasetFromN3Fs(manifestFile).then((manifest) => {
-    let tests = [...manifest.match(
-      null,
-      rdf.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
-      rdf.namedNode('http://www.w3.org/2013/csvw/tests/vocab#ToRdfTest')
-    )].map((test) => {
-      return test.subject
-    }).map((test) => {
-      const name = [...manifest.match(test, rdf.namedNode('http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#name'))]
-        .map((t) => {
-          return t.object.value
-        })[0]
-
-      const action = [...manifest.match(test, rdf.namedNode('http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#action'))]
-        .map((t) => {
-          return t.object.value
-        })[0]
-
-      const result = [...manifest.match(test, rdf.namedNode('http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#result'))]
-        .map((t) => {
-          return t.object.value
-        })[0]
-
-      const implicit = [...manifest.match(test, rdf.namedNode('http://www.w3.org/2013/csvw/tests/vocab#implicit'))]
-        .map((t) => {
-          return t.object.value
-        })[0]
-
-      const label = name + '<' + test.value + '>'
-
-      const input = path.extname(action) === '.csv' ? action : implicit
-      const metadata = input === action ? implicit : action
-
-      return {
-        iri: test.value,
-        label: label,
-        name: name,
-        input: input,
-        metadata: metadata,
-        result: result
-      }
-    })
-
-    if (typeof blackList !== 'undefined') {
-      tests = tests.filter((test) => {
-        return blackList.indexOf(test.iri) === -1
-      })
-    }
+  const manifest = rdf.grapoi({
+    dataset: await rdf.io.dataset.fromURL(manifestFile)
+  })
 
-    return Promise.all(tests.map((test) => {
-      if (test.metadata) {
-        if (path.extname(test.metadata) === '.json') {
-          return datasetFromJsonLdFs(path.join(__dirname, 'spec', test.metadata)).then((metadata) => {
-            test.metadata = metadata
+  const testPtrs = manifest.hasOut(ns.rdf.type, ns.csvwt.ToRdfTest)
 
-            return test
-          })
-        }
-      }
+  for (const testPtr of testPtrs) {
+    tests.push(await loadTest(testPtr))
+  }
 
-      return test
-    }))
-  })
+  return tests.filter(Boolean)
 }
 
-loadTests().then((tests) => {
+loadTests().then(tests => {
   describe('W3C spec tests', () => {
-    tests.forEach((test) => {
-      it(test.label, () => {
-        const parser = new CsvwParser({ factory: rdf })
-        const input = fs.createReadStream('test/spec/' + test.input)
-        const stream = parser.import(input, {
-          baseIRI: path.basename(test.input),
-          metadata: test.metadata
-        })
-
-        return Promise.all([
-          datasetFromN3Fs('test/spec/' + test.result),
-          fromStream(rdf.dataset(), stream)
-        ]).then((results) => {
-          const expected = results[0]
-          const actual = results[1]
-
-          assert.strictEqual(toCanonical(actual), toCanonical(expected))
-        })
-      })
-    })
+    for (const test of tests) {
+      test()
+    }
   })
-}).catch((err) => {
-  console.error(err.stack)
 })
diff --git a/test/support/factory.js b/test/support/factory.js
deleted file mode 100644
index 910ed39..0000000
--- a/test/support/factory.js
+++ /dev/null
@@ -1,6 +0,0 @@
-const DataModelFactory = require('@rdfjs/data-model')
-const DatasetFactory = require('@rdfjs/dataset')
-
-const factory = { ...DataModelFactory, ...DatasetFactory }
-
-module.exports = factory
diff --git a/test/support/namespaces.js b/test/support/namespaces.js
new file mode 100644
index 0000000..3bf87ad
--- /dev/null
+++ b/test/support/namespaces.js
@@ -0,0 +1,11 @@
+import rdf from 'rdf-ext'
+
+const csvwt = rdf.namespace('http://www.w3.org/2013/csvw/tests/vocab#')
+const dawg = rdf.namespace('http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#')
+const rdfns = rdf.namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
+
+export {
+  csvwt,
+  dawg,
+  rdfns as rdf
+}
diff --git a/test/support/waitFor.js b/test/support/waitFor.js
deleted file mode 100644
index 11ae8ce..0000000
--- a/test/support/waitFor.js
+++ /dev/null
@@ -1,15 +0,0 @@
-const { finished } = require('readable-stream')
-
-function waitFor (stream) {
-  return new Promise((resolve, reject) => {
-    finished(stream, err => {
-      if (err) {
-        return reject(err)
-      }
-
-      resolve()
-    })
-  })
-}
-
-module.exports = waitFor
diff --git a/test/test-cases.test.js b/test/test-cases.test.js
index d441c33..9839e6d 100644
--- a/test/test-cases.test.js
+++ b/test/test-cases.test.js
@@ -1,17 +1,12 @@
-/* global describe, it */
+import { strictEqual } from 'node:assert'
+import { createReadStream } from 'node:fs'
+import { basename, dirname, join } from 'node:path'
+import { glob } from 'glob'
+import { describe, it } from 'mocha'
+import rdf from 'rdf-ext'
+import CsvwParser from '../index.js'
 
-const assert = require('assert')
-const fs = require('fs')
-const fromStream = require('rdf-dataset-ext/fromStream')
-const toCanonical = require('rdf-dataset-ext/toCanonical')
-const glob = require('glob')
-const path = require('path')
-const rdf = require('./support/factory')
-const CsvwParser = require('..')
-const JsonLdParser = require('@rdfjs/parser-jsonld')
-const N3Parser = require('@rdfjs/parser-n3')
-
-const blackList = [
+const blackList = new Set([
   '006',
   '007',
   '009',
@@ -19,58 +14,54 @@
   '012',
   '016',
   '017'
-]
+])
 
-function datasetFromN3Fs (filename) {
-  filename = path.resolve(filename)
+async function loadTest (csvFile) {
+  const basePath = dirname(csvFile)
+  const baseName = basename(csvFile, '.csv')
+  const metadataFile = join(basePath, baseName + '.csv-metadata.json')
+  const outputFile = join(basePath, baseName + '.nt')
+  const id = baseName.slice(4, 7)
 
-  try {
-    fs.readFileSync(filename)
-  } catch (err) {
-    return Promise.resolve(rdf.dataset())
+  if (blackList.has(id)) {
+    return
   }
 
-  const parser = new N3Parser({ baseIRI: new String(''), factory: rdf }) // eslint-disable-line no-new-wrappers
-
-  return fromStream(rdf.dataset(), parser.import(fs.createReadStream(filename)))
-}
+  return () => {
+    it(baseName, async () => {
+      const metadata = await rdf.io.dataset.fromURL(metadataFile)
+      const output = await rdf.io.dataset.fromURL(outputFile)
 
-function datasetFromJsonLdFs (filename) {
-  const parser = new JsonLdParser({ factory: rdf })
+      const parser = new CsvwParser({
+        factory: rdf,
+        baseIRI: basename(csvFile),
+        metadata,
+        timezone: 'UTC'
+      })
+      const input = createReadStream(csvFile)
+      const stream = parser.import(input)
+      const actual = await rdf.dataset().import(stream)
 
-  return fromStream(rdf.dataset(), parser.import(fs.createReadStream(path.resolve(filename))))
+      strictEqual(actual.toCanonical(), output.toCanonical())
+    })
+  }
 }
 
-describe('test-cases', () => {
-  glob.sync('test/support/test*.csv').forEach((csvFile) => {
-    const basePath = path.dirname(csvFile)
-    const baseName = path.basename(csvFile, '.csv')
-    const metadataFile = path.join(basePath, baseName + '.csv-metadata.json')
-    const outputFile = path.join(basePath, baseName + '.nt')
-    const id = baseName.slice(4, 7)
+async function loadTests () {
+  const csvFiles = await glob('test/support/test*.csv')
+  const tests = []
 
-    if (blackList.indexOf(id) !== -1) {
-      return
-    }
+  for (const csvFile of csvFiles.sort()) {
+    tests.push(await loadTest(csvFile))
+  }
 
-    it(baseName, () => {
-      return Promise.all([
-        datasetFromJsonLdFs(metadataFile),
-        datasetFromN3Fs(outputFile)
-      ]).then(([metadata, output]) => {
-        const parser = new CsvwParser({
-          factory: rdf,
-          baseIRI: path.basename(csvFile),
-          metadata: metadata,
-          timezone: 'UTC'
-        })
-        const input = fs.createReadStream(csvFile)
-        const stream = parser.import(input)
+  return tests.filter(Boolean)
+}
 
-        return fromStream(rdf.dataset(), stream).then((actual) => {
-          assert.strictEqual(toCanonical(actual), toCanonical(output))
-        })
-      })
-    })
+loadTests().then(tests => {
+  describe('test-cases', () => {
+    for (const test of tests) {
+      test()
+    }
   })
 })
diff --git a/test/test.js b/test/test.js
index ee88665..1ae3b35 100644
--- a/test/test.js
+++ b/test/test.js
@@ -1,8 +1,8 @@
-const { rejects } = require('assert')
-const getStream = require('get-stream')
-const { describe, it } = require('mocha')
-const { PassThrough } = require('readable-stream')
-const CsvwParser = require('../index.js')
+import { rejects } from 'node:assert'
+import { describe, it } from 'mocha'
+import { PassThrough } from 'readable-stream'
+import chunks from 'stream-chunks/chunks.js'
+import CsvwParser from '../index.js'
 
 describe('rdf-parser-csv', () => {
   it('should handle errors', async () => {
@@ -17,7 +17,7 @@
     const result = parser.import(input)
 
     await rejects(async () => {
-      await getStream.array(result)
+      await chunks(result)
    })
  })
})
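One consumer-facing consequence of the `"type": "module"` switch in `package.json` above: the package becomes ESM-only, so CommonJS code can no longer `require('rdf-parser-csvw')`. A sketch of the standard workaround via dynamic `import()` (the wrapper function is hypothetical, not part of this PR):

```js
// CommonJS consumer loading the now-ESM package via dynamic import() (sketch)
async function createParser (options) {
  const { default: CsvwParser } = await import('rdf-parser-csvw')

  return new CsvwParser(options)
}

module.exports = { createParser }
```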