diff --git a/bids-validator-web/package.json b/bids-validator-web/package.json index 3f77e0643..75dd8310d 100644 --- a/bids-validator-web/package.json +++ b/bids-validator-web/package.json @@ -5,6 +5,7 @@ "main": "index.js", "license": "MIT", "dependencies": { + "@babel/runtime": "^7.16.7", "bootstrap": "^4.3.0", "bowser": "^1.0.0", "next": "^11.1.2", diff --git a/bids-validator/bids_validator/rules/file_level_rules.json b/bids-validator/bids_validator/rules/file_level_rules.json index 54fb504f7..0c36e5081 100644 --- a/bids-validator/bids_validator/rules/file_level_rules.json +++ b/bids-validator/bids_validator/rules/file_level_rules.json @@ -523,5 +523,70 @@ "tokens": { "@@@_pet_ext_@@@": ["blood\\.tsv\\.gz", "blood\\.tsv", "blood\\.json"] } + }, + + "microscopy": { + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[0-9]+)?((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))$", + "tokens": { + "@@@_microscopy_type_@@@": [ + "_TEM", + "_SEM", + "_uCT", + "_BF", + "_DF", + "_PC", + "_DIC", + "_FLUO", + "_CONF", + "_PLI", + "_CARS", + "_2PE", + "_MPE", + "_SR", + "_NLO", + "_OCT", + "_SPIM" + ], + "@@@_microscopy_ext_@@@": [ + ".ome\\.tif", + ".ome\\.btf", + ".tif", + ".png" + ] + } + }, + "microscopy_photo": { + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(@@@_photo_ext_@@@)$", + "tokens":{ + "@@@_photo_ext_@@@": [ + "_photo\\.jpg", + "_photo\\.png", + "_photo\\.tif" + ] + } + }, + "microscopy_json": { + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[0-9]+)?(@@@_microscopy_type_@@@)\\.json$", + "tokens": { + "@@@_microscopy_type_@@@": [ + "_TEM", + "_SEM", + "_uCT", + "_BF", + "_DF", + "_PC", + "_DIC", + "_FLUO", + "_CONF", + "_PLI", + "_CARS", + "_2PE", + "_MPE", + "_SR", + "_NLO", + "_OCT", + "_SPIM" + ] + } } } diff --git a/bids-validator/bids_validator/rules/session_level_rules.json b/bids-validator/bids_validator/rules/session_level_rules.json index 48fd6c724..241ef3c10 100644 --- a/bids-validator/bids_validator/rules/session_level_rules.json +++ b/bids-validator/bids_validator/rules/session_level_rules.json @@ -201,5 +201,30 @@ "UNCInfant2V23" ] } + }, + + "microscopy_ses": { + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(?:_chunk-[0-9]+)?(@@@_microscopy_ses_type_@@@)$", + "tokens": { + "@@@_microscopy_ses_type_@@@": [ + "_TEM.json", + "_SEM.json", + "_uCT.json", + "_BF.json", + "_DF.json", + "_PC.json", + "_DIC.json", + "_FLUO.json", + "_CONF.json", + "_PLI.json", + "_CARS.json", + "_2PE.json", + "_MPE.json", + "_SR.json", + "_NLO.json", + "_OCT.json", + "_SPIM.json" + ] + } } } diff --git a/bids-validator/bids_validator/rules/top_level_rules.json b/bids-validator/bids_validator/rules/top_level_rules.json index 9757f6e53..8849157b6 100644 --- a/bids-validator/bids_validator/rules/top_level_rules.json +++ b/bids-validator/bids_validator/rules/top_level_rules.json @@ -15,7 +15,9 @@ "phase2.json", "fieldmap.json", "events.json", - "scans.json" + "scans.json", 
+ "samples.json", + "samples.tsv" ] } }, @@ -133,5 +135,30 @@ "tokens": { "@@@_other_top_files_ext_@@@": ["physio\\.json", "stim\\.json"] } + }, + + "microscopy_top": { + "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[0-9]+)?(?:@@@_microscopy_top_ext_@@@)$", + "tokens": { + "@@@_microscopy_top_ext_@@@": [ + "_TEM\\.json", + "_SEM\\.json", + "_uCT\\.json", + "_BF\\.json", + "_DF\\.json", + "_PC\\.json", + "_DIC\\.json", + "_FLUO\\.json", + "_CONF\\.json", + "_PLI\\.json", + "_CARS\\.json", + "_2PE\\.json", + "_MPE\\.json", + "_SR\\.json", + "_NLO\\.json", + "_OCT\\.json", + "_SPIM\\.json" + ] + } } } diff --git a/bids-validator/bin/bids-validator b/bids-validator/bin/bids-validator index 65840cd95..e5221accf 100755 --- a/bids-validator/bin/bids-validator +++ b/bids-validator/bin/bids-validator @@ -8,6 +8,7 @@ function entry(cli) { try { // Test if there's a development tree to run require.resolve('../cli.js') + process.env.ESBUILD_MAX_BUFFER = 64 * 1024 * 1024 // For dev, use esbuild-runner require('esbuild-runner/register') const { default: cli } = require('../cli.js') diff --git a/bids-validator/package.json b/bids-validator/package.json index a01350336..054da6d47 100644 --- a/bids-validator/package.json +++ b/bids-validator/package.json @@ -58,7 +58,9 @@ "stream-browserify": "^3.0.0", "table": "^5.2.3", "yaml": "^1.10.2", - "yargs": "^16.2.0" + "yargs": "^16.2.0", + "exifreader": "^4.1.0", + "xml2js": "^0.4.23" }, "devDependencies": { "adm-zip": "", diff --git a/bids-validator/tests/tsv.spec.js b/bids-validator/tests/tsv.spec.js index ef94134d2..9a477f75f 100644 --- a/bids-validator/tests/tsv.spec.js +++ b/bids-validator/tests/tsv.spec.js @@ -685,4 +685,30 @@ describe('TSV', function() { let issues = validate.TSV.validateContRec([physio_file], {}) assert(issues.length === 1 && issues[0].code === 133) }) + + // samples checks ----------------------------------------------------------- + + const samplesFile = { + name: 'samples.tsv', + relativePath: '/samples.tsv', + } + + it('should return errors for each missing mandatory header in samples.tsv', () => { + const tsv = 'wrong_col\nsome_data\n' + validate.TSV.TSV(samplesFile, tsv, [], function(issues) { + expect(issues.length).toBe(3) + const codes = issues.map(x => x.code) + expect(codes.includes(216)).toBe(true) + expect(codes.includes(217)).toBe(true) + expect(codes.includes(218)).toBe(true) + }) + }) + + it('should return an error for invalid sample_type samples.tsv', () => { + const tsv = 'sample_type\nbad\n' + validate.TSV.TSV(samplesFile, tsv, [], function(issues) { + const codes = issues.map(x => x.code) + expect(codes.includes(219)).toBe(true) + }) + }) }) diff --git a/bids-validator/utils/files/index.js b/bids-validator/utils/files/index.js index 02e7f48b1..237b940aa 100644 --- a/bids-validator/utils/files/index.js +++ b/bids-validator/utils/files/index.js @@ -3,6 +3,8 @@ import FileAPI from './FileAPI' import newFile from './newFile' import readFile from './readFile' +import readOMEFile from './readOMEFile' +import readBuffer from './readBuffer' import readNiftiHeader from './readNiftiHeader' import readDir from './readDir' import potentialLocations from './potentialLocations' @@ -21,6 +23,8 @@ export default { newFile, readFile, readDir, + readBuffer, + readOMEFile, readNiftiHeader, generateMergedSidecarDict, potentialLocations, diff --git a/bids-validator/utils/files/readBuffer.js b/bids-validator/utils/files/readBuffer.js new file 
mode 100644 index 000000000..732a50ae2 --- /dev/null +++ b/bids-validator/utils/files/readBuffer.js @@ -0,0 +1,23 @@ +import isNode from '../isNode' +import fs from 'fs' + +const readBuffer = file => { + return new Promise((resolve, reject) => { + if (isNode) { + resolve(fs.readFileSync(file.path)) + } else { + try { + const reader = new FileReader() + reader.onload = event => { + resolve(event.target.result) + } + + reader.readAsArrayBuffer(file) + } catch (e) { + reject(e) + } + } + }) +} + +export default readBuffer diff --git a/bids-validator/utils/files/readFile.js b/bids-validator/utils/files/readFile.js index b0efb9bff..4f420d6c7 100644 --- a/bids-validator/utils/files/readFile.js +++ b/bids-validator/utils/files/readFile.js @@ -25,7 +25,11 @@ const checkEncoding = (file, data, cb) => { } /** - * Read + * readFile + * @param {object | File} file - nodeJS fs file or browser File + * @param {boolean} annexed - is the file currently annexed? + * @param {string} dir - path to directory containing dataset. Only used if + * annexed is true. * * A helper method for reading file contents. * Takes a file object and a callback and calls @@ -39,12 +43,12 @@ const checkEncoding = (file, data, cb) => { function readFile(file, annexed, dir) { return new Promise((resolve, reject) => { if (isNode) { - testFile(file, annexed, dir, function (issue, stats, remoteBuffer) { + testFile(file, annexed, dir, function(issue, stats, remoteBuffer) { if (issue) { return reject(issue) } if (!remoteBuffer) { - fs.readFile(file.path, function (err, data) { + fs.readFile(file.path, function(err, data) { if (err) { return reject(err) } diff --git a/bids-validator/utils/files/readOMEFile.js b/bids-validator/utils/files/readOMEFile.js new file mode 100644 index 000000000..bdbc2ac56 --- /dev/null +++ b/bids-validator/utils/files/readOMEFile.js @@ -0,0 +1,17 @@ +import ExifReader from 'exifreader' +const xml2js = require('xml2js') + +const readOMEFile = buffer => { + let tags = ExifReader.load(buffer) + let xml = tags['ImageDescription']['description'] + return new Promise((resolve, reject) => { + xml2js + .parseStringPromise(xml) + .then(result => { + resolve(result) + }) + .catch(error => reject(error)) + }) +} + +export default readOMEFile diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index 8b395b12c..1c17b2dc3 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -1042,4 +1042,80 @@ export default { reason: 'The recommended file /README is very small. Please consider expanding it with additional information about the dataset.', }, + 214: { + key: 'SAMPLES_TSV_MISSING', + severity: 'error', + reason: + 'The compulsory file /samples.tsv is missing. 
See Section 03 (Modality agnostic files) of the BIDS specification.', + }, + 215: { + key: 'SAMPLE_ID_PATTERN', + severity: 'error', + reason: + 'sample_id column labels must consist of the pattern "sample-".', + }, + 216: { + key: 'SAMPLE_ID_COLUMN', + severity: 'error', + reason: "Samples .tsv files must have a 'sample_id' column.", + }, + 217: { + key: 'PARTICIPANT_ID_COLUMN', + severity: 'error', + reason: "Samples .tsv files must have a 'participant_id' column.", + }, + 218: { + key: 'SAMPLE_TYPE_COLUMN', + severity: 'error', + reason: "Samples .tsv files must have a 'sample_type' column.", + }, + 219: { + key: 'SAMPLE_TYPE_VALUE', + severity: 'error', + reason: + 'sample_type MUST consist of one of the following values: cell line, in vitro differentiated cells, primary cell, cell-free sample, cloning host, tissue, whole organisms, organoid or technical sample.', + }, + 220: { + key: 'SAMPLE_ID_DUPLICATE', + severity: 'error', + reason: + 'Each sample from the same subject MUST be described by one and only one row.', + }, + 221: { + key: 'PIXEL_SIZE_INCONSISTENT', + severity: 'error', + reason: + 'PixelSize needs to be consistent with the PhysicalSizeX, PhysicalSizeY and PhysicalSizeZ OME metadata fields.', + }, + 222: { + key: 'INVALID_PIXEL_SIZE_UNIT', + severity: 'warning', + reason: 'PixelSize consistency is only validated for "mm", "µm" and "nm".', + }, + 223: { + key: 'CHUNK_TRANSFORMATION_MATRIX_MISSING', + severity: 'warning', + reason: + "It is recommended to define 'ChunkTransformationMatrix' for this file.", + }, + 224: { + key: 'OPTIONAL_FIELD_INCONSISTENT', + severity: 'error', + reason: 'Optional JSON field is not consistent with the OME-TIFF metadata', + }, + 225: { + key: 'NO_VALID_JSON', + severity: 'error', + reason: 'No valid JSON file found for this file', + }, + 226: { + key: 'UNSUPPORTED_BIG_TIFF', + severity: 'warning', + reason: 'Metadata consistency check skipped for BigTiff OME-TIFF file', + }, + 227: { + key: 'INCONSISTENT_TIFF_EXTENSION', + severity: 'error', + reason: 'Inconsistent TIFF file type and extension', + }, } diff --git a/bids-validator/utils/summary/collectModalities.js b/bids-validator/utils/summary/collectModalities.js index 4d0b3a903..7d95cee38 100644 --- a/bids-validator/utils/summary/collectModalities.js +++ b/bids-validator/utils/summary/collectModalities.js @@ -7,6 +7,7 @@ export const collectModalities = filenames => { MEG: 0, EEG: 0, iEEG: 0, + Microscopy: 0, } const secondary = { MRI_Diffusion: 0, @@ -56,6 +57,9 @@ export const collectModalities = filenames => { if (type.file.isIEEG(path)) { modalities.iEEG++ } + if (type.file.isMicroscopy(path)) { + modalities.Microscopy++ + } } // Order by matching file count const nonZero = Object.keys(modalities).filter(a => modalities[a] !== 0) diff --git a/bids-validator/utils/type.js b/bids-validator/utils/type.js index d6199c999..0c15168e5 100644 --- a/bids-validator/utils/type.js +++ b/bids-validator/utils/type.js @@ -59,6 +59,9 @@ const megCrosstalkData = buildRegExp(file_level_rules.meg_crosstalk) const stimuliData = buildRegExp(file_level_rules.stimuli) const petData = buildRegExp(file_level_rules.pet) const petBlood = buildRegExp(file_level_rules.pet_blood) +const microscopyData = buildRegExp(file_level_rules.microscopy) +const microscopyPhotoData = buildRegExp(file_level_rules.microscopy_photo) +const microscopyJSON = buildRegExp(file_level_rules.microscopy_json) // Phenotypic data const phenotypicData = buildRegExp(phenotypic_rules.phenotypic_data) // Session level @@ -71,6 +74,7 @@ const 
ieegSes = buildRegExp(session_level_rules.ieeg_ses) const megSes = buildRegExp(session_level_rules.meg_ses) const scansSes = buildRegExp(session_level_rules.scans) const petSes = buildRegExp(session_level_rules.pet_ses) +const microscopySes = buildRegExp(session_level_rules.microscopy_ses) // Subject level const subjectLevel = buildRegExp(subject_level_rules.subject_level) // Top level @@ -85,6 +89,7 @@ const multiDirFieldmap = buildRegExp(top_level_rules.multi_dir_fieldmap) const otherTopFiles = buildRegExp(top_level_rules.other_top_files) const megTop = buildRegExp(top_level_rules.meg_top) const petTop = buildRegExp(top_level_rules.pet_top) +const microscopyTop = buildRegExp(top_level_rules.microscopy_top) export default { /** @@ -110,7 +115,9 @@ export default { this.file.isFieldMap(path) || this.file.isPhenotypic(path) || this.file.isPET(path) || - this.file.isPETBlood(path) + this.file.isPETBlood(path) || + this.file.isMicroscopy(path) || + this.file.isMicroscopyJSON(path) ) }, @@ -134,7 +141,8 @@ export default { megTop.test(path) || eegTop.test(path) || ieegTop.test(path) || - petTop.test(path) + petTop.test(path) || + microscopyTop.test(path) ) } else { return ( @@ -148,7 +156,8 @@ export default { megTop.test(path) || eegTop.test(path) || ieegTop.test(path) || - petTop.test(path) + petTop.test(path) || + microscopyTop.test(path) ) } }, @@ -203,7 +212,8 @@ export default { conditionalMatch(megSes, path) || conditionalMatch(eegSes, path) || conditionalMatch(ieegSes, path) || - conditionalMatch(petSes, path) + conditionalMatch(petSes, path) || + conditionalMatch(microscopySes, path) ) }, @@ -339,6 +349,17 @@ export default { } }, + isMicroscopy: function(path) { + return ( + conditionalMatch(microscopyData, path) || + conditionalMatch(microscopyPhotoData, path) + ) + }, + + isMicroscopyJSON: function(path) { + return conditionalMatch(microscopyJSON, path) + }, + isBehavioral: function(path) { if (bids_schema) { return bids_schema.datatypes['beh'].some(regex => regex.exec(path)) @@ -365,7 +386,9 @@ export default { this.isBehavioral(path) || this.isFuncBold(path) || this.isPET(path) || - this.isPETBlood(path) + this.isPETBlood(path) || + this.isMicroscopy(path) || + this.isMicroscopyJSON(path) ) }, }, diff --git a/bids-validator/validators/bids/fullTest.js b/bids-validator/validators/bids/fullTest.js index a9c4d240f..41c4cff88 100644 --- a/bids-validator/validators/bids/fullTest.js +++ b/bids-validator/validators/bids/fullTest.js @@ -6,6 +6,7 @@ import json from '../json' import NIFTI from '../nifti' import bval from '../bval' import bvec from '../bvec' +import ometiff from '../microscopy' import Events from '../events' import { session } from '../session' import checkAnyDataPresent from '../checkAnyDataPresent' @@ -22,7 +23,7 @@ import collectPetFields from '../../utils/summary/collectPetFields' /** * Full Test * - * Takes on an array of files, callback, and boolean inidicating if git-annex is used. + * Takes on an array of files, callback, and boolean indicating if git-annex is used. * Starts the validation process for a BIDS package. 
*/ const fullTest = (fileList, options, annexed, dir, schema, callback) => { @@ -140,11 +141,29 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { const readmeIssues = checkReadme(fileList) self.issues = self.issues.concat(readmeIssues) + // Check for samples file in the proper place (only for the microscopy modality) + if (summary.modalities.includes('Microscopy')) { + const samplesIssues = ometiff.checkSamples(fileList) + const jsonAndFieldIssues = ometiff.checkJSONAndField( + files, + jsonContentsDict, + ) + + self.issues = self.issues + .concat(samplesIssues) + .concat(jsonAndFieldIssues) + } // Validate json files and contents return json.validate(jsonFiles, fileList, jsonContentsDict, summary) }) .then(jsonIssues => { self.issues = self.issues.concat(jsonIssues) + + // ome-tiff consistency check + return ometiff.validate(files.ome, jsonContentsDict) + }) + .then(omeIssues => { + self.issues = self.issues.concat(omeIssues) // Nifti validation return NIFTI.validate( files.nifti, diff --git a/bids-validator/validators/bids/groupFileTypes.js b/bids-validator/validators/bids/groupFileTypes.js index 42c4a1bb3..b76eca201 100644 --- a/bids-validator/validators/bids/groupFileTypes.js +++ b/bids-validator/validators/bids/groupFileTypes.js @@ -11,6 +11,9 @@ const groupFileTypes = (fileList, options) => { bvec: [], contRecord: [], invalid: [], + ome: [], + png: [], + tif: [], // used to check all files not already passed through testFile() misc: [], } @@ -52,6 +55,17 @@ const sortFiles = (fileList, options, files) => { files.misc.push(file) } else if (ofType(filename, 'tsv.gz')) { files.contRecord.push(file) + } else if (ofType(filename, 'ome.tif') || ofType(filename, 'ome.btf')) { + // collect ome-tiff + files.ome.push(file) + } else if (ofType(filename, 'png')) { + files.png.push(file) + } else if ( + ofType(filename, 'tif') && + !ofType(filename, 'ome.tif') && + !ofType(filename, 'ome.btf') + ) { + files.tif.push(file) } else { files.misc.push(file) } diff --git a/bids-validator/validators/json/json.js b/bids-validator/validators/json/json.js index d38493ecb..0401435c9 100644 --- a/bids-validator/validators/json/json.js +++ b/bids-validator/validators/json/json.js @@ -96,6 +96,26 @@ const selectSchema = file => { schema = require('./schemas/ieeg.json') } else if (file.name.endsWith('eeg.json')) { schema = require('./schemas/eeg.json') + } else if ( + file.name.endsWith('TEM.json') || + file.name.endsWith('SEM.json') || + file.name.endsWith('uCT.json') || + file.name.endsWith('BF.json') || + file.name.endsWith('DF.json') || + file.name.endsWith('PC.json') || + file.name.endsWith('DIC.json') || + file.name.endsWith('FLUO.json') || + file.name.endsWith('CONF.json') || + file.name.endsWith('PLI.json') || + file.name.endsWith('CARS.json') || + file.name.endsWith('2PE.json') || + file.name.endsWith('MPE.json') || + file.name.endsWith('SR.json') || + file.name.endsWith('NLO.json') || + file.name.endsWith('OCT.json') || + file.name.endsWith('SPIM.json') + ) { + schema = require('./schemas/microscopy.json') } else if ( file.relativePath.includes('/meg/') && file.name.endsWith('coordsystem.json') diff --git a/bids-validator/validators/json/schemas/microscopy.json b/bids-validator/validators/json/schemas/microscopy.json new file mode 100644 index 000000000..4231f0ba9 --- /dev/null +++ b/bids-validator/validators/json/schemas/microscopy.json @@ -0,0 +1,83 @@ +{ + "type": "object", + "properties": { + "Manufacturer": { "type": "string", "minLength": 1 }, + 
"ManufacturersModelName": { "type": "string", "minLength": 1 }, + "DeviceSerialNumber": {"type": "string", "minLength": 1}, + "StationName": { "type": "string", "minLength": 1 }, + "SoftwareVersions": { "type": "string", "minLength": 1 }, + "InstitutionName": { "type": "string", "minLength": 1 }, + "InstitutionAddress": { "type": "string", "minLength": 1 }, + "InstitutionalDepartmentName": { "type": "string", "minLength": 1 }, + "BodyPart": { "type": "string", "minLength": 1 }, + "BodyPartDetails": { "type": "string", "minLength": 1 }, + "BodyPartDetailsOntology": { "type": "string", "minLength": 1, "format": "uri" }, + "SampleEnvironment": { "type": "string", "minLength": 1, "enum": ["in vivo", "ex vivo", "in vitro"] }, + "SampleEmbedding": { "type": "string", "minLength": 1 }, + "SampleFixation": { "type": "string", "minLength": 1 }, + "SampleStaining": { + "anyOf": [ + { "type": "string", "minLength": 1 }, + { "type": "array", "items": { "type": "string" } } + ] + }, + "SamplePrimaryAntibody": { + "anyOf": [ + { "type": "string", "minLength": 1 }, + { "type": "array", "items": { "type": "string" } } + ] + }, + "SampleSecondaryAntibody": { + "anyOf": [ + { "type": "string", "minLength": 1 }, + { "type": "array", "items": { "type": "string" } } + ] + }, + "SliceThickness": { "type": "number", "exclusiveMinimum": 0 }, + "SampleExtractionProtocol": { "type": "string", "minLength": 1 }, + "SampleExtractionInstitution": { "type": "string", "minLength": 1 }, + "TissueDeformationScaling": { "type": "number" }, + "PixelSize": {"type": "array", "minItems": 2, "maxItems": 3, "items": { "type": "number", "minimum": 0 } }, + "PixelSizeUnits": { "type": "string", "enum": ["mm", "um", "nm"] }, + "Immersion": { "type": "string", "minLength": 1 }, + "NumericalAperture": { "type": "number", "exclusiveMinimum": 0 }, + "Magnification": { "type": "number", "exclusiveMinimum": 0 }, + "ImageAcquisitionProtocol": { "type": "string", "minLength": 1 }, + "OtherAcquisitionParameters": { "type": "string", "minLength": 1 }, + "ChunkTransformationMatrix": { + "anyOf": [ + {"type": "array", "minItems": 3, "maxItems": 3, "items": { "type": "array", "minItems": 3, "maxItems": 3, "items":{ "type": "number"} } }, + {"type": "array", "minItems": 4, "maxItems": 4, "items": { "type": "array", "minItems": 4, "maxItems": 4, "items":{ "type": "number"} } } + ] + }, + "ChunkTransformationMatrixAxis": { "type": "array", "minItems": 2, "maxItems": 3, "items": { "type": "string" }} + }, + + "required": [ + "PixelSize", + "PixelSizeUnits" + ], + + "recommended": [ + "Manufacturer", + "ManufacturersModelName", + "DeviceSerialNumber", + "StationName", + "SoftwareVersions", + "InstitutionName", + "InstitutionAddress", + "InstitutionalDepartmentName", + "BodyPart", + "BodyPartDetails", + "SampleEnvironment", + "SampleStaining", + "SamplePrimaryAntibody", + "SampleSecondaryAntibody" + ], + + "dependencies": { + "PixelSize": ["PixelSizeUnits"], + "ChunkTransformationMatrix": ["ChunkTransformationMatrixAxis"] + } + +} diff --git a/bids-validator/validators/microscopy/__tests__/checkJSONAndField.spec.js b/bids-validator/validators/microscopy/__tests__/checkJSONAndField.spec.js new file mode 100644 index 000000000..62fa586b5 --- /dev/null +++ b/bids-validator/validators/microscopy/__tests__/checkJSONAndField.spec.js @@ -0,0 +1,51 @@ +import { assert } from 'chai' +import checkJSONAndField from '../checkJSONAndField' + +describe('checkJSONAndField()', () => { + const emptyJsonContentsDict = { + 'test.json': {}, + } + it('returns no issues 
with empty arguments', () => { + const issues = checkJSONAndField({}, {}) + expect(issues.length).toBe(0) + }) + + it('returns issue 225 with no json for ome files', () => { + const files = { + ome: [{ relativePath: 'test.ome.tif' }], + } + const issues = checkJSONAndField(files, emptyJsonContentsDict) + expect(issues.length).toBe(1) + expect(issues[0].code).toBe(225) + }) + + it('returns issue 225 with no json for tif files', () => { + const files = { + tif: [{ relativePath: 'test.tif' }], + } + const issues = checkJSONAndField(files, emptyJsonContentsDict) + expect(issues.length).toBe(1) + expect(issues[0].code).toBe(225) + }) + + it('returns issue 225 with no json for png files', () => { + const files = { + png: [{ relativePath: 'test.png' }], + } + const issues = checkJSONAndField(files, emptyJsonContentsDict) + expect(issues.length).toBe(1) + expect(issues[0].code).toBe(225) + }) + + it('returns warning 223 if chunk entity present but missing metadata', () => { + const files = { + ome: [{ relativePath: '/test_chunk-01.ome.tif' }], + } + const jsonContentsDict = { + '/test_chunk-01.json': { testKey: 'testValue' }, + } + const issues = checkJSONAndField(files, jsonContentsDict) + expect(issues.length).toBe(1) + expect(issues[0].code).toBe(223) + }) +}) diff --git a/bids-validator/validators/microscopy/__tests__/checkSample.spec.js b/bids-validator/validators/microscopy/__tests__/checkSample.spec.js new file mode 100644 index 000000000..5dd541c27 --- /dev/null +++ b/bids-validator/validators/microscopy/__tests__/checkSample.spec.js @@ -0,0 +1,19 @@ +import checkSamples from '../checkSamples' +describe('checkSamples()', () => { + it('returns issue 214 when no samples.tsv is present', () => { + const fileList = { + '0': { relativePath: '/test.tsv' }, + } + const issues = checkSamples(fileList) + expect(issues.length).toBe(1) + expect(issues[0].code).toBe(214) + }) + + it('doesnt return issue 214 when samples.tsv is present', () => { + const fileList = { + '0': { relativePath: '/samples.tsv' }, + } + const issues = checkSamples(fileList) + expect(issues.length).toBe(0) + }) +}) diff --git a/bids-validator/validators/microscopy/__tests__/data/btif_id.ome.tif b/bids-validator/validators/microscopy/__tests__/data/btif_id.ome.tif new file mode 100644 index 000000000..f62946161 Binary files /dev/null and b/bids-validator/validators/microscopy/__tests__/data/btif_id.ome.tif differ diff --git a/bids-validator/validators/microscopy/__tests__/data/invalid_id.ome.tif b/bids-validator/validators/microscopy/__tests__/data/invalid_id.ome.tif new file mode 100644 index 000000000..96973db51 Binary files /dev/null and b/bids-validator/validators/microscopy/__tests__/data/invalid_id.ome.tif differ diff --git a/bids-validator/validators/microscopy/__tests__/data/tif_id.ome.btf b/bids-validator/validators/microscopy/__tests__/data/tif_id.ome.btf new file mode 100644 index 000000000..416a1d8ec Binary files /dev/null and b/bids-validator/validators/microscopy/__tests__/data/tif_id.ome.btf differ diff --git a/bids-validator/validators/microscopy/__tests__/data/tif_id.ome.tif b/bids-validator/validators/microscopy/__tests__/data/tif_id.ome.tif new file mode 100644 index 000000000..416a1d8ec Binary files /dev/null and b/bids-validator/validators/microscopy/__tests__/data/tif_id.ome.tif differ diff --git a/bids-validator/validators/microscopy/__tests__/data/valid.ome.tif b/bids-validator/validators/microscopy/__tests__/data/valid.ome.tif new file mode 100644 index 000000000..416a1d8ec Binary files /dev/null and 
b/bids-validator/validators/microscopy/__tests__/data/valid.ome.tif differ diff --git a/bids-validator/validators/microscopy/__tests__/validate.spec.js b/bids-validator/validators/microscopy/__tests__/validate.spec.js new file mode 100644 index 000000000..2bf7a59f2 --- /dev/null +++ b/bids-validator/validators/microscopy/__tests__/validate.spec.js @@ -0,0 +1,99 @@ +import path from 'path' + +import readDir from '../../../utils/files/readDir' +import validate from '../validate' + +const dataDir = path.join(__dirname, '/data') + +const jsonContent = { + Manufacturer: 'Miltenyi Biotec', + ManufacturersModelName: 'UltraMicroscope II', + BodyPart: 'CSPINE', + SampleEnvironment: 'ex vivo', + SampleFixation: '4% paraformaldehyde, 2% glutaraldehyde', + SampleStaining: 'Luxol fast blue', + PixelSize: [1, 1, 1], + PixelSizeUnits: 'um', + Immersion: 'Oil', + NumericalAperture: 1.4, + Magnification: 40, + ChunkTransformationMatrix: [ + [1, 0, 0, 0], + [0, 2, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1], + ], + ChunkTransformationMatrixAxis: ['X', 'Y', 'Z'], +} + +describe('validate', () => { + it('returns error 227 with extension/id mismatch', () => { + const fileName = 'btif_id.ome.tif' + const files = [ + { + name: fileName, + relativePath: `/bids-validator/validators/microscopy/__tests__/data/${fileName}`, + path: path.join(dataDir, fileName), + }, + ] + + expect.assertions(3) + return validate(files, {}).then(issues => { + expect(issues.length).toBe(2) + expect(issues[0].code).toBe(227) + expect(issues[1].code).toBe(226) + }) + }) + + it('returns error 227 with incorrect id in magic number', () => { + const fileName = 'invalid_id.ome.tif' + const files = [ + { + name: fileName, + relativePath: `/bids-validator/validators/microscopy/__tests__/data/${fileName}`, + path: path.join(dataDir, fileName), + }, + ] + expect.assertions(2) + return validate(files, {}).then(issues => { + expect(issues.length).toBe(1) + expect(issues[0].code).toBe(227) + }) + }) + + it('returns error 227 with tif id and btf extension', () => { + const fileName = 'tif_id.ome.btf' + const files = [ + { + name: fileName, + relativePath: `/bids-validator/validators/microscopy/__tests__/data/${fileName}`, + path: path.join(dataDir, fileName), + }, + ] + + expect.assertions(2) + return validate(files, {}).then(issues => { + expect(issues.length).toBe(1) + expect(issues[0].code).toBe(227) + }) + }) + + it('validates with valid data', () => { + const fileName = 'valid.ome.tif' + const relativePath = `/bids-validator/validators/microscopy/__tests__/data/${fileName}` + const files = [ + { + name: fileName, + relativePath: relativePath, + path: path.join(dataDir, fileName), + }, + ] + const jsonContentDict = {} + jsonContentDict[relativePath.replace('.ome.tif', '.json')] = jsonContent + + expect.assertions(1) + return validate(files, jsonContentDict).then(issues => { + expect(issues.length).toBe(0) + }) + }) +}) diff --git a/bids-validator/validators/microscopy/checkJSONAndField.js b/bids-validator/validators/microscopy/checkJSONAndField.js new file mode 100644 index 000000000..266f3481e --- /dev/null +++ b/bids-validator/validators/microscopy/checkJSONAndField.js @@ -0,0 +1,85 @@ +import utils from '../../utils' +const Issue = utils.issues.Issue + +const checkJSONAndField = (files, jsonContentsDict) => { + let issues = [] + if (files.ome) { + files.ome.forEach(file => { + let possibleJsonPath = file.relativePath + .replace('.tif', '') + .replace('.btf', '') + .replace('.ome', '.json') + issues = issues.concat( + ifJsonExist(file, 
possibleJsonPath, jsonContentsDict), + ) + }) + } + if (files.png) { + files.png.forEach(file => { + if (!file.relativePath.includes('_photo')) { + let possibleJsonPath = file.relativePath.replace('.png', '.json') + issues = issues.concat( + ifJsonExist(file, possibleJsonPath, jsonContentsDict), + ) + } + }) + } + if (files.tif) { + files.tif.forEach(file => { + if (!file.relativePath.includes('_photo')) { + let possibleJsonPath = file.relativePath.replace('.tif', '.json') + issues = issues.concat( + ifJsonExist(file, possibleJsonPath, jsonContentsDict), + ) + } + }) + } + return issues +} + +const ifJsonExist = (file, possibleJsonPath, jsonContentsDict) => { + let potentialSidecars = utils.files.potentialLocations(possibleJsonPath) + const chunkRegex = new RegExp('_chunk-[0-9]+') + + const jsonChunkFiles = potentialSidecars.filter( + name => jsonContentsDict.hasOwnProperty(name) && chunkRegex.exec(name), + ) + const chunkPresent = + jsonChunkFiles.length || chunkRegex.exec(file.relativePath) + + const mergedDictionary = utils.files.generateMergedSidecarDict( + potentialSidecars, + jsonContentsDict, + ) + + // check if the given file has a corresponding JSON file + if (Object.keys(mergedDictionary).length === 0) { + return [ + new Issue({ + file: file, + code: 225, + }), + ] + } + + if (chunkPresent) { + return checkMatrixField(file, mergedDictionary) + } + + return [] +} + +const checkMatrixField = (file, mergedDictionary) => { + let issues = [] + if (!mergedDictionary.hasOwnProperty('ChunkTransformationMatrix')) { + issues.push( + new Issue({ + file: file, + code: 223, + }), + ) + } + return issues +} + +export default checkJSONAndField diff --git a/bids-validator/validators/microscopy/checkSamples.js b/bids-validator/validators/microscopy/checkSamples.js new file mode 100644 index 000000000..c535a8053 --- /dev/null +++ b/bids-validator/validators/microscopy/checkSamples.js @@ -0,0 +1,16 @@ +import isNode from '../../utils/isNode' + +const Issue = require('../../utils').issues.Issue + +const checkSamples = fileList => { + const issues = [] + const fileKeys = Object.keys(fileList) + const samplesFile = Array.from(Object.values(fileList)).find( + file => file.relativePath && file.relativePath == '/samples.tsv', + ) + if (!samplesFile) { + issues.push(new Issue({ code: 214 })) + } + return issues +} +export default checkSamples diff --git a/bids-validator/validators/microscopy/index.js b/bids-validator/validators/microscopy/index.js new file mode 100644 index 000000000..ab3895a9c --- /dev/null +++ b/bids-validator/validators/microscopy/index.js @@ -0,0 +1,11 @@ +import ometiff from './ometiff' +import validate from './validate' +import checkSamples from './checkSamples' +import checkJSONAndField from './checkJSONAndField' + +export default { + ometiff, + validate, + checkSamples, + checkJSONAndField, +} diff --git a/bids-validator/validators/microscopy/ometiff.js b/bids-validator/validators/microscopy/ometiff.js new file mode 100644 index 000000000..6e6030375 --- /dev/null +++ b/bids-validator/validators/microscopy/ometiff.js @@ -0,0 +1,165 @@ +import utils from '../../utils' +const Issue = utils.issues.Issue + +/** + * ometiff + * + * Takes an ometiff file, its omedata as an object + * and a callback as arguments. Callback + * with any issues it finds while validating + * against the BIDS specification. 
+ */ +export default function ometiff(file, omeData, jsonContentsDict, callback) { + let issues = [] + + let mergedDictionary = getMergedDictionary(file, jsonContentsDict) + + let rootKey = Object.keys(omeData)[0] + let namespace = '' + if (rootKey.includes(':OME')) { + namespace = rootKey.split(':OME')[0].concat(':') + } + + // Check for consistency with optional OME-TIFF metadata if present for + // Immersion, NumericalAperture and Magnification + let optionalFieldsIssues = checkOptionalFields( + file.relativePath, + omeData, + namespace, + mergedDictionary, + ) + + // Check for consistency for PixelSize between JSON and OME-TIFF metadata + let pixelSizeIssues = checkPixelSize(omeData, namespace, mergedDictionary) + + issues = issues.concat(optionalFieldsIssues).concat(pixelSizeIssues) + + callback(issues) +} + +const convertFactor = (omeUnit, jsonUnit) => { + if (omeUnit === jsonUnit || (omeUnit === 'µm' && jsonUnit === 'um')) return 1 + + if (jsonUnit === 'um') { + if (omeUnit === 'mm') { + return 1000 + } else if (omeUnit === 'nm') { + return 0.001 + } + } else if (jsonUnit === 'mm') { + if (omeUnit === 'µm') { + return 0.001 + } else if (omeUnit === 'nm') { + return 0.000001 + } + } else if (jsonUnit === 'nm') { + if (omeUnit === 'mm') { + return 1000000 + } else if (omeUnit === 'µm') { + return 1000 + } + } +} + +const getMergedDictionary = (file, jsonContentsDict) => { + let possibleJsonPath = file.relativePath + .replace('.tif', '') + .replace('.ome', '.json') + + let potentialSidecars = utils.files.potentialLocations(possibleJsonPath) + + return utils.files.generateMergedSidecarDict( + potentialSidecars, + jsonContentsDict, + ) +} + +const checkOptionalFields = (omePath, omeData, namespace, jsonData) => { + let issues = [] + + let fields = { + Immersion: 'Immersion', + NumericalAperture: 'LensNA', + Magnification: 'NominalMagnification', + } + + if ( + omeData[`${namespace}OME`][`${namespace}Instrument`] && + omeData[`${namespace}OME`][`${namespace}Instrument`][0][`${namespace}Objective`] + ) { + let objective = + omeData[`${namespace}OME`][`${namespace}Instrument`][0][`${namespace}Objective`][0]['$'] + for (let field in fields) { + let property = fields[field] + if (jsonData.hasOwnProperty(field) && objective[property]) { + if (objective[property] != jsonData[field]) { + issues.push( + new Issue({ + file: { + relativePath: omePath, + path: omePath, + }, + evidence: `JSON field '${field}' is inconsistent`, + code: 224, + }), + ) + } + } + } + } + + return issues +} + +const checkPixelSize = (omeData, namespace, jsonData) => { + let issues = [] + let validUnits = ['um', 'µm', 'nm', 'mm'] + + const PhysicalSizeX = + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeX'] + const physicalSizeXUnit = + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeXUnit'] + const PhysicalSizeY = + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeY'] + const physicalSizeYUnit = + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeYUnit'] + const PhysicalSizeZ = + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeZ'] + const physicalSizeZUnit = + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeZUnit'] + + // if no corresponding json file, skip the consistency check + if (Object.keys(jsonData).length === 0) return [] + + let 
unitsPendToCheck = [ + physicalSizeXUnit, + physicalSizeYUnit, + physicalSizeZUnit, + ] + + unitsPendToCheck.forEach(unit => { + if (!validUnits.includes(unit)) { + issues.push(new Issue({ code: 222 })) + } + }) + + // if any physicalSizeUnit is not valid or no valid json file, skip the consistency check + if (issues.length > 0) return issues + + let pixelSize = jsonData['PixelSize'] + let physicalSizeUnit = jsonData['PixelSizeUnits'] + + let factorX = convertFactor(physicalSizeXUnit, physicalSizeUnit) + let factorY = convertFactor(physicalSizeYUnit, physicalSizeUnit) + let factorZ = convertFactor(physicalSizeZUnit, physicalSizeUnit) + + if ( + PhysicalSizeX * factorX !== pixelSize[0] || + PhysicalSizeY * factorY !== pixelSize[1] || + PhysicalSizeZ * factorZ !== pixelSize[2] + ) { + issues.push(new Issue({ code: 221 })) + } + + return issues +} diff --git a/bids-validator/validators/microscopy/validate.js b/bids-validator/validators/microscopy/validate.js new file mode 100644 index 000000000..c4d90e97e --- /dev/null +++ b/bids-validator/validators/microscopy/validate.js @@ -0,0 +1,75 @@ +import utils from '../../utils' +const Issue = utils.issues.Issue +import ometiff from './ometiff' +import validateTiffSignature from './validateTiffSignature' + +const TIFF_ID = 0x2a +const BIG_TIFF_ID = 0x2b + +const validate = (files, jsonContentsDict) => { + let issues = [] + // validate ometiff + const omePromises = files.map(function(file) { + return utils.limit( + () => + new Promise((resolve, reject) => { + utils.files.readBuffer(file).then(buffer => { + if (validateTiffSignature(buffer, TIFF_ID)) { + if (file.relativePath.endsWith('.ome.btf')) { + issues.push( + new Issue({ + code: 227, + file: file, + evidence: `Inconsistent TIFF file type and extension, given .ome.btf but should be .ome.tif`, + }), + ) + } + utils.files + .readOMEFile(buffer) + .then(omeData => { + ometiff(file, omeData, jsonContentsDict, function(omeIssues) { + issues = issues.concat(omeIssues) + resolve() + }) + }) + .catch(err => + utils.issues.redirect(err, reject, () => { + issues.push(err) + resolve() + }), + ) + } else if (validateTiffSignature(buffer, BIG_TIFF_ID)) { + if (file.relativePath.endsWith('.ome.tif')) { + issues.push( + new Issue({ + code: 227, + file: file, + evidence: `Inconsistent TIFF file type and extension, given .ome.tif but should be .ome.btf`, + }), + ) + } + issues.push( + new Issue({ + code: 226, + file: file, + }), + ) + resolve() + } else { + issues.push( + new Issue({ + code: 227, + file: file, + evidence: `3rd byte of file does not identify file as tiff.`, + }), + ) + resolve() + } + }) + }), + ) + }) + return Promise.all(omePromises).then(() => issues) +} + +export default validate diff --git a/bids-validator/validators/microscopy/validateTiffSignature.js b/bids-validator/validators/microscopy/validateTiffSignature.js new file mode 100644 index 000000000..5ac7265b3 --- /dev/null +++ b/bids-validator/validators/microscopy/validateTiffSignature.js @@ -0,0 +1,20 @@ +import isNode from '../../utils/isNode' + +const getDataView = buffer => { + if (isNode) { + const uint8arr = new Uint8Array(buffer.byteLength) + buffer.copy(uint8arr, 0, 0, buffer.byteLength) + return new DataView(uint8arr.buffer) + } else { + return new DataView(buffer) + } +} + +const validateTiffSignature = (buffer, tiffId) => { + const dataView = getDataView(buffer) + const littleEndian = dataView.getUint16(0) === 0x4949 + + return dataView.getUint16(2, littleEndian) === tiffId +} + +export default validateTiffSignature diff --git 
a/bids-validator/validators/tsv/tsv.js b/bids-validator/validators/tsv/tsv.js index 098d427cd..bca199b15 100644 --- a/bids-validator/validators/tsv/tsv.js +++ b/bids-validator/validators/tsv/tsv.js @@ -222,8 +222,9 @@ const TSV = (file, contents, fileList, callback) => { new Issue({ file: file, evidence: headersEvidence(headers), - reason: 'Participant_id column should be named ' + - 'as sub-.', + reason: + 'Participant_id column should be named ' + + 'as sub-.', line: l, code: 212, }), @@ -240,6 +241,168 @@ const TSV = (file, contents, fileList, callback) => { } } + // samples.tsv + let samples = null + if (file.name === 'samples.tsv') { + const sampleIssues = [] + const sampleIdColumnValues = [] + const participantIdColumnValues = [] + const sampleIdColumn = headers.indexOf('sample_id') + const participantIdColumn = headers.indexOf('participant_id') + const sampleTypeColumn = headers.indexOf('sample_type') + + // if the sample_id column is missing, an error + // will be raised + if (sampleIdColumn === -1) { + sampleIssues.push( + new Issue({ + file: file, + evidence: headersEvidence(headers), + line: 1, + code: 216, + }), + ) + } + // if the participant_id column is missing, an error + // will be raised + if (participantIdColumn === -1) { + sampleIssues.push( + new Issue({ + file: file, + evidence: headersEvidence(headers), + line: 1, + code: 217, + }), + ) + } + // if the sample_type column is missing, an error + // will be raised + if (sampleTypeColumn === -1) { + sampleIssues.push( + new Issue({ + file: file, + evidence: headersEvidence(headers), + line: 1, + code: 218, + }), + ) + } + // Fold sampleIssues into main issue array, only needed it for this + // conditional. + issues.push(...sampleIssues) + if (sampleIssues.length === 0) { + // otherwise, the samples should comprise of + // sample- and one sample per row + samples = [] + for (let l = 1; l < rows.length; l++) { + const row = rows[l] + // skip empty rows + if (!row || /^\s*$/.test(row)) { + continue + } + sampleIdColumnValues.push(row[sampleIdColumn]) + + // check if any incorrect patterns in sample_id column + if (!row[sampleIdColumn].startsWith('sample-')) { + issues.push( + new Issue({ + file: file, + evidence: row[sampleIdColumn], + reason: + 'sample_id column should be named ' + 'as sample-.', + line: l, + code: 215, + }), + ) + } + } + // The participants should comprise of + // sub- and one subject per row + participants = [] + for (let l = 1; l < rows.length; l++) { + const row = rows[l] + // skip empty rows + if (!row || /^\s*$/.test(row)) { + continue + } + participantIdColumnValues.push(row[participantIdColumn]) + + // check if any incorrect patterns in participant_id column + if (!row[participantIdColumn].startsWith('sub-')) { + issues.push( + new Issue({ + file: file, + evidence: row[participantIdColumn], + reason: + 'Participant_id column should be named ' + + 'as sub-.', + line: l, + code: 212, + }), + ) + } + + // obtain a list of the sample IDs in the samples.tsv file + const sample = row[sampleIdColumn].replace('sample-', '') + if (sample == 'emptyroom') { + continue + } + samples.push(sample) + } + + // check if a sample from same subject is described by one and only one row + let samplePartIdsSet = new Set() + for (let r = 0; r < rows.length - 1; r++) { + let uniqueString = sampleIdColumnValues[r].concat( + participantIdColumnValues[r], + ) + // check if SampleId Have Duplicate + if (samplePartIdsSet.has(uniqueString)) { + issues.push( + new Issue({ + file: file, + evidence: sampleIdColumnValues, + 
reason: + 'Each sample from the same subject MUST be described by one and only one row.', + line: 1, + code: 220, + }), + ) + break + } else samplePartIdsSet.add(uniqueString) + } + } + + if (sampleTypeColumn !== -1) { + // check if any incorrect patterns in sample_type column + const validSampleTypes = [ + 'cell line', + 'in vitro differentiated cells', + 'primary cell', + 'cell-free sample', + 'cloning host', + 'tissue', + 'whole organisms', + 'organoid', + 'technical sample', + ] + for (let c = 1; c < rows.length; c++) { + const row = rows[c] + if (!validSampleTypes.includes(row[sampleTypeColumn])) { + issues.push( + new Issue({ + file: file, + evidence: row[sampleTypeColumn], + reason: 'sample_type must be one of the allowed values.', + line: c + 1, + code: 219, + }), + ) + } + } + } + } + if ( file.relativePath.includes('/meg/') && file.name.endsWith('_channels.tsv') @@ -298,10 +461,7 @@ const TSV = (file, contents, fileList, callback) => { } // blood.tsv - if ( - file.relativePath.includes('/pet/') && - file.name.endsWith('_blood.tsv') - ) { + if (file.relativePath.includes('/pet/') && file.name.endsWith('_blood.tsv')) { // Validate fields here checkheader('time', 0, file, 126) } @@ -365,7 +525,7 @@ const TSV = (file, contents, fileList, callback) => { pathList.push(fDir) } else if (fPath.includes('_ieeg.mefd/')) { // MEF3 data - const fDir = fPath.substring(0, fPath.indexOf('_ieeg.mefd/') + 10); + const fDir = fPath.substring(0, fPath.indexOf('_ieeg.mefd/') + 10) if (!pathList.includes(fDir)) { pathList.push(fDir) } @@ -415,5 +575,4 @@ const TSV = (file, contents, fileList, callback) => { } callback(issues, participants, stimPaths) } - export default TSV diff --git a/package-lock.json b/package-lock.json index 52e3722d4..6b0d14546 100644 --- a/package-lock.json +++ b/package-lock.json @@ -19,7 +19,7 @@ } }, "bids-validator": { - "version": "1.8.9-dev.0", + "version": "1.8.10-dev.0", "license": "MIT", "dependencies": { "@aws-sdk/client-s3": "^3.9.0", @@ -29,6 +29,7 @@ "cross-fetch": "^3.0.6", "date-fns": "^2.7.0", "events": "^3.3.0", + "exifreader": "^4.1.0", "hed-validator": "^3.5.0", "ignore": "^4.0.2", "is-utf8": "^0.2.1", @@ -43,6 +44,7 @@ "semver": "^7.3.2", "stream-browserify": "^3.0.0", "table": "^5.2.3", + "xml2js": "^0.4.23", "yaml": "^1.10.2", "yargs": "^16.2.0" }, @@ -70,9 +72,10 @@ } }, "bids-validator-web": { - "version": "1.8.9-dev.0", + "version": "1.8.10-dev.0", "license": "MIT", "dependencies": { + "@babel/runtime": "^7.16.7", "bootstrap": "^4.3.0", "bowser": "^1.0.0", "next": "^11.1.2", @@ -84,6 +87,17 @@ "sass": "^1.32.8" } }, + "bids-validator-web/node_modules/@babel/runtime": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.16.7.tgz", + "integrity": "sha512-9E9FJowqAsytyOY6LG+1KuueckRL+aQW+mKvXRXnuFGyRAyepJPmEo9vgMfXUA6O9u3IeEdv9MAkppFcaQwogQ==", + "dependencies": { + "regenerator-runtime": "^0.13.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, "bids-validator/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -6568,6 +6582,15 @@ "integrity": "sha512-7tFImggNeNBVMsn0vLrpn1H1uPrUBdnARPTpZoitY37ZrdJREzf7I16tMrlK3hen349gr1NYh8CmZQa7CTG6Aw==", "dev": true }, + "node_modules/@xmldom/xmldom": { + "version": "0.7.5", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.7.5.tgz", + "integrity": "sha512-V3BIhmY36fXZ1OtVcI9W+FxQqxVLsPKcNjWigIaa81dLC9IolJl5Mt4Cvhmr0flUnjSpTdrbMTSbXqYqV5dT6A==", + "optional": true, + "engines": { + "node": ">=10.0.0" 
+ } + }, "node_modules/abab": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.5.tgz", @@ -9833,6 +9856,15 @@ "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, + "node_modules/exifreader": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/exifreader/-/exifreader-4.1.0.tgz", + "integrity": "sha512-LzTW96ubaHRSWVD6bgANpZgWGHdtA/jsIdVjFVhDDN6k60wid8U6b3cIWSGTfRePjZlwvyt4nt12bIQ5ywUrBw==", + "hasInstallScript": true, + "optionalDependencies": { + "@xmldom/xmldom": "^0.7.5" + } + }, "node_modules/exit": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", @@ -27179,6 +27211,12 @@ "integrity": "sha512-7tFImggNeNBVMsn0vLrpn1H1uPrUBdnARPTpZoitY37ZrdJREzf7I16tMrlK3hen349gr1NYh8CmZQa7CTG6Aw==", "dev": true }, + "@xmldom/xmldom": { + "version": "0.7.5", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.7.5.tgz", + "integrity": "sha512-V3BIhmY36fXZ1OtVcI9W+FxQqxVLsPKcNjWigIaa81dLC9IolJl5Mt4Cvhmr0flUnjSpTdrbMTSbXqYqV5dT6A==", + "optional": true + }, "abab": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.5.tgz", @@ -27767,6 +27805,7 @@ "eslint-config-prettier": "^2.9.0", "eslint-plugin-prettier": "^2.6.2", "events": "^3.3.0", + "exifreader": "^4.1.0", "hed-validator": "^3.5.0", "husky": "^1.0.0-rc.13", "ignore": "^4.0.2", @@ -27786,6 +27825,7 @@ "stream-browserify": "^3.0.0", "sync-request": "6.0.0", "table": "^5.2.3", + "xml2js": "^0.4.23", "yaml": "^1.10.2", "yargs": "^16.2.0" }, @@ -27883,6 +27923,7 @@ "bids-validator-web": { "version": "file:bids-validator-web", "requires": { + "@babel/runtime": "^7.16.7", "bootstrap": "^4.3.0", "bowser": "^1.0.0", "next": "^11.1.2", @@ -27892,6 +27933,16 @@ "react-bootstrap": "^1.0.0-beta.5", "react-dom": "^17.0.2", "sass": "^1.32.8" + }, + "dependencies": { + "@babel/runtime": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.16.7.tgz", + "integrity": "sha512-9E9FJowqAsytyOY6LG+1KuueckRL+aQW+mKvXRXnuFGyRAyepJPmEo9vgMfXUA6O9u3IeEdv9MAkppFcaQwogQ==", + "requires": { + "regenerator-runtime": "^0.13.4" + } + } } }, "big.js": { @@ -29895,6 +29946,14 @@ "strip-final-newline": "^2.0.0" } }, + "exifreader": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/exifreader/-/exifreader-4.1.0.tgz", + "integrity": "sha512-LzTW96ubaHRSWVD6bgANpZgWGHdtA/jsIdVjFVhDDN6k60wid8U6b3cIWSGTfRePjZlwvyt4nt12bIQ5ywUrBw==", + "requires": { + "@xmldom/xmldom": "^0.7.5" + } + }, "exit": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz",