diff --git a/Dockerfile b/Dockerfile index dd9294517..0fbbf7c7c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,11 +5,21 @@ FROM node:10-alpine # from : https://github.com/nodejs/docker-node/issues/610 : # node-sass is built using node-gyp, which is built using python. # required for an NPM repo +# +# These packages are for importing spreadsheets (xlsx etc) : +# bash is now used by /backend/scripts/uploadSpreadsheet.bash +# and perl by /resources/tools/dev/snps2Dataset.pl +# gnumeric provides ssconvert, used by uploadSpreadsheet.bash +# terminus-font is required by ssconvert. RUN apk add --no-cache git \ --virtual .gyp \ python \ make \ g++ \ + bash \ + perl \ + gnumeric \ + terminus-font \ && npm install bower -g # add backend to image @@ -17,6 +27,9 @@ COPY ./backend /app # add frontend to image COPY ./frontend /frontend +COPY ./backend/scripts/uploadSpreadsheet.bash /app/scripts/. +COPY ./resources/tools/dev/snps2Dataset.pl /app/scripts/. + RUN node --version RUN cd /frontend && (npm ci || npm install) && bower install --allow-root diff --git a/README.md b/README.md index 925543925..b2430268b 100644 --- a/README.md +++ b/README.md @@ -2,20 +2,19 @@ [![Docker pulls](https://img.shields.io/docker/pulls/plantinformaticscollaboration/pretzel.svg?logo=docker&style=for-the-badge)](https://hub.docker.com/r/plantinformaticscollaboration/pretzel) -[![Docker pulls](https://img.shields.io/docker/automated/plantinformaticscollaboration/pretzel.svg?logo=docker&style=for-the-badge)](https://hub.docker.com/r/plantinformaticscollaboration/pretzel) -[![Docker pulls](https://img.shields.io/docker/build/plantinformaticscollaboration/pretzel.svg?logo=docker&style=for-the-badge)](https://hub.docker.com/r/plantinformaticscollaboration/pretzel) +[![Docker automated](https://img.shields.io/docker/automated/plantinformaticscollaboration/pretzel.svg?logo=docker&style=for-the-badge)](https://hub.docker.com/r/plantinformaticscollaboration/pretzel) +[![Docker build](https://img.shields.io/docker/cloud/build/plantinformaticscollaboration/pretzel.svg?logo=docker&style=for-the-badge)](https://hub.docker.com/r/plantinformaticscollaboration/pretzel) [![Website](https://img.shields.io/website-up-down-green-red/http/plantinformatics.io.svg?label=plantinformatics.io&style=for-the-badge)](http://plantinformatics.io) # About Pretzel A Loopback/Ember/D3 framework to display and interactively navigate complex datasets. -Developed by -- AgriBio, Department of Economic Development, Jobs, Transport and Resources (DEDJTR), Victoria, - Australia; -- CSIRO, Canberra, Australia. + -Funded by the Grains Research Development Corporation (GRDC). +Currently (2020-) funded and developed by Agriculture Victoria, Department of Jobs, Precincts and Regions (DJPR), Victoria, Australia. + +Previously (2016-2020) funded by the Grains Research and Development Corporation (GRDC) and co-developed by Agriculture Victoria and CSIRO, Canberra, Australia. 
# Table of Contents - [Features](#features) diff --git a/backend/common/models/block.js b/backend/common/models/block.js index c1e6e388b..f4c544f42 100644 --- a/backend/common/models/block.js +++ b/backend/common/models/block.js @@ -6,6 +6,8 @@ var identity = require('../utilities/identity') var task = require('../utilities/task') const qs = require('qs'); +var upload = require('../utilities/upload'); +const { insert_features_recursive } = require('../utilities/upload'); var blockFeatures = require('../utilities/block-features'); var pathsAggr = require('../utilities/paths-aggr'); var pathsFilter = require('../utilities/paths-filter'); @@ -26,6 +28,7 @@ const { Writable, pipeline, Readable } = require('stream'); * and also : var streamify = require('stream-array'); */ +/* global process */ /** This value is used in SSE packet event id to signify the end of the cursor in pathsViaStream. */ @@ -66,11 +69,75 @@ class SseWritable extends Writable { } } +/*----------------------------------------------------------------------------*/ + +/** Given a start time, return elapsed milliseconds as a string. + * @param startTime result of process.hrtime(); + * @param decimalPlaces number of decimal places to show in the result string. + */ +function elapsedMs(startTime, decimalPlaces) { + let elapsedTime = process.hrtime(startTime); + var ms = elapsedTime[0] * 1e3 + elapsedTime[1] * 1e-6; + return ms.toFixed(decimalPlaces); +} + +/*----------------------------------------------------------------------------*/ /* global module require */ module.exports = function(Block) { + + /*--------------------------------------------------------------------------*/ + +// copied from localise-blocks.js - may be able to factor, if no changes + +/** Add features. + * @param features array of features to add. + * each feature defines .blockId + * @return promise (no value) + */ +function blockAddFeatures(db, datasetId, blockId, features, cb) { + /** convert the ._id and .blockId fields from hex string to ObjectId, + * and shallow-copy the other fields. */ + let featuresId = features.map((f) => { + let {/*_id, */...rest} = f; + // rest._id = ObjectId(_id); + rest.blockId = ObjectId(blockId); + return rest; + }); + + return insert_features_recursive(db, datasetId, featuresId, false, cb); +} + + + /** Send a database request to append the features in data to the given block. + * + * @param data blockId and features + */ + Block.blockFeaturesAdd = function(data, options, cb) { + let db = this.dataSource.connector; + + if (data.filename) { + upload.handleJson(data, processJson, cb); + } else { + processJson(data); + } + + function processJson(json) { + let + blockId = json.blockId, + b = {blockId}, + features = json.features; + return blockAddFeatures(db, /*datasetId*/b, blockId, features, cb) + .then(() => { console.log('after blockAddFeatures', b); return b.blockId; }); + } + + }; + + + + /** This is the original paths api, prior to progressive-loading, i.e. it * returns all paths in a single response. * @@ -447,6 +514,8 @@ module.exports = function(Block) { function reqStream(cursorFunction, filterFunction, cacheId, intervals, req, res, apiOptions) { /* The params of reqStream() are largely passed to pipeStream() - starting to look like a class. */ + let startTime = process.hrtime(); + /** trial also performance of : isSerialized: true */ let sse = new SSE(undefined, {isCompressed : false}); if (! 
res.setHeader) { @@ -484,7 +553,12 @@ module.exports = function(Block) { } req.on('close', () => { - console.log('req.on(close)'); + /* absolute time : new Date().toISOString() */ + console.log( + 'req.on(close)', 'reqStream', + 'The request processing time is', elapsedMs(startTime, 3), 'ms.', 'for', req.path, cacheId); + + // console.log('req.on(close)'); if (cursor) { // ! cursor.isExhausted() && cursor.hasNext() if (cursor.isClosed && ! cursor.isClosed()) @@ -519,7 +593,11 @@ module.exports = function(Block) { else closeCursor(cursor); function closeCursor(cursor) { - cursor.close(function () { console.log('cursor closed'); }); + cursor.close(function () { + console.log( + 'cursor closed', + 'reqStream', + 'The request processing time is', elapsedMs(startTime, 3), 'ms.', 'for', req.path, cacheId); }); } } } @@ -588,15 +666,30 @@ module.exports = function(Block) { * @param blockIds blocks */ Block.blockFeaturesCount = function(blockIds, options, res, cb) { + let + fnName = 'blockFeaturesCount', + cacheId = fnName + '_' + blockIds.join('_'), + result = cache.get(cacheId); + if (result) { + if (trace_block > 1) { + console.log(fnName, cacheId, 'get', result[0] || result); + } + cb(null, result); + } else { let db = this.dataSource.connector; let cursor = blockFeatures.blockFeaturesCount(db, blockIds); cursor.toArray() .then(function(featureCounts) { + if (trace_block > 1) { + console.log(fnName, cacheId, 'get', featureCounts[0] || featureCounts); + } + cache.put(cacheId, featureCounts); cb(null, featureCounts); }).catch(function(err) { cb(err); }); + } }; /*--------------------------------------------------------------------------*/ @@ -605,17 +698,48 @@ module.exports = function(Block) { * * @param blockId block * @param nBins number of bins to partition the block's features into + * @param interval undefined or range of locations of features to count + * @param isZoomed true means interval should be used to constrain the location of counted features. + * @param useBucketAuto default false, which means $bucket with + * boundaries calculated from interval and nBins; otherwise use + * $bucketAuto. */ - Block.blockFeaturesCounts = function(blockId, interval, nBins, options, res, cb) { + Block.blockFeaturesCounts = function(blockId, interval, nBins, isZoomed, useBucketAuto, options, res, cb) { + + let + fnName = 'blockFeaturesCounts', + /** when a block is viewed, it is not zoomed (the interval is the + * whole domain); this request recurs often and is worth caching, + * but when zoomed in there is no repeatability so result is not + * cached. Zoomed results could be collated in an interval tree, + * and used when they satisfied one end of a requested interval, + * i.e. just the new part would be queried. + */ + useCache = ! isZoomed || ! 
interval, + cacheId = fnName + '_' + blockId + '_' + nBins + '_' + useBucketAuto, + result = useCache && cache.get(cacheId); + if (result) { + if (trace_block > 1) { + console.log(fnName, cacheId, 'get', result[0]); + } + cb(null, result); + } else { let db = this.dataSource.connector; let cursor = - blockFeatures.blockFeaturesCounts(db, blockId, interval, nBins); + blockFeatures.blockFeaturesCounts(db, blockId, interval, nBins, isZoomed, useBucketAuto); cursor.toArray() .then(function(featureCounts) { + if (useCache) { + if (trace_block > 1) { + console.log(fnName, cacheId, 'put', featureCounts[0]); + } + cache.put(cacheId, featureCounts); + } cb(null, featureCounts); }).catch(function(err) { cb(err); }); + } }; /*--------------------------------------------------------------------------*/ @@ -624,15 +748,31 @@ module.exports = function(Block) { * @param blockId undefined (meaning all blocks) or id of 1 block to find min/max for */ Block.blockFeatureLimits = function(blockId, options, res, cb) { + let + fnName = 'blockFeatureLimits', + cacheId = fnName + '_' + blockId, + result = cache.get(cacheId); + if (result) { + if (trace_block > 1) { + console.log(fnName, cacheId, 'get', result[0] || result); + } + cb(null, result); + } else { + let db = this.dataSource.connector; let cursor = blockFeatures.blockFeatureLimits(db, blockId); cursor.toArray() .then(function(limits) { + if (trace_block > 1) { + console.log(fnName, cacheId, 'put', limits[0] || limits); + } + cache.put(cacheId, limits); cb(null, limits); }).catch(function(err) { cb(err); }); + } }; /*--------------------------------------------------------------------------*/ @@ -752,6 +892,15 @@ module.exports = function(Block) { // When adding a API .remoteMethod() here, also add the route name to backend/server/boot/access.js : genericResolver() //---------------------------------------------------------------------------- + Block.remoteMethod('blockFeaturesAdd', { + accepts: [ + {arg: 'data', type: 'object', required: true, http: {source: 'body'}}, + {arg: "options", type: "object", http: "optionsFromRequest"}, + ], + returns: {arg: 'status', type: 'string'}, + description: "Append the features in data to the given block" + }); + Block.remoteMethod('blockFeaturesCount', { accepts: [ {arg: 'blocks', type: 'array', required: true}, @@ -768,6 +917,8 @@ module.exports = function(Block) { {arg: 'block', type: 'string', required: true}, {arg: 'interval', type: 'array', required: false}, {arg: 'nBins', type: 'number', required: false}, + {arg: 'isZoomed', type: 'boolean', required: false, default : 'false'}, + {arg: 'useBucketAuto', type: 'boolean', required: false, default : 'false'}, {arg: "options", type: "object", http: "optionsFromRequest"}, {arg: 'res', type: 'object', 'http': {source: 'res'}}, ], diff --git a/backend/common/models/client.js b/backend/common/models/client.js index 4ac6a9265..6f77dae7a 100644 --- a/backend/common/models/client.js +++ b/backend/common/models/client.js @@ -112,8 +112,24 @@ module.exports = function(Client) { if (process.env.EMAIL_ACTIVE == 'true') { Client.findById(context.args.uid).then(function(userInstance) { var template = loopback.template(path.resolve(__dirname, '../../server/views/access_granted.ejs')); - let login_url = - context.req.protocol + '://' + context.req.host + + let + /** if node app server is behind a proxy (e.g. nginx, for + * https) then the req.host will be simply localhost; + * in that case use API_HOST. + */ + apiHost = + process.env.API_PORT_PROXY ? 
process.env.API_HOST : context.req.host, + /** If behind a proxy then the port will be default (80) + * expressed as ''. Otherwise API_PORT_EXT is used. + * + * (If running node app server within docker then the API + * port external to docker is API_PORT_EXT, and hence the + * name suffix _EXT; the internal port is generally the same + * and the same env var is used.) + * Related : reset_href, verifyHref. + */ + login_url = + context.req.protocol + '://' + apiHost + + (process.env.API_PORT_PROXY ? '' : ':' + process.env.API_PORT_EXT) + '/login'; var html = template({ diff --git a/backend/common/models/dataset.js b/backend/common/models/dataset.js index 397ba0b76..4d08d3a73 100644 --- a/backend/common/models/dataset.js +++ b/backend/common/models/dataset.js @@ -2,10 +2,16 @@ /* global module */ /* global require */ +/* global Buffer */ +/* global process */ +const { spawn } = require('child_process'); +var fs = require('fs'); + var _ = require('lodash'); + var acl = require('../utilities/acl'); var identity = require('../utilities/identity'); var upload = require('../utilities/upload'); @@ -25,36 +31,15 @@ module.exports = function(Dataset) { Dataset.upload = function(msg, options, req, cb) { req.setTimeout(0); var models = this.app.models; - // Common steps for both .json and .gz files after parsing - const uploadParsed = (jsonMap) => { - if(!jsonMap.name){ - cb(Error('Dataset JSON has no "name" field (required)')); - } else { - // Check if dataset name already exists - // Passing option of 'unfiltered: true' overrides filter for public/personal-only - models.Dataset.exists(jsonMap.name, { unfiltered: true }).then((exists) => { - if (exists) { - cb(Error(`Dataset name "${jsonMap.name}" is already in use`)); - } else { - // Should be good to process saving of data - upload.uploadDataset(jsonMap, models, options, cb); - } - }) - .catch((err) => { - console.log(err); - cb(Error('Error checking dataset existence')); - }); - } - }; + const uploadParsed = (jsonMap) => upload.uploadParsedCb(models, jsonMap, options, cb); + function uploadParsedTry(jsonData) { + upload.uploadParsedTryCb(models, jsonData, options, cb); + } + // Parse as either .json or .gz + // factored as handleJson() if (msg.fileName.endsWith('.json')) { - try { - let jsonMap = JSON.parse(msg.data); - uploadParsed(jsonMap); - } catch (e) { - console.log(e); - cb(Error("Failed to parse JSON")); - } + uploadParsedTry(msg.data); } else if (msg.fileName.endsWith('.gz')) { var buffer = new Buffer(msg.data, 'binary'); load.gzip(buffer).then(function(json) { @@ -65,10 +50,137 @@ module.exports = function(Dataset) { console.log(err); cb(Error("Failed to read gz file")); }) + } else if ( + msg.fileName.endsWith('.xlsx') || msg.fileName.endsWith('.xls') || + msg.fileName.endsWith('.ods') + ) { + /** messages from child via file descriptors 3 and 4 are + * collated in these arrays and can be sent back to provide + * detail for / explain an error. + */ + let errors = [], warnings = []; + + /** Each worksheet in the .xlsx will result in a dataset passed + * to upload.uploadDataset() which calls cb(), so it is necessary + * to limit this to a single call-back, using cbWrap and cbCalled. + * It would be better to assemble an array of datasetId-s from + * insert_features_recursive(), and pass that to cb when complete. + * The client does not use this result value. 
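+ * (A minimal once-only wrapper, as an illustrative sketch of what cbWrap achieves : + * function onlyOnce(fn) { let called = false; return (err, msg, last) => { if (! called) { called = true; fn(err, msg, last); } }; } )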
+ * + * Refn : async/dist/async.js : onlyOnce(fn) + */ + let cbOrig = cb, + cbCalled = 0; + function cbWrap(err, message, last) { + console.log('cbWrap', err && err.toString(), message, last); + /* insert_features_recursive() "passes" last === undefined, + * and when !err, message is datasetId (i.e. datasetName) + */ + if (last || (last === undefined) || err) { + if (cbCalled++ === 0) { + if (err && (errors.length || warnings.length)) { + err = [err].concat(errors).concat(warnings).join("\n"); + errors = []; warnings = []; + } + cbOrig(err, message); + } + } + } + cb = cbWrap; + /** msg.fileName : remove punctuation other than .-_, retain alphanumeric */ + const useFile = true; + if (useFile) { + const data = new Uint8Array(Buffer.from(msg.data, 'binary')); + fs.writeFile(msg.fileName, data, (err) => { + if (err) { + cb(err); + } else { + console.log('Written', msg.data.length, data.length, 'to', msg.fileName); + } + }); + } + + const + /** msg.replaceDataset is defined by uploadSpreadsheet(), but not by data-json.js : submitFile() + */ + replaceDataset = !!msg.replaceDataset, + currentDir = process.cwd(), + /** In the Docker container, server cwd is /, and uploadSpreadsheet.bash is in /app/scripts/ */ + scriptsDir = (currentDir === "/") ? "/app/scripts" : + currentDir.endsWith("/backend") ? 'scripts' : 'backend/scripts', + // process.execPath is /usr/bin/node, need /usr/bin/ for mv, mkdir, perl + PATH = process.env.PATH + ':' + scriptsDir, + /** file handles : stdin, stdout, stderr, output errors, output warnings. */ + options = {env : {PATH}, stdio: ['pipe', 'pipe', process.stderr, 'pipe', 'pipe'] }; + const child = spawn('uploadSpreadsheet.bash', [msg.fileName, useFile], options); + child.on('error', (err) => { + console.error('Failed to start subprocess.', 'uploadSpreadsheet', msg.fileName, err.toString()); + // const error = Error("Failed to start subprocess to upload xlsx file " + msg.fileName + '\n' + err.toString()); + cb(err/*or*/); + }); + console.log('uploadSpreadsheet', /*child,*/ msg.fileName, msg.data.length, replaceDataset, scriptsDir, currentDir); + if (! useFile) { + child.stdin.write(msg.data); + child.stdin.end(); + } + + // On MS Windows these handles may not be 3 and 4. + child.stdio[3].on('data', (chunk) => { + let message = chunk.toString(); + console.log('uploadSpreadsheet errors :', message); + errors.push(message); + }); + child.stdio[4].on('data', (chunk) => { + let message = chunk.toString(); + console.log('uploadSpreadsheet warnings :', message); + warnings.push(message); + }); + + child.stdout.on('data', (chunk) => { + // data from the standard output is here as buffers + // Possibly multiple lines, separated by \n, + // completed by \n. + const + textLines = chunk.toString().split('\n'); + textLines.forEach((textLine) => { + if (textLine !== "") { + let [fileName, datasetName] = textLine.split(';'); + console.log('uploadSpreadsheet stdout data', "'", fileName, "', '", datasetName, "'"); + if (fileName.startsWith('Error:') || ! 
datasetName) { + cb(new Error(fileName + " Dataset '" + datasetName + "'")); + } else { + console.log('before removeExisting "', datasetName, '"'); + upload.removeExisting(models, datasetName, replaceDataset, cb, loadAfterDelete); + } + function loadAfterDelete(err) { + upload.loadAfterDeleteCb( + fileName, + (jsonData) => uploadParsedTry(jsonData), + err, cb); + } + } + }); + }); + + // since these are streams, you can pipe them elsewhere + // child.stderr.pipe(dest); + child.on('close', (code) => { + console.log('child process exited with code', code); + if (code) { + const error = Error("Failed to read xlsx file " + msg.fileName); + cb(error); + } else { + // process each tmp/out_json/"$datasetName".json + const message = 'Uploaded xlsx file ' + msg.fileName; + if (child.killed) { + cb(null, message, true); + } // else check again after timeout + } + }); } else { cb(Error('Unsupported file type')); } - } + }; Dataset.tableUpload = function(data, options, cb) { var models = this.app.models; @@ -123,6 +235,7 @@ module.exports = function(Dataset) { array_features.push({ name: feature.name, value: [feature.val], + value_0: feature.val, blockId: blocks_by_name[feature.block] }); }); @@ -160,9 +273,19 @@ module.exports = function(Dataset) { Dataset.observe('before delete', function(ctx, next) { var Block = ctx.Model.app.models.Block + /** ctx.where contains the datasetId, but differently depending on the call which requested delete of the dataset : + * - deletes done via URL (as in curl -X DELETE api/Datasets/) place the datasetId in ctx.where.and[1].name + * - removeExisting() does Dataset.destroyAll({_id : id}, ) and that condition is copied to ctx.where, so where._id is the datasetId. + */ + let + where = ctx.where, + datasetId = where.and ? where.and[1].name : where._id; + if (where.and) { + console.log('Dataset.observe(before delete', where.and[0], where.and[1]); + } Block.find({ where: { - datasetId: ctx.where.and[1].name + datasetId } }, ctx.options).then(function(blocks) { blocks.forEach(function(block) { diff --git a/backend/common/models/feature.js b/backend/common/models/feature.js index aa632fd87..c4162468e 100644 --- a/backend/common/models/feature.js +++ b/backend/common/models/feature.js @@ -1,6 +1,12 @@ 'use strict'; +/* global require */ +/* global process */ + var acl = require('../utilities/acl') +const { childProcess } = require('../utilities/child-process'); +var upload = require('../utilities/upload'); + module.exports = function(Feature) { Feature.search = function(filter, options, cb) { @@ -44,6 +50,70 @@ module.exports = function(Feature) { }); }; + /** + * @param data contains : + * @param dnaSequence FASTA format for Blast; text string input for other searchType-s, e.g. string "actg..." + * @param parent datasetId of parent / reference of the blast db which is to be searched + * @param searchType 'blast' + * @param resultRows + * @param addDataset + * @param datasetName + * @param options + * + * @param cb node response callback + */ + Feature.dnaSequenceSearch = function(data, cb) { + const models = this.app.models; + + let {dnaSequence, parent, searchType, resultRows, addDataset, datasetName, options} = data; + const fnName = 'dnaSequenceSearch'; + console.log(fnName, dnaSequence.length, parent, searchType); + + /** Receive the results from the Blast. + * @param chunk is a Buffer + * @param cb is cbWrap of cb passed to dnaSequenceSearch(). 
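+ * Each non-empty line of chunk is one blast hit, TSV in -outfmt '6 std qlen slen' column order, e.g. (cf. dev_blastResult below) : + * BobWhite_c10015_641 chr2A 100.000 50 0 0 1 50 154414057 154414008 2.36e-17 93.5 50 780798557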
+ */ + let searchDataOut = (chunk, cb) => { + if (chunk.asciiSlice(0,6) === 'Error:') { + cb(new Error(chunk.toString())); + } else { + const + textLines = chunk.toString().split('\n'); + textLines.forEach((textLine) => { + if (textLine !== "") { + console.log(fnName, 'stdout data', "'", textLine, "'"); + } + }); + if (addDataset) { + let jsonFile='tmp/' + datasetName + '.json'; + console.log('before removeExisting "', datasetName, '"', '"', jsonFile, '"'); + upload.removeExisting(models, datasetName, /*replaceDataset*/true, cb, loadAfterDelete); + + function loadAfterDelete(err) { + upload.loadAfterDeleteCb( + jsonFile, + (jsonData) => + upload.uploadParsedTryCb(models, jsonData, options, cb), + err, cb); + } + + } + + cb(null, textLines); + } + }; + + if (true) { + let child = childProcess( + 'dnaSequenceSearch.bash', + dnaSequence, true, 'dnaSequence', [parent, searchType, resultRows, addDataset, datasetName], searchDataOut, cb); + } else { + let features = dev_blastResult; + cb(null, features); + } + }; + + Feature.remoteMethod('search', { accepts: [ {arg: 'filter', type: 'array', required: true}, @@ -64,9 +134,33 @@ module.exports = function(Feature) { returns: {arg: 'features', type: 'array'}, description: "Returns features by their level in the feature hierarchy" }); - + + Feature.remoteMethod('dnaSequenceSearch', { + accepts: [ + {arg: 'data', type: 'object', required: true, http: {source: 'body'}}, + /* Within data : .dnaSequence, and : + {arg: 'parent', type: 'string', required: true}, + {arg: 'searchType', type: 'string', required: true}, + {arg: "options", type: "object", http: "optionsFromRequest"} + resultRows, addDataset, datasetName + */ + ], + // http: {verb: 'post'}, + returns: {arg: 'features', type: 'array'}, + description: "DNA Sequence Search e.g. Blast, returns TSV output as text array" + }); + acl.assignRulesRecord(Feature) acl.limitRemoteMethods(Feature) acl.limitRemoteMethodsSubrecord(Feature) acl.limitRemoteMethodsRelated(Feature) }; + +/*----------------------------------------------------------------------------*/ + +const dev_blastResult = [ + "BobWhite_c10015_641 chr2A 100.000 50 0 0 1 50 154414057 154414008 2.36e-17 93.5 50 780798557", + "BobWhite_c10015_641 chr2B 98.000 50 1 0 1 50 207600007 207600056 1.10e-15 87.9 50 801256715" +]; +/*----------------------------------------------------------------------------*/ + diff --git a/backend/common/utilities/block-features.js b/backend/common/utilities/block-features.js index 49584e739..9a4b7f2a4 100644 --- a/backend/common/utilities/block-features.js +++ b/backend/common/utilities/block-features.js @@ -1,14 +1,46 @@ +const { blockFilterValue0 } = require('./paths-aggr'); + + var ObjectID = require('mongodb').ObjectID; /*----------------------------------------------------------------------------*/ /* global exports */ +/* global process */ const trace_block = 1; /** ObjectId is used in mongo shell; the equivalent defined by the node js client library is ObjectID; */ const ObjectId = ObjectID; +/** blockFeaturesCounts() can use a query which is covered by the index + * if .value[0] has been copied as .value_0 + * + * Using '$value_0' in place of {$arrayElemAt : ['$value', 0]} is functionally + * equivalent, and enables the combined index {blockId, value_0} to cover + * the query; + * this can be dropped if a way is found to access value[0] without $expr, + * which seems to not enable PROJECTION_COVERED. 
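+ * The combined index is assumed to be created separately, e.g. in the mongo shell (illustrative) : + * db.Feature.createIndex({blockId : 1, value_0 : 1})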
+ */ +const use_value_0 = process.env.use_value_0 || false; + +/*----------------------------------------------------------------------------*/ + +/** Show whether an aggregation pipeline is covered by an index. + */ +function showExplain(label, aggregationCursor) { + /* Usage e.g. showExplain('blockFeaturesCounts', featureCollection.aggregate ( pipeline, {allowDiskUse: true} )) + */ + aggregationCursor + .explain() + .then((a, b) => { + let stage; try { stage = a.stages[0].$cursor.queryPlanner.winningPlan.stage; } catch (e) {}; + if (stage !== 'PROJECTION_COVERED') { + console.log(label, ' explain then', a, stage /*, b, arguments, this*/); + } + }); +} + /*----------------------------------------------------------------------------*/ @@ -27,9 +59,12 @@ exports.blockFeaturesCount = function(db, blockIds) { let ObjectId = ObjectID; let + /** may be faster to use simple string match for .length === 1, instead of $in array. */ + blockIdMatch = blockIds.length === 1 ? ObjectId(blockIds[0]) : + {$in : blockIds.map(function (blockId) { return ObjectId(blockId); }) }, matchBlock = [ - { $match : { "blockId" : {$in : blockIds.map(function (blockId) { return ObjectId(blockId); }) }}}, + { $match : { "blockId" : blockIdMatch}}, { $group: { _id: '$blockId', featureCount: { $sum: 1 } } } ], @@ -129,7 +164,7 @@ function binBoundaries(interval, lengthRounded) { * { "_id" : { "min" : 4000000, "max" : 160000000 }, "count" : 22 } * { "_id" : { "min" : 160000000, "max" : 400000000 }, "count" : 21 } */ -exports.blockFeaturesCounts = function(db, blockId, interval, nBins = 10) { +exports.blockFeaturesCounts = function(db, blockId, interval, nBins = 10, isZoomed, useBucketAuto) { // initial draft based on blockFeaturesCount() let featureCollection = db.collection("Feature"); /** The requirement (so far) is for even-size boundaries on even numbers, @@ -145,9 +180,9 @@ exports.blockFeaturesCounts = function(db, blockId, interval, nBins = 10) { * So $bucket is used instead, and the boundaries are given explicitly. * This requires interval; if it is not passed, $bucketAuto is used, without granularity. */ - const useBucketAuto = ! (interval && interval.length === 2); + useBucketAuto = useBucketAuto || ! (interval && interval.length === 2); if (trace_block) - console.log('blockFeaturesCounts', blockId, interval, nBins); + console.log('blockFeaturesCounts', blockId, interval, nBins, isZoomed, useBucketAuto); let ObjectId = ObjectID; let lengthRounded, boundaries; if (! useBucketAuto) { @@ -158,12 +193,14 @@ exports.blockFeaturesCounts = function(db, blockId, interval, nBins = 10) { let matchBlock = [ + use_value_0 ? blockFilterValue0(isZoomed ? interval : undefined, blockId) : {$match : {blockId : ObjectId(blockId)}}, useBucketAuto ? { $bucketAuto : { groupBy: {$arrayElemAt : ['$value', 0]}, buckets: Number(nBins)} } // , granularity : 'R5' : { $bucket : { + /** faster query if .value_0 is available @see use_value_0 */ groupBy: (use_value_0 ? '$value_0' : {$arrayElemAt : ['$value', 0]}), boundaries, 'default' : 'outsideBoundaries', output: { count: { $sum: 1 }, @@ -224,9 +261,18 @@ exports.blockFeatureLimits = function(db, blockId) { * handle this by checking for $type and applying $slice to the array type only. 
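+ * (e.g. .value may be [start], [start, end] or a scalar location; {$slice : ['$value', 2]} keeps at most the first 2 elements.)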
*/ let - group = [ - {$project : {_id : 1, name: 1, blockId : 1, value : - {$cond: { if: { $isArray: "$value" }, then: {$slice : ['$value', 2]}, else: "$value" } } }}, + /** Project .value_0 if use_value_0, otherwise .value[0 and 1] + * or .value if it is not an array. + * Using .value_0 will probably be faster than array access; it misses the end + * of the feature interval, but for knowing the limits of the block it will be + * sufficient. + */ + group_array = [ + {$project : { + _id : 1, name: 1, blockId : 1, value : + use_value_0 ? "$value_0" : + {$cond: { if: { $isArray: "$value" }, then: {$slice : ['$value', 2]}, else: "$value" } } + }}, {$unwind : '$value'}, {$match: { $or: [ { value: { $ne: null } } ] } }, {$group : { @@ -236,6 +282,16 @@ exports.blockFeatureLimits = function(db, blockId) { min : { "$min": "$value" } }} ], + /** using .value_0 enables this simpler form, which is faster in tests so far. */ + group_0 = [ + {$group : { + _id : '$blockId' , + featureCount : { $sum: 1 }, + max : { "$max": "$value_0" }, + min : { "$min": "$value_0" } + }} + ], + group = use_value_0 ? group_0 : group_array, pipeline = blockId ? [ {$match : {blockId : ObjectId(blockId)}} @@ -248,6 +304,7 @@ exports.blockFeatureLimits = function(db, blockId) { if (trace_block > 1) console.dir(pipeline, { depth: null }); + showExplain('blockFeatureLimits', featureCollection.aggregate ( pipeline, {allowDiskUse: true} )); let result = featureCollection.aggregate ( pipeline, {allowDiskUse: true} ); diff --git a/backend/common/utilities/child-process.js b/backend/common/utilities/child-process.js new file mode 100644 index 000000000..016808c44 --- /dev/null +++ b/backend/common/utilities/child-process.js @@ -0,0 +1,180 @@ +'use strict'; + +/* global require */ +/* global Buffer */ +/* global process */ + +const { spawn } = require('child_process'); +var fs = require('fs'); + +/*----------------------------------------------------------------------------*/ + + +/** + * @param scriptName + * @param postData + * @param useFile + * @param fileName + * @param moreParams array of params to pass as command-line params to + * child process, after [fileName, useFile] + * @param dataOutCb (Buffer chunk, cb) {} + * @param cb response node callback + * @return child + */ +exports.childProcess = (scriptName, postData, useFile, fileName, moreParams, dataOutCb, cb) => { + const fnName = 'childProcess'; + /** messages from child via file descriptors 3 and 4 are + * collated in these arrays and can be sent back to provide + * detail for / explain an error. + */ + let errors = [], warnings = []; + + /** Each worksheet in the .xlsx will result in a dataset passed + * to upload.uploadDataset() which calls cb(), so it is necessary + * to limit this to a single call-back, using cbWrap and cbCalled. + * It would be better to assemble an array of datasetId-s from + * insert_features_recursive(), and pass that to cb when complete. + * The client does not use this result value. + * + * Refn : async/dist/async.js : onlyOnce(fn) + */ + let cbOrig = cb, + cbCalled = 0; + function cbWrap(err, message, last) { + console.log('cbWrap', err && err.toString(), message, last); + /* insert_features_recursive() "passes" last === undefined, + * and when !err, message is datasetId (i.e. 
datasetName) + */ + if (last || (last === undefined) || err) { + if (cbCalled++ === 0) { + if (err && (errors.length || warnings.length)) { + err = [err].concat(errors).concat(warnings).join("\n"); + errors = []; warnings = []; + } + cbOrig(err, message); + } + } + } + cb = cbWrap; + /** fileName : remove punctuation other than .-_, retain alphanumeric */ + if (useFile) { + const data = new Uint8Array(Buffer.from(postData, 'binary')); + fs.writeFile(fileName, data, (err) => { + if (err) { + cb(err); + } else { + console.log('Written', postData.length, data.length, 'to', fileName); + } + }); + } + + const + /** msg.replaceDataset is defined by uploadSpreadsheet(), but not by data-json.js : submitFile() + replaceDataset = !!msg.replaceDataset, + */ + currentDir = process.cwd(), + /** In the Docker container, server cwd is /, and scriptName (e.g. uploadSpreadsheet.bash) is in /app/scripts/ */ + scriptsDir = (currentDir === "/") ? "/app/scripts" : + currentDir.endsWith("/backend") ? 'scripts' : 'backend/scripts', + // process.execPath is /usr/bin/node, need /usr/bin/ for mv, mkdir, perl + PATH = process.env.PATH + ':' + scriptsDir, + /** file handles : stdin, stdout, stderr, output errors, output warnings. */ + options = {env : {PATH}, stdio: ['pipe', 'pipe', process.stderr, 'pipe', 'pipe'] }; + let params = [fileName, useFile]; + if (moreParams && moreParams.length) { + params = params.concat(moreParams); + } + const child = spawn(scriptName, params, options); + child.on('error', (err) => { + console.error(fnName, 'Failed to start subprocess.', scriptName, fileName, err.toString()); + // const error = Error("Failed to start subprocess to upload xlsx file " + fileName + '\n' + err.toString()); + cb(err/*or*/); + }); + console.log(fnName, scriptName, postData.length, useFile, /*child,*/ fileName, postData.length, scriptsDir, currentDir); + if (! useFile) { + child.stdin.write(postData); + child.stdin.end(); + } + + // On MS Windows these handles may not be 3 and 4. 
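+ // The child writes these via shell redirection, e.g. in uploadSpreadsheet.bash : echo 1>&3 "Error: ..." and cat "$warningsFile" 1>&4.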
+ child.stdio[3].on('data', (chunk) => { + let message = chunk.toString(); + console.log(fnName, scriptName, ' errors :', message); + errors.push(message); + }); + child.stdio[4].on('data', (chunk) => { + let message = chunk.toString(); + console.log(fnName, scriptName, ' warnings :', message); + warnings.push(message); + }); + + child.stdout.on('data', (chunk) => dataOutCb(chunk, cb)); + + // since these are streams, you can pipe them elsewhere + // child.stderr.pipe(dest); + child.on('close', (code) => { + console.log('child process exited with code', code); + if (code) { + const error = Error("Failed processing file '" + fileName + "'."); + cb(error); + } else if (errors.length || warnings.length) { + let + errors_warnings = errors.concat(warnings).join("\n"); + errors = []; warnings = []; + cb(errors_warnings); + } else { + // process each tmp/out_json/"$datasetName".json + const message = 'Processed file ' + fileName; + if (child.killed) { + cb(null, message, true); + } // else check again after timeout + } + }); + + return child; +}; + +/*----------------------------------------------------------------------------*/ + +/* dataset upload */ +function factored(msg, cb) { + exports.childProcess('uploadSpreadsheet.bash', msg.data, true, msg.fileName, /*moreParams*/[], dataOutUpload, cb); +} + +// msg file param from API request {fileName, data, replaceDataset} + +// params needed : this (model/dataset), replaceDataset, uploadParsedTry +let dataOutUpload = (chunk, cb) => { + // data from the standard output is here as buffers + // Possibly multiple lines, separated by \n, + // completed by \n. + const + textLines = chunk.toString().split('\n'); + textLines.forEach((textLine) => { + if (textLine !== "") { + let [fileName, datasetName] = textLine.split(';'); + console.log('uploadSpreadsheet stdout data', "'", fileName, "', '", datasetName, "'"); + if (fileName.startsWith('Error:') || ! datasetName) { + cb(new Error(fileName + " Dataset '" + datasetName + "'")); + } else { + console.log('before removeExisting "', datasetName, '"', replaceDataset); + this.removeExisting(datasetName, replaceDataset, cb, loadAfterDelete); + } + function loadAfterDelete(err) { + if (err) { + cb(err); + } + else { + fs.readFile(fileName, (err, jsonData) => { + if (err) { + cb(err); + } else { + console.log('readFile', fileName, jsonData.length); + // jsonData is a Buffer; JSON.parse() handles this OK. 
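+ // (JSON.parse() coerces its argument to string, i.e. jsonData.toString(), before parsing.)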
+ uploadParsedTry(jsonData); + } + }); + } + }; + } + }); +}; diff --git a/backend/common/utilities/identity.js b/backend/common/utilities/identity.js index d5178f9d8..1346b0275 100644 --- a/backend/common/utilities/identity.js +++ b/backend/common/utilities/identity.js @@ -2,7 +2,7 @@ /** * Find the client id from loopback context, if exists - * @param {Object} ctx - loopback method context + * @param {Object} data - ctx - loopback method context * @returns clientId */ exports.gatherClientId = (data) => { @@ -38,4 +38,4 @@ exports.queryFilterAccessible = (ctx) => { where = {and: [where, ctx.query.where]} } ctx.query.where = where; -} \ No newline at end of file +} diff --git a/backend/common/utilities/paths-aggr.js b/backend/common/utilities/paths-aggr.js index af1d799a9..2b75f9eb7 100644 --- a/backend/common/utilities/paths-aggr.js +++ b/backend/common/utilities/paths-aggr.js @@ -6,6 +6,7 @@ var ObjectID = require('mongodb').ObjectID; /* global exports */ /* global require */ +/* global process */ /* globals defined in mongo shell */ /* global db ObjectId print */ @@ -18,6 +19,11 @@ const trace_aggr = 1; /** ObjectId is used in mongo shell; the equivalent defined by the node js client library is ObjectID; */ const ObjectId = ObjectID; +/** blockFeaturesInterval() can use a query which is covered by the index + * if .value[0] has been copied as .value_0 + */ +const use_value_0 = process.env.use_value_0 || false; + /*----------------------------------------------------------------------------*/ @@ -325,6 +331,38 @@ function valueBound(intervals, b, l) { ); return r; }; +/** Similar to valueBound, but use value_0 instead of value[0]. + * Just 1 end (limit l), whereas valueBounds_0 constructs an expression for both ends. + * @param intervals domains for both ends (axes) + * @param b 0 for blockId0, axes[0] + * @param l limit : 0 for domain[0] and lte, 1 for domain[1] and gte + */ +function valueBound_0(intervals, b, l) { + let r = keyValue( + l ? '$lte' : '$gte', + [ "$value_0", + +intervals.axes[b].domain[l]] + ); + return r; +} + +/** Similar to valueBound, but use value_0 instead of value[0]. + * This is applicable when blockIds.length is 1, i.e. b === 0. + * For length 2 it could match either domain for both blocks, later + * $match would filter out the extra, and it would get the performance + * benefit from the index (narrow the docsExamined). + * + * @param domain from the intervals or interval param, e.g. intervals.axes[b] + */ +function valueBounds_0(domain) { + let r = { + $gte : +domain[0], + $lte : +domain[1] + }; + return r; +} + + /** If axis b is zoomed, append conditions on location (value[]) to the given array eq. * * If the axis has not been zoomed then Stacked : zoomed will be undefined, @@ -343,11 +381,35 @@ function blockFilter(intervals, eq, b) { let a = intervals.axes[b], /** if axisBrush, then zoom is not required. */ axisBrush = intervals.axes.length === 1, + vB = use_value_0 ? valueBound_0 : valueBound, r = (axisBrush || a.zoomed) && a.domain ? - eq.concat([valueBound(intervals, b, 0), valueBound(intervals, b, 1)]) : + eq.concat([vB(intervals, b, 0), vB(intervals, b, 1)]) : eq; return r; }; +/** Similar to blockFilter(); uses .value_0 (a copy of .value[0]), + * which enables a $match without $expr and hence is able to narrow the + * document pipeline using the index {blockId, value_0}. + * This can be inserted before a $match constructed using blockFilter(); + * the index use of the first match determines performance. 
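+ * e.g. for domain [d0, d1] the produced stage is : + * {$match : {blockId : ObjectId(blockId), value_0 : {$gte : d0, $lte : d1}}}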
+ * + * @param domain from the intervals or interval param, e.g. intervals.axes[b].domain + * If undefined, no condition is added for feature .value_0, just .blockId. + */ +function blockFilterValue0(domain, blockId) { + let + l = 0, + matchBlock = + {$match : { + blockId : ObjectId(blockId) + } }; + if (domain) { + matchBlock.$match.value_0 = valueBounds_0(domain); + } + return matchBlock; +} +exports.blockFilterValue0 = blockFilterValue0; + /*----------------------------------------------------------------------------*/ /** The interval params passed to .pathsDirect() and .blockFeaturesInterval() @@ -600,6 +662,7 @@ function pathsAliases(db, blockId0, blockId1, namespace0, namespace1, intervals /** log the given filterValue, (which is derived from) intervals */ function log_filterValue_intervals(filterValue, intervals) { let l = ['filterValue', filterValue]; + l.push(JSON.stringify(filterValue)); intervals.axes.map(function (a) { /** log a.{zoomed,domain} .domain may be undefined or [start,end]. */ l.push(a.zoomed); @@ -669,6 +732,12 @@ exports.blockFeaturesInterval = function(db, blockIds, intervals) { blockFilters } }}, ]; + if (use_value_0 && (blockIds.length === 1)) { + const b = 0; + // could pass undefined for domain if ! .isZoomed + let useIndex = blockFilterValue0(intervals.axes[b].domain, blockIds[b]); + filterValue.unshift(useIndex); + } let pipeline; diff --git a/backend/common/utilities/upload.js b/backend/common/utilities/upload.js index 6046158a2..6f2ac643b 100644 --- a/backend/common/utilities/upload.js +++ b/backend/common/utilities/upload.js @@ -4,10 +4,20 @@ var fs = require('fs'); var Promise = require('bluebird') const bent = require('bent'); + +const load = require('./load'); + + /* global require */ /* global exports */ /* global process */ +/*----------------------------------------------------------------------------*/ + +function notDefined(value) { return value === undefined || value === null; } + +/*----------------------------------------------------------------------------*/ + /** * Divide array into smaller chunks * @param {Array} arr - array of data to be processed @@ -182,6 +192,9 @@ function insert_features_recursive(db, dataset_id, features_to_insert, ordered, }); delete feature.features; } + if (! notDefined(feature.value) && notDefined(feature.value_0)) { + feature.value_0 = feature.value.length ? feature.value[0] : feature.value; + } }); let promise = @@ -226,3 +239,138 @@ function checkDatasetExists(name, models) { return results.length > 0; }); } + + +/*----------------------------------------------------------------------------*/ + +/** Handle POST-ed JSON data, either plain file or gzip-ed, + * parse the JSON and apply the given function uploadParsed to it. + * Call cb(). + * @param msg received API request message + * @param uploadParsed function to pass parsed JSON object to + * @param cb node callback + */ +exports.handleJson = function(msg, uploadParsed, cb) { + // factored from dataset.js : Dataset.upload(), which can be changed to use this. 
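+ // Caller sketch (cf. block.js : blockFeaturesAdd()) : upload.handleJson(data, processJson, cb);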
+ + // Parse as either .json or .gz + if (msg.fileName.endsWith('.json')) { + try { + let jsonMap = JSON.parse(msg.data); + uploadParsed(jsonMap); + } catch (e) { + console.log(e); + cb(Error("Failed to parse JSON")); + } + } else if (msg.fileName.endsWith('.gz')) { + var buffer = new Buffer(msg.data, 'binary'); + load.gzip(buffer).then(function(json) { + let jsonMap = json; + uploadParsed(jsonMap); + }) + .catch(function(err) { + console.log(err); + cb(Error("Failed to read gz file")); + }); + } else { + cb(Error('Unsupported file type')); + } +}; + +/*----------------------------------------------------------------------------*/ + + exports.uploadParsedCb = + // Common steps for both .json and .gz files after parsing + (models, jsonMap, options, cb) => { + if(!jsonMap.name){ + cb(Error('Dataset JSON has no "name" field (required)')); + } else { + // Check if dataset name already exists + // Passing option of 'unfiltered: true' overrides filter for public/personal-only + models.Dataset.exists(jsonMap.name, { unfiltered: true }).then((exists) => { + if (exists) { + cb(Error(`Dataset name "${jsonMap.name}" is already in use`)); + } else { + // Should be good to process saving of data + exports.uploadDataset(jsonMap, models, options, cb); + } + }) + .catch((err) => { + console.log(err); + cb(Error('Error checking dataset existence')); + }); + } + }; + + exports.uploadParsedTryCb = + /** Wrap uploadParsed with try { } and pass error to cb(). + */ + function uploadParsedTryCb(models, jsonData, options, cb) { + try { + let jsonMap = JSON.parse(jsonData); + exports.uploadParsedCb(models, jsonMap, options, cb); + } catch (e) { + let message = e.toString ? e.toString() : e.message || e.name; + // logging e logs e.stack, which is also logged by cb(Error() ) + console.log(message || e); + cb(Error("Failed to parse JSON" + (message ? ':\n' + message : ''))); + } + }; + + /** + * @param uploadFn uploadParsedTry(jsonData) + */ + exports.loadAfterDeleteCb = + function loadAfterDeleteCb(fileName, uploadFn, err, cb) { + if (err) { + cb(err); + } + else { + fs.readFile(fileName, (err, jsonData) => { + if (err) { + cb(err); + } else { + console.log('readFile', fileName, jsonData.length); + // jsonData is a Buffer; JSON.parse() handles this OK. + uploadFn(jsonData); + } + }); + } + }; + + + /** If Dataset with given id exists, remove it. + * If id doesn't exist, or it is removed OK, then call okCallback, + * otherwise pass the error to the (API request) replyCb. + * @param replaceDataset if false and dataset id exists, then fail - call replyCb() with Error. + */ + exports.removeExisting = function(models, id, replaceDataset, replyCb, okCallback) { + models.Dataset.exists(id, { unfiltered: true }).then((exists) => { + console.log('removeExisting', "'", id, "'", exists); + if (exists) { + if (! replaceDataset) { + replyCb(Error("Dataset '" + id + "' exists")); + } else { + /* without {unfiltered: true}, the dataset was not found by destroyAll. + * destroyAllById(id ) also did not find the dataset (callback gets info.count === 0). + * .exists() finds it OK. 
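+ * ({unfiltered: true} overrides the public / personal-only query filter, as noted in uploadParsedCb() above.)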
+ */ + models.Dataset.destroyAll/*ById(id*/ ({_id : id}, {unfiltered: true}, (err) => { + if (err) { + replyCb(err); + } else { + console.log('removeExisting removed', id); + okCallback(); + } + }); + } + } else { + okCallback(); + } + }); + }; + + + + +/*----------------------------------------------------------------------------*/ diff --git a/backend/scripts/dnaSequenceSearch.bash b/backend/scripts/dnaSequenceSearch.bash new file mode 100755 index 000000000..c79d0216c --- /dev/null +++ b/backend/scripts/dnaSequenceSearch.bash @@ -0,0 +1,132 @@ +#!/bin/bash + +serverDir=$PWD +case $PWD in + /) + resourcesDir=/app/scripts + toolsDev=$resourcesDir + ;; + *backend) + resourcesDir=../resources + ;; + *) + resourcesDir=resources + ;; +esac +# Default value of toolsDev, if not set above. +unused_var=${toolsDev=$resourcesDir/tools/dev} +sp=$toolsDev/snps2Dataset.pl + +logFile=dnaSequenceSearch.log +(pwd; date; ) >> $logFile +echo $* >> $logFile + +[ -d tmp ] || mkdir tmp + +set -x +fileName=$1 +useFile=$2 +parent=$3 +searchType=$4 +resultRows=$5 +# addDataset is 'true' or 'false' +addDataset=$6 +datasetName=$7 +echo fileName="$fileName", useFile=$useFile, parent="$parent", searchType=$searchType, resultRows=$resultRows, addDataset=$addDataset, datasetName=$datasetName >> $logFile + + +#------------------------------------------------------------------------------- + +if ls -l /bin/ls | fgrep -q /bin/busybox +then + function ll() { ls -l "$@"; } +else + function ll() { ls -gG "$@"; } +fi + +#------------------------------------------------------------------------------- + +function dev_blastResult() { +# Convert spaces to \t +# unexpand does not alter single spaces, so use sed to map those. +unexpand -t 8 <<\EOF | sed "s/ /\t/" +BobWhite_c10015_641 chr2A 100.000 50 0 0 1 50 154414057 154414008 2.36e-17 93.5 50 780798557 +BobWhite_c10015_641 chr2B 98.000 50 1 0 1 50 207600007 207600056 1.10e-15 87.9 50 801256715 +EOF +} + +#------------------------------------------------------------------------------- + +columnHeaders=$(echo "query ID, subject ID, % identity, length of HSP (hit), n mismatches, n gaps, query start, query end, subject start, subject end, e-value, score, query length, subject length" | sed "s/, /\t/g") + +function convertSearchResults2Json() +{ + cd $serverDir + tsv=tmp/"$datasetName".tsv + out=tmp/"$datasetName".json + + parentName="$parent" + platform=$searchType # maybe + + datasetNameFull="$parentName.$datasetName" + nameArgs=(-d "$datasetNameFull" -p "$parentName" -n"$parentName:$platform") + + + export columnsKeyString='name chr pcIdentity lengthOfHspHit numMismatches numGaps queryStart queryEnd pos end' + # /dev/fd/2 + (echo "$columnHeaders"; \ + tee "$tsv" | \ + sort -t$'\t' -k 2 ) | \ + $sp -H -F$'\t' "${nameArgs[@]}" \ + > "$out" + ll "$out" >> $logFile; + # upload() will read $out + # caller knows the filename and datasetName, so probably won't need + # this (as is done in uploadSpreadsheet.bash) : + # echo "tmp/$out;$datasetNameFull" + cat "$tsv" +} + +#------------------------------------------------------------------------------- + +datasetId=$parent + +#echo ">BobWhite_c10015_641 +# AGCTGGGTGTCGTTGATCTTCAGGTCCTTCTGGATGTACAGCGACGCTCC" | + +fileName=/home/ec2-user/pretzel/"$fileName" + +datasetIdDir=/mnt/data_blast/blast/datasetId + +# Enable this to use dev_blastResult() for dev / loopback test, when blast is not installed. 
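+# i.e. change the following 'if false' to 'if true'.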
+if false +then + dev_blastResult | \ + ( [ "$addDataset" = true ] && convertSearchResults2Json || cat) | \ + ( [ -n "$resultRows" ] && head -n $resultRows || cat) + status=$? +else + # fastafile.fa + + # (! does not preserve $?, so if that is required, if cd ... ; then : ; else status=$?; echo 1>&3 "..."; exit $status; fi; , and equivalent for each if. ) + status=1 + if ! cd "$datasetIdDir" + then + echo 1>&3 'Error:' "Blast Database is not configured" + elif ! dbName="$datasetId".dir/$(cat "$datasetId".dbName) + then + echo 1>&3 'Error:' "Blast datasetId is not configured", "$datasetId" + elif ! time blastn -query "$fileName" -db "$dbName" -outfmt '6 std qlen slen' | \ + ( [ "$addDataset" = true ] && convertSearchResults2Json || cat) | \ + ( [ -n "$resultRows" ] && head -n $resultRows || cat) + then + echo 1>&3 'Error:' "Unable to run Blast" + else + status=$? # 0 + fi + +fi + +# exit $status + +#------------------------------------------------------------------------------- diff --git a/backend/scripts/uploadSpreadsheet.bash b/backend/scripts/uploadSpreadsheet.bash new file mode 100755 index 000000000..9ba40cfdf --- /dev/null +++ b/backend/scripts/uploadSpreadsheet.bash @@ -0,0 +1,478 @@ +#!/bin/bash + +case $PWD in + /) + resourcesDir=/app/scripts + toolsDev=$resourcesDir + ;; + *backend) + resourcesDir=../resources + ;; + *) + resourcesDir=resources + ;; +esac +# Default value of toolsDev, if not set above. +unused_var=${toolsDev=$resourcesDir/tools/dev} +sp=$toolsDev/snps2Dataset.pl +# functions_convert.bash is related, but not a current dependency +# source $toolsDev/functions_convert.bash + +echo $* >> uploadSpreadsheet.log + +[ -d tmp ] || mkdir tmp + +set -x +fileName=$1 +useFile=$2 + +#------------------------------------------------------------------------------- + +if ls -l /bin/ls | fgrep -q /bin/busybox +then + function ll() { ls -l "$@"; } +else + function ll() { ls -gG "$@"; } +fi + +#------------------------------------------------------------------------------- + +function filterOutComments +{ + sed '/^#/d;/^"#/d' + # or egrep -v '^#|^"#' | +} + +# Sanitize input by removing punctuation other than comma, _, ., \n, space. +# Commonly _ and . are present in parentName. +# Space appears in commonName. +function deletePunctuation() +{ + tr -d -c '[,\n_. ][:alnum:]' +} +# Split the Metadata table into 1 file per dataset, with the left +# column (Field names) and the dataset column. +# Name the file out/"$fileName"/$datasetName.Metadata.csv +function splitMetadata() +{ + fileDir=out/"$fileName" + # Remove any tmp files from previous upload. + [ -d "$fileDir" ] && rm -r "$fileDir" + mkdir "$fileDir" + meta4dataset="$fileDir"/"$datasetName".Metadata.csv + + # Filter out comments. + < "$fileName".Metadata.csv sed '/^#/d;/^"#/d' > "$fileDir"/Metadata.csv + # Select the first line. Trim off Field,. Trim spaces around , and |. Convert , to " ", prepend and append ". + # Result is the headings of the dataset columns, e.g. Alignment|EST_SNP, Map|Red x Blue + eval datasetNames=( $(< "$fileDir"/Metadata.csv head -1 | quotedHeadings) ) + # + for di in ${!datasetNames[*]}; + do + diN="${datasetNames[$di]}" + echo $di $diN >> uploadSpreadsheet.log; + # Skip columns with empty dataset name. + [ "$diN" = 'empty_datasetName' ] && continue; + datasetMeta="$fileDir"/"$diN".Metadata.csv + < "$fileDir"/Metadata.csv sed '/^#/d;/^"#/d' | cut -d, -f 1,$(($di+2)) | tail -n +2 > "$datasetMeta" ; + done +} + +# Given the headings line (first non-comment line), convert to quoted column header strings. 
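+# e.g. 'Field,Map|Red x Blue,Alignment|EST_SNP' -> '"Map|Red x Blue" "Alignment|EST_SNP"' (illustrative values).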
+# These are the dataset names, with the prefix 'Map|', 'Alignment|' etc. +# i.e. +# . discard col1 (Field,) +# . trim spaces around , and | +# . ensure " at ^ and $ +# . ensure " before and after comma +# . convert comma to space +# If the dataset name field is empty, flag it with 'empty_datasetName' +function quotedHeadings() +{ + sed 's/^Field,//; +s/ *, */,/g;s/ *| */|/g; +s/,,/,empty_datasetName,/g; +s/,,/,empty_datasetName,/g; +s/,$/,empty_datasetName/; +s/^\([^"]\)/"\1/;s/\([^"]\)$/\1"/; +s/,\([^"]\)/,"\1/g;s/\([^"]\),/\1",/g; +s/,/ /g;' + } + +# get namespace and commonName from metadata : +function readMetadata() +{ + worksheetName=$(echo "$i" | fileName2worksheetName) + datasetMeta="$fileDir"/"$worksheetName".Metadata.csv + + eval $( < "$datasetMeta" egrep '^(commonName|parentName|platform|shortName),' | deletePunctuation \ + | awk -F, '{ printf("%s=\"%s\";\n", $1, $2); }' ) + echo namespace=$namespace, commonName=$commonName >> uploadSpreadsheet.log +} + +# echo PATH=$PATH +mv "$fileName" tmp/. +cd tmp +# out/ was for .csv, maybe not needed +[ -d out ] || mkdir out out_json +#[ -d chrSnps ] || mkdir chrSnps + +# Extract worksheetName from filename of the worksheet csv +function fileName2worksheetName() { + sed 's/\.csv$//;s/.*\.\([A-Za-z ]*\)|/\1|/;s/^ *//;s/ *$//;s/ *| */|/g;'; +} + +# Extract datasetName from filename of the worksheet csv +function fileName2DatasetName() { + # Trim off trailing .csv and fileName and worksheet label up to | + # Trim off outside spaces. + # input : $worksheetFileName, output: $datasetName + sed 's/\.csv$//;s/.*|//;s/^ *//;s/ *$//;'; +} + +set +x +# Long names for column headers +declare -A columnFullName +columnFullName['name']=Name +columnFullName['chr']=Chromosome +columnFullName['pos']=Position +columnFullName['end']=Position_End +set -x + +# Handle some variation in the recognised column header names. +# Prepare columnsKeyString, which is used in snps2Dataset.pl +# @param worksheetFileName name of .csv output for 1 worksheet +columnsKeyStringPrepare() +{ + worksheetFileName=$1 + head -1 "$worksheetFileName" >> uploadSpreadsheet.log + # There may not be a comma after Position and End. + export columnsKeyString=$(head -1 "$worksheetFileName" | sed "s/Marker,/name,/i;s/Name,/name,/g;s/Chromosome,/chr,/; +s/,Qs,/,pos,/;s/,Qe/,end/; +s/,Start,/,pos,/i;s/,End/,end/i; +s/,Position/,pos/i; +s/,/ /g; +") + echo columnsKeyString="$columnsKeyString" >> uploadSpreadsheet.log + + # Check that the required columns are present + errorMessages= + for columnName in name chr pos + do + echo "$columnsKeyString" | fgrep -q "$columnName" || errorMessages+="${columnFullName[$columnName]} column is required. " + done + if [ -n "$errorMessages" ] + then + # Output to Error channel back to server. (maybe also to stderr?) + echo 1>&3 "Error: '$worksheetFileName' : $errorMessages;$datasetName" + fi + # return true (0) if there are no errors + [ -z "$errorMessages" ] +} + +# @param env $i worksheetFileName +function linkageMap() +{ + datasetName=$(echo "$i" | fileName2DatasetName); + echo "fileName=$fileName, datasetName=$datasetName" >> uploadSpreadsheet.log; + columnsKeyStringPrepare "$i" || return $? 
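+ # e.g. headers 'Marker,Chromosome,Position' yield columnsKeyString='name chr pos' (illustrative).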
+ # ../ because of cd tmp + out=out_json/"$i".json + <"$i" chrOmit | ../$sp "${optionalArgs[@]}" -d "$datasetName" -p '' -n "$namespace" -c "$commonName" -g > "$out" ; + ll "$out" >> uploadSpreadsheet.log; + # upload() will read these files + echo "tmp/$out;$datasetName" +} + +# @param env $i worksheetFileName +function snpList() +{ + datasetName=$(echo "$i" | fileName2DatasetName); + echo "fileName=$fileName, datasetName=$datasetName" >> uploadSpreadsheet.log; + # or continue/break. whether to check later worksheets in the file for errors ? + columnsKeyStringPrepare "$i" || return $? + + out=out_json/"$i".json + # 'tail -n +2' to remove header before sort. (note also headerLine()). + + # from metadata : parentName platform shortName commonName + + # $datasetNameFull is $datasetName, with $parentName. prefixed, if it is + # defined; this is the actual dataset name created, and the value + # used by removeExisting(). + + if [ -n "$parentName" ] + then + datasetNameFull="$parentName.$datasetName" + nameArgs=(-d "$datasetNameFull" -p $parentName -n"$parentName:$platform") + else + datasetNameFull="$datasetName" + nameArgs=(-d "$datasetName" ) + if [ -n "$platform" ] + then + nameArgs+=(-n "$platform") + fi + fi + if [ -n "$shortName" ] + then + nameArgs+=(-s "$shortName") + fi + if [ -n "$commonName" ] + then + nameArgs+=(-c "$commonName") + fi + + columnHeaderFile=out/columnHeaders.csv + <"$i" filterOutComments | head -1 > $columnHeaderFile + (cat $columnHeaderFile; \ + <"$i" filterOutComments | tail -n +2 | chrOmit | sort -t, -k 2 ) | \ + ../$sp "${nameArgs[@]}" "${optionalArgs[@]}" \ + > "$out" + ll "$out" >> uploadSpreadsheet.log; + # upload() will read these files + echo "tmp/$out;$datasetNameFull" +} + +function qtlList() +{ + datasetName=$(echo "$i" | fileName2DatasetName); + echo "fileName=$fileName, datasetName=$datasetName" >> uploadSpreadsheet.log; + columnsKeyStringPrepare "$i" || return $? + cd .. + # out=out_json/"$i".json + outDir=tmp/"$fileDir"/out_json + # or && rm -r "$outDir" + [ -d "$outDir" ] || mkdir "$outDir" + # If the dataset worksheet has a column in Metadata, append to that file, otherwise /metaType.csv + if [ -f "$datasetMeta" ] + then + metaTypeFile="$datasetMeta" + # $datasetMeta is already listed in $prefixedArgs + else + metaTypeFile=tmp/"$fileDir"/metaType.csv + localArgs=(-M "$metaTypeFile") + fi + echo "type,QTL" > "$metaTypeFile" + prefixTmpToArgs + > uploadSpreadsheet.log; + # upload() will read these files + # echo "tmp/$out;$datasetName" + cd "$outDir"; + for datasetFile in *.json + do + datasetName=$(echo "$datasetFile" | sed 's/.json$//') + echo "$outDir/$datasetFile;$datasetName" + done +} + +# Prefix tmp/ to the paths in $optionalArgs +# Put the result in $prefixedArgs. +function prefixTmpToArgs() +{ + prefixedArgs=(); for arg in "${optionalArgs[@]}"; do case "$arg" in out/*|*csv) prefixedArgs+=("tmp/$arg");; *) prefixedArgs+=("$arg");; esac; done; + echo "${prefixedArgs[@]}" >> uploadSpreadsheet.log; +} + +# The 'Chromosome Renaming' worksheet was handled here by chrRenamePrepare() and chrRename() up until 7b0bbf20, +# by creating a .sed script from the 'Chromosome Renaming' worksheet and applying it to rename the chromosome column. +# Now this is done by passing $chrRenameCSV via optionalArgs to $sp. 
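+# (see optionalArgs+=(-R "$chrRenameCSV") in the per-worksheet loop below.)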
+# +# related in functions_convert.bash : sed -f chrSnps/"$datasetName".chrRename.sed | +# and : sed -f $genBankRename | + + +# If the spreadsheet contains a 'Chromosomes to Omit' worksheet, +# then create a .sed script to filter out SNPs with those values in chromosome column. +function chrOmitPrepare() +{ + chrOmitCSV=$(echo "$fileName".*[Cc]hromosomes' to '[Oo]mit*csv) + if [ -f "$chrOmitCSV" ] + then + chrOmitSed=out/"$fileName".chrOmit.sed + # Can change this to generate awk which can target only the chromosome column. + # + # Skip empty lines, because they would generate /,/d which filters out everything. + # Match the leading comma and not the following comma because the + # data case in hand has a fixed part followed by an id; perhaps + # change to regexp. + < "$chrOmitCSV" awk -F, '/./ { printf("/,%s/d\n", $1); }' > $chrOmitSed + fi +} + +# If the spreadsheet contains a 'Chromosomes to Omit' worksheet, (i.e. $chrOmitSed is defined) +# then map the chromosome column as indicated. +function chrOmit() +{ + if [ -n "$chrOmitSed" ] + then + sed -f "$chrOmitSed" + else + cat + fi +} + + + +# Spaces in $fileName are not handled when running ssconvert via docker, so +# rename the file into a directory +function renameIfSpaces() +{ + # "" around $fileName not required here - bash does not split the var value at white-space + case $fileName in + *' '*) + # [ -d renamed ] || mkdir renamed + fileNameTo=$(echo "$fileName" | sed "s/ /_/g") + renamed="$fileNameTo" + # renamed/; mkdir $renamed + suffix=$(echo $fileName | sed -n "s/.*\.//p") + newName="$fileNameTo" # $renamed/1."$suffix" + mv "$fileName" $newName + fileName="$newName" + ;; + *) + ;; + esac +} + +# These warnings output by ssconvert do not seem to be significant : +# Undefined number format id '43' +# Undefined number format id '41' +# Undefined number format id '44' +# Undefined number format id '42' +# Unexpected element 'workbookProtection' in state : +# workbook +# +# from unzip of the .xlsx : ./xl/styles.xml : +# ... +# +# ... +# +# +# ... ditto for 41, 44, 42 +# These 4 numFmtId are not defined; see the list here : https://stackoverflow.com/a/4655716 +# ... +# 40 = '#,##0.00;[Red](#,##0.00)'; +# +# 44 = '_("$"* #,##0.00_);_("$"* \(#,##0.00\);_("$"* "-"??_);_(@_)'; +# 45 = 'mm:ss'; +# ... +# from https://stackoverflow.com/questions/4655565/reading-dates-from-openxml-excel-files +# which also links : +# https://docs.microsoft.com/en-us/previous-versions/office/developer/office-2010/ee857658(v=office.14) +# which also does not define those 4 Numfmtid-s + +function spreadsheetConvert() +{ + # installation of ssconvert (gnumeric) on centos had dependency problems, so using docker + if [ -f /etc/system-release-cpe ] + then + renameIfSpaces + # if renameIfSpaces has changed $fileName, then "$2" and "$3" need to change also + # Perhaps switch from centos and install ssconvert directly; but if renameIfSpaces + # is needed, can refactor this to pass in $fileName perhaps. 
+ docker run \ + -u $(id -u):$(id -g) \ + -v /home/ec2-user/pretzel/tmp:/home/user \ + -e PARAMS="$1" \ + -e FILETOREAD="$fileName" \ + -e FILETOWRITE="$fileName.%s.csv" \ + nalscher/ssconvert:latest + else + ssconvert "$1" "$2" "$3" + fi +} + +case $fileName in + *.xlsx|*.xls|*.ods) + ll -d "$fileName" >> uploadSpreadsheet.log + echo ssconvert >> uploadSpreadsheet.log + # Remove outputs from previous upload of $fileName + rm -f "$fileName".*.csv + # for streaming input : if [ "$useFile" != true ] ; then cat >"$fileName"; fi + spreadsheetConvert -S "$fileName" "$fileName.%s.csv" + status=$? + echo ssconvert status $status >> uploadSpreadsheet.log + if [ $status -eq 0 ] + then + chrOmitPrepare + splitMetadata + warningsFile="$fileDir"/warnings + + # i is worksheetFileName + for i in "$fileName".*'|'*csv + do + echo "i=$i" >> uploadSpreadsheet.log; + + readMetadata + + optionalArgs=() + if [ -f "$datasetMeta" ] + then + optionalArgs=(-M "$datasetMeta") + fi + + chrRenameCSV=$(echo "$fileName".*'Chromosome Renaming'*csv) + if [ -f "$chrRenameCSV" ] + then + optionalArgs+=(-R "$chrRenameCSV") + fi + + # (until f556a24e, the fileName prefix guided which of these + # functions was called, but now the fileName is arbitrary and + # only the worksheet name indicates the type of dataset) + case $i in + "$fileName".*'| Template'*csv) + msg="$i : worksheet name is Template" + echo "$msg" >> uploadSpreadsheet.log + echo "$msg" >> "$warningsFile" + ;; + + "$fileName".*Map'|'*csv) + linkageMap + status=$? + ;; + "$fileName".*Alignment'|'*csv) + snpList + status=$? + ;; + "$fileName".*QTL'|'*csv) + qtlList + status=$? + ;; + # Later : Genome, etc + *) + echo "$i : expected Map|, Alignment|, QTL| *" >> uploadSpreadsheet.log + ;; + + esac + done + + if [ -z "$datasetName" ] + then + if [ -f "$warningsFile" ] + then + warningsText=" Warnings: "$(cat "$warningsFile") + else + warningsText= + fi + echo "Error: '$fileName' : no worksheets defined datasets. $warningsText;" + ll "$fileName".*csv >> uploadSpreadsheet.log + else + if [ -f "$warningsFile" ] + then + cat "$warningsFile" 1>&4 + fi + fi + + fi + ;; + *) + echo $* .xlsx, .xls, or .ods expected >>uploadSpreadsheet.log + status=$? 
+ ;; +esac + + +exit $status + diff --git a/backend/server/boot/access.js b/backend/server/boot/access.js index 4aafacf4e..d90205c41 100644 --- a/backend/server/boot/access.js +++ b/backend/server/boot/access.js @@ -1,3 +1,6 @@ +/* global process */ +/* global module */ + module.exports = function(app) { var Role = app.models.Role; @@ -73,14 +76,20 @@ module.exports = function(app) { } if (context.property == 'find' || context.property == 'create' || + // Dataset context.property == 'upload' || context.property == 'tableUpload' || context.property == 'createComplete' || + // Feature context.property == 'search' || context.property == 'depthSearch' || + context.property == 'dnaSequenceSearch' || + // Alias context.property == 'bulkCreate' || + // Block context.property == 'paths' || context.property == 'pathsProgressive' || + context.property == 'blockFeaturesAdd' || context.property == 'blockFeaturesCount' || context.property == 'blockFeaturesCounts' || context.property == 'blockFeatureLimits' || @@ -90,7 +99,9 @@ module.exports = function(app) { context.property == 'pathsAliasesProgressive' || context.property == 'pathsAliasesViaStream' || context.property == 'namespacesAliases' || + // Configuration context.property === 'runtimeConfig' || + // Dataset context.property === 'cacheClear' || context.property === 'cacheClearRequests' ) { diff --git a/backend/server/middleware/route_time.js b/backend/server/middleware/route_time.js index f35dbccfa..cea021566 100644 --- a/backend/server/middleware/route_time.js +++ b/backend/server/middleware/route_time.js @@ -1,12 +1,12 @@ module.exports = function() { return function tracker(req, res, next) { - // console.log('Request tracking middleware triggered on %s', req.url); + // console.log('Request tracking middleware triggered on %s', req.url, new Date().toISOString()); var start = process.hrtime(); res.once('finish', function() { var diff = process.hrtime(start); // var ms = diff[0] * 1e3; var ms = diff[0] * 1e3 + diff[1] * 1e-6; - console.log('The request processing time is %d ms.', ms.toFixed(3), 'for', req.path); + console.log('The request processing time is', ms.toFixed(3), 'ms.', 'for', req.path); }); next(); }; diff --git a/frontend/app/components/axis-2d.js b/frontend/app/components/axis-2d.js index 54771c161..220c68ea0 100644 --- a/frontend/app/components/axis-2d.js +++ b/frontend/app/components/axis-2d.js @@ -347,6 +347,8 @@ export default Component.extend(Evented, AxisEvents, { dLog('axis-2d didInsertElement', this.get('axisID')); this.getUse(); + + later(() => this.dragResizeListen(), 1000); }, getUse(backoffTime) { let oa = this.get('data'), @@ -360,7 +362,7 @@ export default Component.extend(Evented, AxisEvents, { } else { this.set('axisUse', axisUse); this.set('use', use); - dLog("axis-2d didInsertElement", this, this.get('axisID'), axisUse.node(), use.node()); + dLog("axis-2d didInsertElement getUse", this, this.get('axisID'), axisUse.node(), use.node()); this.set('subComponents', []); } }, @@ -603,8 +605,6 @@ export default Component.extend(Evented, AxisEvents, { let prevSize, currentSize; let stacks = this.get('data').stacks; dLog("components/axis-2d didRender()"); - - later(() => this.dragResizeListen(), 1000); }, /** Called when resizer element for split axis resize is dragged. 
diff --git a/frontend/app/components/axis-charts.js b/frontend/app/components/axis-charts.js index ff1c71283..6c0d31fcc 100644 --- a/frontend/app/components/axis-charts.js +++ b/frontend/app/components/axis-charts.js @@ -274,8 +274,11 @@ export default InAxis.extend({ dLog('resizeEffectHere in axis-charts', this.get('axisID')); }), drawContentEffect : computed( - /** .zoomedDomain is (via InAxis) axis1d.zoomedDomain; for this dependency use the -Debounced */ - 'axis1d.currentPosition.yDomainDebounced', + /** .zoomedDomain is (via InAxis) axis1d.zoomedDomain; for this dependency use the -Debounced + * and -Throttled so that it maintains a steady update and catches the last update. + * equiv : this.axis1d.{zoomedDomainThrottled,zoomedDomainDebounced} + */ + 'axis1d.currentPosition.{yDomainDebounced,yDomainThrottled}', 'blockViews.@each.isZoomedOut', /** .@each.x.y is no longer supported; if these dependencies are needed, can * define block.featuresCounts{,Results}Length @@ -384,7 +387,9 @@ export default InAxis.extend({ charts = this.get('chartsVariableWidth'), chartWidths = charts.mapBy('allocatedWidth') .filter((aw) => aw), - widthSum = chartWidths.reduce((sum, w) => sum += w, 0); + widthSum = chartWidths.reduce((sum, w) => sum += w, 0), + chartWidth = this.childWidths.get(className); + if (chartWidth && chartWidth[1] !== widthSum) { // later allocate each chart, for separate offsets : (this.get('className') + '_' + chart.name) next(() => { let childWidths = this.get('childWidths'), @@ -394,6 +399,7 @@ export default InAxis.extend({ childWidths.set(className, [widthSum, widthSum]); } }); + } }, /** @@ -404,6 +410,7 @@ export default InAxis.extend({ let axisCharts = this.get('axisCharts'), chart = this.get('charts')[chartName]; + if (chart) /*if (! chart.ranges)*/ { let blocksData = this.get('blocksData'), diff --git a/frontend/app/components/axis-tracks.js b/frontend/app/components/axis-tracks.js index aa726e1e3..14ad0f472 100644 --- a/frontend/app/components/axis-tracks.js +++ b/frontend/app/components/axis-tracks.js @@ -141,8 +141,8 @@ function configureSubTrackHover(interval) function configureClick(selected, featureData2Feature) { return function (selection) { - selection.on('click', function (d, i, g) { clickTrack.apply(this, [selected, featureData2Feature, d])}); - } + selection.on('click', function (d, i, g) { clickTrack.apply(this, [selected, featureData2Feature, d]);}); + }; } function clickTrack(selected, featureData2Feature, featureData) { let feature = featureData2Feature.get(featureData); @@ -696,6 +696,23 @@ export default InAxis.extend({ return result; }, + remove () { + /** Based on layoutAndDrawTracks() : gp .remove() + * related : axis-1d.js : selectGroup() + */ + let + axisID = this.get('axisID'), + aS = selectAxis(axisID), + gp = aS.select("g.axis-use") + .selectAll("g.tracks"); + if (! gp.empty()) { + console.log('removing', gp.nodes(), gp.node()); + gp + .remove(); + } + }, + + /** Layout the feature rectangles and render them. * @param resized undefined or {width, height}, which are true if the caller is a resize event. * @param tracks result of tracksTree @@ -825,11 +842,20 @@ export default InAxis.extend({ return ((d.layer || 1) - 1) * trackWidth * 2; }; }; + /** @return the position of the start of the feature interval. + * @desc This is used in combination with heightDir(). + */ + function yPosnDir(d) { + return y(d[0]); + }; + /** @return the position of the start or end of the feature interval, whichever is smaller. 
+ * This is used in combination with height(), which returns a positive value. + */ function yPosn(d) { /*console.log("yPosn", d);*/ if (y(d[0]) > y(d[1])) return y(d[1]); else - return y(d[0]) + return y(d[0]); }; /** return the end of the y scale range which d is closest to. * Used when transitioning in and out. @@ -844,11 +870,25 @@ export default InAxis.extend({ end = range[1-closerToStart]; return end; } - function height(d) { + /** @return height of feature in pixels at current scale, + * including the direction of the feature interval, i.e. start - end, + * i.e. .value[1] - .value[0], + */ + function heightDir(d) { + /** createIntervalTree() requires positive intervals, so the + * .value [start,end] are reversed if negative (start > end) by + * tracksTree() and SubElement.getInterval(). + * height() is used to determine the direction of triangles so + * use feature.value[] here + */ + let feature = thisAt.featureData2Feature.get(d), + value = feature.get('value'); + d = value; + let nonZeroInterval = Ember.isArray(d) && (d.length > 1) && (d[1] !== d[0]); /** if axis.zoomed then 0-height intervals are included, not filtered out. * In that case, need to give a height > 0. */ - let height = (d[1] == d[0]) ? 0.01 : y(d[1]) - y(d[0]); + let height = ! nonZeroInterval ? 0.01 : y(d[1]) - y(d[0]); /* There was an issue causing NaN here, likely caused by 1-element array * .value, which is now handled. Here that was causing d[1] undefined, * and hence y(d[1]) NaN. @@ -857,7 +897,16 @@ export default InAxis.extend({ { console.log('height NaN', d, 'y:', y.domain(), y.range()); } - // When axis is flipped, height will be negative, so make it positive + return height; + } + /** Same as heightDir() but return the absolute value, used for + * drawing which requires a positive height. + */ + function height(d) { + let height = heightDir(d); + /* When axis is flipped or feature direction is negative, height + * will be negative, so make it positive + */ if (height < 0) height = -height; return height; @@ -983,7 +1032,7 @@ export default InAxis.extend({ // Make description unique when multiple features with same name. return featureData.description+"_"+featureData[0]; } - /** Add the within */ + /** Add the s and/or s, or sub-elements, within */ let /** block select - datum is blockId. */ bs = gAxis.selectAll("g.axis-use > g.tracks > g"), @@ -1009,34 +1058,47 @@ export default InAxis.extend({ .data(trackBlocksData, trackKeyFn), re = rs.enter(), rx = rs.exit(); dLog(rs.size(), re.size(), 'rx', rx.size()); - rx - .transition().duration(featureTrackTransitionTime) - .attr('x', 0) - .attr('y', yEnd) - .on('end', () => { - rx.remove(); - }); let blockC = thisAt.lookupAxisTracksBlock(blockId), trackWidth = blockC.get('trackWidth'); appendRect.apply(this, [re, rs, trackWidth, false]); } - /** - rename re and rs to ge and gs + /** Within a single block, render the features or sub-features of + * the selection as a or + * + * @param re .entry() selection + * @param rs selection + * @param width + * @param subElements true if features are sub-features + * - rename re and rs to ge and gs * es could be called rs * @param subElements true if (gene) sub-elements (intro/exon) are displayed for this block. 
*/ function appendRect(re, rs, width, subElements) { + const + // this.parentElement.__data__ is also the blockId + blockId = this.__data__, + block = thisAt.lookupBlock(blockId), + isPhased = block && (block.hasTag('phased') || block.get('datasetId._meta.phased')), /** true to enable use of 5-point (rectangle+triangle) as an * alternate representation, with the triangle vertex indicating * direction. */ - const useTriangle = thisAt.get('urlOptions.trackTriangles'); + useTriangle = false && isPhased; /** true means will not be used - only (rectangle+triangle). */ const alwaysTri = true; let + /** true for the 3-point . This is not alternated with , i.e. useBoth is false */ + useTriangle3 = isPhased && ! subElements, + /** true when using */ + usePath = useTriangle3 || useTriangle, + /** true when using either or depending on zoom + * (triangle point is only visible when zoomed in). */ + useBoth = useTriangle && ! alwaysTri, + tagName = ['rect', 'path'][+usePath], ra = re - .append((d) => createElementSvg(useTriangle && (alwaysTri || showTriangleP(y, d)) ? 'path' : 'rect')); + .append(! useBoth ? tagName : (d) => createElementSvg(useTriangle && (alwaysTri || showTriangleP(y, d)) ? 'path' : 'rect')); ra .attr('class', 'track') .each(subElements ? configureSubTrackHover : configureTrackHover); @@ -1070,6 +1132,8 @@ export default InAxis.extend({ elementSelector = isSubelement ? '.element' : ':not(.element)', /** match either {rect,path}.track */ es = subElements ? + /** .track here will match rect.track, path.track; not just tagName + '.track' */ + // if tagName is required here, would need to repeat the whole selector for rect,path - better to make class more specific rs.selectAll("g" + gSelector + " > .track" + elementSelector) : rs, /** ra._parents is the g[clip-path], whereas es._parents are the g.track.element * es.merge(ra) may work, but ra.merge(es) has just 1 elt. @@ -1081,15 +1145,24 @@ export default InAxis.extend({ .attr('y', (d,i,g) => useTriangle && (alwaysTri || showTriangleP(y, d)) ? undefined : yEnd.apply(this, [d, i, g])); if (! useTriangle) { ra - .call(rectUpdate); + .call(positionUpdate); + let rmt = rm .transition().duration(featureTrackTransitionTime) - .call(rectUpdate); + .call(fadeOutIfZoomedOut) + .call(positionUpdate); + if (useTriangle3) { + ra + .each(triangleDimensions); + rmt + .each(triangleDimensions); + } } else if (alwaysTri) { let xPosnFn = xPosnS(subElements); rm - .attr('d', (d,i,g) => rectTrianglePath(y, d, width, xPosnFn.apply(this, [d, i, g]))) + // maybe transition + .attr('d', (d,i,g) => rectTrianglePath(y, d, width, xPosnFn.apply(this, [d, i, g]))); } else { rm @@ -1100,7 +1173,7 @@ export default InAxis.extend({ g[i] = swapTag('rect', 'path', g[i], attributesForReplace); let x = xPosnS(subElements).apply(this, [d, i, g]); d3.select(g[i]) - .attr('d', (d,i,g) => rectTrianglePath(y, d, width, x)) + .attr('d', (d,i,g) => rectTrianglePath(y, d, width, x)); }.apply(this, [d, i, g]) : function (d, i, g) { g[i] = swapTag('path', 'rect', g[i], attributesForReplace); @@ -1109,20 +1182,131 @@ export default InAxis.extend({ }.apply(this, [d, i, g]) ); } + + /** Transition the exiting s and to the end of the + * y scale range which d is closest to, using yEnd. + * Then remove them after that transition. 
+ */ + let rx = rs.exit(); + if (usePath) { + const + xPosnD = 0, + yPosn = yEnd; + rx + .transition().duration(featureTrackTransitionTime) + .attr('transform', (d,i,g) => "translate(" + xPosnD + ", " + yPosn.apply(this, [d, i, g]) + ")") + .on('end', () => { + rx.remove(); + }); + } else { + rx + .transition().duration(featureTrackTransitionTime) + .attr('x', 0) + .attr('y', yEnd) + .on('end', () => { + rx.remove(); + }); + } + + + function positionUpdate(selection) { + if (usePath) { + selection + .attr('transform', featureTransform); + } else { + rectUpdate(selection); + } + } function rectUpdate(selection) { selection .attr('x', xPosnS(subElements)) .attr('y', yPosn) .attr('height' , height); } + /** zoom:Reset will generally replace feature tracks with + * features counts charts (axis-charts.js), depending on + * featuresCountsThreshold. + * To make this less abrupt, transition the tracks out by + * reducing opacity. + */ + function fadeOutIfZoomedOut(selection) { + /** If block.isZoomedRightOut and selection is a transition, + * and opacity is not set, set it to 1 immediately, and + * transition to 0. + */ + let t0 = selection.node(); + if (t0) { + /** may not be blockId, e.g subElements, so default to false. */ + let blockId = t0.parentElement.__data__, + block = blockId && oa.stacks.blocks[blockId], + out = block && block.block.isZoomedRightOut(); + /** if selection is a transition */ + if (out && selection.selection) { + /* if called again before transition is complete, + * don't restart the attribute values. */ + if (! t0.hasAttribute('stroke-opacity')) { + selection.selection() + .attr('stroke-opacity', 1) + .attr('fill-opacity', 1); + } + selection + .attr('stroke-opacity', 0) + .attr('fill-opacity', 0); + } + } + } rm .attr('stroke', blockTrackColourI) .attr('fill', blockTrackColourI) ; + dLog('ra', ra.size(), ra.node(), 'rm', rm.size(), rm.node()); // result is not used yet. return ra; } + /** Position the feature representation () with a transform. + * This plays an analogous role to rectUpdate(), which does not + * apply to path because it sets x,y,height. + */ + function featureTransform(d, i, g) { + let + xPosnD = xPosnS(/*subElements*/false).apply(this, [d, i, g]), + yPosnD = yPosnDir.apply(this, [d, i, g]), + /** xPosnS() offsets by + d.layer*trackWidth */ + transform = "translate(" + xPosnD + ", " + yPosnD + ")"; + return transform; + } + /** triangle with tip facing toward the axis. + * Used to make features obvious when small (height) and sparse / useTriangle3. + * (could use a line segment with an arrow, see lineDimensions() : showArrow). + */ + function triangleDimensions(d, i, g) { + let + width = trackWidth / 2, + tWidth = width/2, + heightD = heightDir.apply(this, [d, i, g]), + /** either a horizontal arrow pointing left, or a vertical arrow pointing in the direction of the feature interval. */ + vertical = true, + /** based on lineDimensions() : sideWedge -> triangle, tipY -> heightD, wedgeX -> tWidth, */ + triangle = vertical ? + [ + [width, 0], + [0, 0], + [tWidth, heightD] + ] : + [ + [tWidth, heightD], + [0, heightD / 2], + [tWidth, 0] + ], + points = triangle, + l = + d3.select(this) + .attr('d', d3.line()(points)) + ; + dLog('triangleDimensions', width, tWidth, heightD, points); + } + /** subElements */ function eachGroup(blockId, i, g) { let @@ -1449,7 +1633,7 @@ export default InAxis.extend({ return blockTrackColour; } /** note of how blockColour() would be used. 
*/ - let blockTrackColour = blockColour('rect.track'); + let blockTrackColour = blockColour('rect.track, path.track'); if (false) // gp d3.selectAll('g.tracks') @@ -1557,12 +1741,12 @@ export default InAxis.extend({ } interval.description = feature.get('name'); interval.udescription = intervalUniqueName(interval.description, interval); - /* for datasets with tag 'SNP', feature value[2] is reference / alternate, + /* for datasets with tag 'SNP', feature .values.{ref,alt} is reference / alternate, * e.g. "A/G", "T/C" etc */ - let tags = feature.get('blockId.datasetId.tags'); - if (tags && tags.length && (tags.indexOf("SNP") !== -1) && - (typeof interval[2] === 'string')) { - interval.description += ('\n' + interval[2]); + let values = feature.get('blockId.isSNP') && feature.get('values'); + if (values && (values.ref || values.alt)) { + let refAlt = (values.ref || '') + '/' + (values.alt || ''); + interval.description += ('\n' + refAlt); } return interval; }); @@ -1586,6 +1770,13 @@ export default InAxis.extend({ return tracks; }), + lookupBlock(blockId) { + let + blockS = this.get('stackBlocks')[blockId], + block = blockS && blockS.block; + return block; + }, + /** The blocks within axis-tracks have collected some attributes, * so will likely be split out as a sub-component. * As an interim step, this function maps blockId to an object @@ -1770,6 +1961,11 @@ export default InAxis.extend({ yDomain = this.get('yDomain'); console.log('showTrackBlocks', this, tracks, axis1d, isViewed, /*yDomain*/ this.get('axis1d.currentPosition.yDomainThrottled'), 'axis1d.zoomed', zoomed, extended, featureLength); let featuresLength; + /* This works but doesn't give a transition from tracks to charts + if (! this.get('trackBlocksR.length') || axis1d.isZoomedRightOut()) { + this.remove(); + } else + */ if (isViewed) { let blockIds = d3.keys(tracks.intervalTree); diff --git a/frontend/app/components/draw-map.js b/frontend/app/components/draw-map.js index d3d71119b..13bbd7fd2 100644 --- a/frontend/app/components/draw-map.js +++ b/frontend/app/components/draw-map.js @@ -376,9 +376,21 @@ export default Component.extend(Evented, { let featuresAsArray = d3.keys(selectedFeatures) .map(function (key) { return selectedFeatures[key].map(function(feature) { - //feature contains feature name and position, separated by " ". - var info = feature.split(" "); - return {Chromosome:key,Feature:info[0],Position:info[1]}; + /** feature is now the Ember object models/feature + * Until 0eeda0a7, feature contained feature name and position, separated by " ". + */ + let selectedFeature = { + Chromosome : key, + Feature : feature.name, + Position : feature.location, /* i.e. .value[0]*/ + /** Chromosome, Feature and Position can be derived from + * feature, so after the various uses of this are + * changed to use .feature, the structure can be + * replaced by simply feature. 
+ */ + feature + }; + return selectedFeature; }); }) .reduce(function(a, b) { @@ -1229,7 +1241,7 @@ export default Component.extend(Evented, { breakPoint(b.longName(), isParent, 'should be !=', isChild, b.axis, features); if (filterChildren && isParent) { - let add = b.axis.dataBlocks().filter(function (b) { return b.block.get('isViewed'); }); + let add = b.axis.dataBlocks(false, false).filter(function (b) { return b.block.get('isViewed'); }); if (add.length) console.log(b.longName(), 'add to orphaned :', Block_list_longName(add)); orphaned = orphaned.concat(add); @@ -2701,7 +2713,7 @@ export default Component.extend(Evented, { axisIds = axisTitleS.nodes().mapBy('__data__'), axes1 = axisIds.map((axisId) => oa.axes[axisId]); axes1.forEach( - (a) => a.axis1d && bind(a.axis1d, a.axis1d.showZoomResetButtonXPosn)()); + (a) => a && a.axis1d && bind(a.axis1d, a.axis1d.showZoomResetButtonXPosn)()); } updateAxisTitleSize(axisG.merge(axisS)); @@ -3067,7 +3079,9 @@ export default Component.extend(Evented, { } /** Setup hover info text over scaffold horizTick-s. - * @see based on similar configureAxisTitleMenu() + * Based on similar @see configureAxisTitleMenu() + * @desc These are being factored to utils/hover.js : + * @see configureHover, configureHorizTickHover */ function configureHorizTickHover(location) { @@ -3725,6 +3739,10 @@ export default Component.extend(Evented, { { let brushExtents = getBrushExtents(); + if (! brushExtents[i]) { + dLog('axisBrushedDomain no brush for', p, i, brushExtents); + return undefined; + } let brushedDomain = axisRange2Domain(p, brushExtents[i]); console.log('axisBrushedDomain', p, i, brushExtents, brushedDomain); return brushedDomain; @@ -3747,7 +3765,8 @@ export default Component.extend(Evented, { /** Convert the given brush extent (range) to a brushDomain. * @param p axisID * @param range a value or an interval in the axis range. This may be e.g. a brush extent - * @return domain the (reverse) mapping of range into the axis domain + * @return domain the (reverse) mapping of range into the axis domain. + * undefined if range is undefined. */ function axisRange2Domain(p, range) { @@ -3758,6 +3777,9 @@ export default Component.extend(Evented, { dLog('axisRange2Domain', p, range, 'scale has no domain', oa.y[p].domain()); return undefined; } + if (! range) { + return undefined; + } let axis = oa.axes[p], brushedDomain = range.length ? range.map(r2dFn) : r2dFn(range); @@ -3941,7 +3963,8 @@ export default Component.extend(Evented, { * features so don't brush them. */ /* can pass visible=true here - a slight optimisation; it depends on the * expression in dataBlocks() which distinguishes data blocks. */ - let childBlocks = axis.dataBlocks(); + let childBlocks = axis.dataBlocks(true, false) + .filter((blockS) => blockS.block.get('isBrushableFeatures')); let range = [0, axis.yRange()]; console.log(axis, 'childBlocks', childBlocks, range); /* @@ -3969,6 +3992,7 @@ export default Component.extend(Evented, { let blockFeatures = oa.z[block.axisName]; // or block.get('features') d3.keys(blockFeatures).forEach(function(f) { + let feature = blockFeatures[f]; let fLocation; if (! 
isOtherField[f] && ((fLocation = blockFeatures[f].location) !== undefined)) { @@ -3982,7 +4006,8 @@ export default Component.extend(Evented, { ) { //selectedFeatures[p].push(f); selectedFeaturesSet.add(f); - selectedFeatures[mapChrName].push(f + " " + fLocation); + // previously pushed : f + " " + fLocation + selectedFeatures[mapChrName].push(feature); /** Highlight the features in the brushed regions * o[p] : the axis location; now use 0 because the translation of parent g.axis-outer does x offset of stack. * fLocation : actual feature position in the axis, diff --git a/frontend/app/components/draw/axis-1d.js b/frontend/app/components/draw/axis-1d.js index bb95a17cc..69728813b 100644 --- a/frontend/app/components/draw/axis-1d.js +++ b/frontend/app/components/draw/axis-1d.js @@ -42,7 +42,8 @@ import { selectGroup } from '../../utils/draw/d3-svg'; import { breakPoint } from '../../utils/breakPoint'; import { configureHover } from '../../utils/hover'; import { getAttrOrCP } from '../../utils/ember-devel'; -import { intervalExtent } from '../../utils/interval-calcs'; +import { intervalExtent, intervalOverlap } from '../../utils/interval-calcs'; +import { inRange } from '../../utils/draw/zoomPanCalcs'; import { updateDomain } from '../../utils/stacksLayout'; @@ -83,24 +84,27 @@ const componentName = 'axis-1d'; const className = "horizTick"; /** filter : @return true if the given Block is configured to display ticks. - * i.e. ! block.block.get('dataset').get('showPaths') + * + * Previously : ! block.block.get('dataset').get('showPaths') to select + * the scaffolds, but that is no longer relevant since ticks are no + * longer used for scaffolds. So now return block ... .isData */ function blockWithTicks(block) { - let showPaths = block.block.get('showPaths'); + let isData = block.block.get('isData'); // was .showPaths // dLog('blockWithTicks', block.axisName, showPaths); - return ! showPaths; + return isData; } /** Return a filter to select features which are within the current zoomedDomain * of the given block. * @param block stacks Block */ -function inRangeBlock(axisApi, range0, block) { +function inRangeBlock(range0, block) { return function (feature) { - /** comment in @see keyFn() */ - let featureName = getAttrOrCP(feature, 'name'); - return axisApi.inRangeI(block.axisName, featureName, range0); + let + axis1d = block.axis.axis1d; + return axis1d.inRangeR(feature, range0); }; } @@ -134,14 +138,16 @@ FeatureTicks.prototype.featuresOfBlock = function (featuresOfBlockLookup) { range0 = this.axis.yRange2(); return (block) => { - let inRange = inRangeBlock(this.axisApi, range0, block); + let inRange = inRangeBlock(range0, block); let blockR = block.block, blockId = blockR.get('id'), featuresAll = featuresOfBlockLookup(blockR), features = ! featuresAll ? [] : featuresAll .filter(inRange); - dLog(blockId, features.length, 'showTickLocations featuresOfBlock'); + if (trace_stack > 1) { + dLog(blockId, features.length, 'showTickLocations featuresOfBlock'); + } return features; }; }; @@ -151,7 +157,7 @@ FeatureTicks.prototype.featureColour = function (feature) { }; function blockTickEltId(groupName) { - return function (block) { return className + '_' + groupName + '_' + block.axisName; } + return function (block) { return className + '_' + groupName + '_' + block.axisName; }; } @@ -194,7 +200,7 @@ FeatureTicks.prototype.showTickLocations = function (featuresOfBlockLookup, setu /** data blocks of the axis, for calculating blockIndex i.e. colour. * colour assignment includes non-visible blocks . 
*/ - let blocksUnfiltered = extended ? [] : axis.dataBlocks(false); + let blocksUnfiltered = extended ? [] : axis.dataBlocks(false, false); if (trace_stack) dLog('blockIndex', axisName, axis, axis.blocks); blocksUnfiltered.forEach(storeBlockIndex); @@ -514,26 +520,22 @@ FeatureTicks.prototype.showLabels = function (featuresOfBlockLookup, setupHover, let attrY_featureY = this.attrY_featureY.bind(this); pSE.call(attrY_featureY); - if (false) { - pSM.call(attrY_featureY); - } else { let transition = this.selectionToTransition(pSM); if (transition === pSM) { pSM.call(attrY_featureY); } else { transitionFn(transition, attrY_featureY); } - } } } }; FeatureTicks.prototype.attrY_featureY = function(selection) { - console.log('attrY_featureY', selection.node(), this.axis1d.zoomedDomain) + console.log('attrY_featureY', selection.node(), this.axis1d.zoomedDomain); selection - .attr('y', (feature) => this.axis1d.featureY(feature)) -} + .attr('y', (feature) => this.axis1d.featureY(feature)); +}; /** * @property zoomed selects either .zoomedDomain or .blocksDomain. initially undefined (false). @@ -557,6 +559,7 @@ export default Component.extend(Evented, AxisEvents, AxisPosition, { oa : alias('drawMap.oa'), axisApi : alias('oa.axisApi'), + featuresCountsThreshold : alias('controls.view.featuresCountsThreshold'), /** flipRegion implies paths' positions should be updated. The region is * defined by brush so it is within the domain, so the domain does not change. @@ -591,16 +594,16 @@ export default Component.extend(Evented, AxisEvents, AxisPosition, { next(() => this.axis1dExists(this, true)); }, - willDestroyElement() { - next(() => this.axis1dExists(this, false)); - this._super(...arguments); - }, /*--------------------------------------------------------------------------*/ /** @return true if there is a brush on this axis. */ - brushed : computed( + brushed : computed('brushedRegion', function () { + let brushed = !! this.get('brushedRegion'); + return brushed; + }), + brushedRegion : computed( 'axis.id', 'axisBrush.brushedAxes.[]', /** oa.brushedRegions is a hash, and it is updated not replaced, @@ -613,10 +616,28 @@ export default Component.extend(Evented, AxisEvents, AxisPosition, { function () { let brushedRegions = this.get('oa.brushedRegions'), axisId = this.get('axis.id'), - brushed = !! brushedRegions[axisId]; + brushed = brushedRegions[axisId]; dLog('brushed', axisId, brushedRegions[axisId], this.get('axisBrush.brushedAxes')); return brushed; }), + brushedDomain : computed('brushedRegion', function () { + let + brushedRegion = this.get('brushedRegion'), + /** refBlockId */ + axisId = this.get('axis.id'), + brushedDomain = brushedRegion && this.get('axisApi').axisRange2Domain(axisId, brushedRegion); + return brushedDomain; + }), + + brushedBlocks : computed('brushed', 'block', 'zoomedDomain.{0,1}', function () { + let blocks; + if (this.brushed) { + blocks = this.get('dataBlocks'); + dLog('brushedBlocks', blocks, this); + } + return blocks || []; + }), + zoomed2 : computed('zoomed', 'domain', 'zoomedDomain', function () { let @@ -630,6 +651,23 @@ export default Component.extend(Evented, AxisEvents, AxisPosition, { return zoomed; }), + /** similar to isZoomedOut, this is quicker to evaluate because it + * only considers the fully-zoomed out case, which means that the + * total .featureCount for each block can be used instead of + * calculating .featuresCountIncludingZoom. + * i.e. if all .dataBlocks[] have block.featureCount < featuresCountsThreshold + */ + isZoomedRightOut() { + let out = ! 
this.zoomed; + if (out) { + let + featuresCountsThreshold = this.get('featuresCountsThreshold'); + out = ! this.dataBlocks.any((b) => b.featureCount <= featuresCountsThreshold); + dLog('isZoomedRightOut', out, featuresCountsThreshold, this.dataBlocks); + } + return out; + }, + /*--------------------------------------------------------------------------*/ /** axis-1d receives axisStackChanged and zoomedAxis from draw-map @@ -896,7 +934,7 @@ export default Component.extend(Evented, AxisEvents, AxisPosition, { axisFeatureCirclesBrushed(); /** Update the featureCount shown in the axis block title */ - this.axisTitleFamily(); + this.axisTitleTextBlockCount(); if (featureLength) dLog('featureLengthEffect', this.get('axis.id'), featureLength); @@ -925,6 +963,27 @@ export default Component.extend(Evented, AxisEvents, AxisPosition, { } }, + /** Update the display of the feature (loaded / total) count in the + * axis title text for the data blocks. + * + * This is a small part of draw-map.js : axisTitleFamily(), and it + * is used in response to receipt of features (possibly via paths), + * which may be via zoomedDomain change. So the usage is high + * frequency, and the remainder of axisTitleFamily() is not needed + * for these updates. + */ + axisTitleTextBlockCount() { + let subTitleS = this.get('axisSelectTextBlock'); + // dLog('axisTitleTextBlockCount', subTitleS.nodes(), subTitleS.node()); + subTitleS + .text(function (block) { return block.titleText(); }); + if (true || trace_stack) { + let nodes = subTitleS.nodes(), + lastNode = nodes.length ? nodes[nodes.length - 1] : null; + dLog('axisTitleTextBlockCount', nodes, lastNode); + } + }, + /** * Equivalent : this.get('axisS').selectAll(), which does a selection by id * from svgContainer through g.stack to the g.axis-outer. @@ -939,6 +998,14 @@ export default Component.extend(Evented, AxisEvents, AxisPosition, { return as; }), + axisSelectTextBlock : computed('axisSelect', function () { + let + gAxis = this.get('axisSelect'), + axisTitleS = gAxis.selectAll("g.axis-all > text"), + subTitleS = axisTitleS.selectAll("tspan.blockTitle"); + return subTitleS; + }), + /** d3.select g.groupName within g.axis-all > g.axis-1d * Create g.axis-1d and g.groupName if needed. */ @@ -1004,6 +1071,35 @@ export default Component.extend(Evented, AxisEvents, AxisPosition, { return akYs; }, + inDomain(feature) { + let + /** comment re. getAttrOrCP() in @see keyFn() */ + value = getAttrOrCP(feature, 'value'), // feature.get('value'), + domain = this.currentDomain, + overlap = intervalOverlap([value, domain]); + return overlap; + }, + inRange(feature) { + let + axisS = this.get('axisS'), + range0 = axisS.yRange2(), + overlap = this.inRangeR(feature); + return overlap; + }, + inRangeR(feature, range0) { + let + axisS = this.get('axisS'), + y = this.featureY(feature), + yScale = axisS.getY(), + value = getAttrOrCP(feature, 'value'), // feature.value, + yInterval = value.length ? value.map(yScale) : yScale(value), + overlap = value.length === 1 ? + inRange(yInterval[0], range0) : + value.length ? intervalOverlap([yInterval, range0]) : + inRange(yInterval, range0); + return overlap; + }, + /*--------------------------------------------------------------------------*/ /** @param [axisID, t] */ @@ -1101,7 +1197,9 @@ export default Component.extend(Evented, AxisEvents, AxisPosition, { function showText(text) { - this.set('headsUp.tipText', text); + if (! 
this.get('headsUp.isDestroying')) { + this.set('headsUp.tipText', text); + } } gAxis.selectAll('text') .on('mouseover', showText.bind(this, 'Ctrl-click to drag axis')) @@ -1190,6 +1288,7 @@ export default Component.extend(Evented, AxisEvents, AxisPosition, { } let axisName = this.get('axis.id'); Stacked.axis1dRemove(axisName, this); + next(() => this.axis1dExists(this, false)); this._super(...arguments); }, diff --git a/frontend/app/components/draw/axis-blocks.js b/frontend/app/components/draw/axis-blocks.js index 70fdf6269..da04661de 100644 --- a/frontend/app/components/draw/axis-blocks.js +++ b/frontend/app/components/draw/axis-blocks.js @@ -22,6 +22,8 @@ export default Component.extend({ blockService: service('data/block'), queryParams: service('query-params'), urlOptions : alias('queryParams.urlOptions'), + axisZoom: service('data/axis-zoom'), + /** The allocated block space is used for either axis-tracks or axis-charts * (featuresCounts). This name is used to identify the allocated space. */ @@ -92,7 +94,8 @@ export default Component.extend({ 'blocks.[]', 'blocks.@each.featuresForAxis', // axis1d.domain also reflects zoomedDomain - 'axis1d.axis.limits.{0,1}', 'axis1d.zoomedDomainDebounced.{0,1}', + 'axis1d.axis.limits.{0,1}', + 'axis1d.{zoomedDomainDebounced,zoomedDomainThrottled}.{0,1}', function () { let blocks = this.get('blocks'); @@ -104,10 +107,43 @@ export default Component.extend({ let /** featuresForAxis() uses getBlockFeaturesInterval(), which is also used by * models/axis-brush.js */ - blockFeatures = blocks.map(function (b) { return b.get('featuresForAxis'); } ); + blockFeatures = blocks.forEach((b) => this.block_get_featuresForAxis(b) ); /* no return value - result is displayed by axis-track : showTrackBlocks() with data * collated by tracksTree(), and axis-charts : featureCountBlocks() and drawChart(). */ }), + /** For most blocks - simply request featuresForAxis. + * For HighDensity blocks, i.e. featuresCountIncludingZoom is large, + * and currentZoomPanIsWheel, delay the request until the zoom/pan is finished. + */ + block_get_featuresForAxis (b) { + let featuresInScope = b.get('featuresCountIncludingZoom'); + /** blockFeaturesCounts for 10M features is currently about 1min, + * which is not very useful because the bins from the initial + * (zoomed-out) request are still an appropriate size, so there is + * no benefit from a large-scale request, only delay, and the user + * will likely have zoomed somewhere else by the time of the + * response, so delay the request until featuresInScope < 5e5 + * which will give ~3sec response. + */ + if (featuresInScope > 5e5) { + dLog('featuresCountIncludingZoom', b.id, featuresInScope, 'featuresFor', 'skip'); + } else if (this.get('axisZoom.currentZoomPanIsWheel') && + (featuresInScope > 1e4)) { + dLog('featuresCountIncludingZoom', b.id, featuresInScope, 'featuresFor'); + let + axis1d = this.get('axis1d'), + endOfZoom = axis1d.get('nextEndOfDomainDebounced'); + if (! 
endOfZoom) { + b.get('featuresForAxis'); + } else { + endOfZoom.then(() => { + console.log('featuresForBlocksRequestEffect endOfZoom', b.id); + b.get('featuresForAxis'); + }); + } } else { + b.get('featuresForAxis'); + } + } /*--------------------------------------------------------------------------*/ diff --git a/frontend/app/components/draw/axis-brush.js b/frontend/app/components/draw/axis-brush.js index 75c13012d..2764639ef 100644 --- a/frontend/app/components/draw/axis-brush.js +++ b/frontend/app/components/draw/axis-brush.js @@ -1,9 +1,12 @@ import { computed } from '@ember/object'; import { alias } from '@ember/object/computed'; import Evented from '@ember/object/evented'; +import { on } from '@ember/object/evented'; import Component from '@ember/component'; import { inject as service } from '@ember/service'; import { throttle, debounce } from '@ember/runloop'; +import DS from 'ember-data'; + import PathData from './path-data'; @@ -84,6 +87,12 @@ export default Component.extend(Evented, AxisEvents, { block = this.get('block'), /** axis-brush object in store */ record = this.get('pathsP').ensureAxisBrush(block); + + let axis1d = block.get('block.axis.axis1d'); + if (axis1d && ! axis1d.axisBrushComp) { + axis1d.axisBrushComp = this; + } + if (trace_axisBrush) dLog('block', block.id, block, record); return record; @@ -103,15 +112,67 @@ export default Component.extend(Evented, AxisEvents, { }), features : computed('axisBrush.features.[]', 'zoomCounter', function () { - console.log('features', this); + console.log('features', this.zoomCounter, this); let featuresP = this.get('axisBrush.features'); featuresP.then((features) => { + this.receivedLengths(features); + /** features is now an array of results, 1 per block, so .length is the number of data blocks. */ if (features && features.length) throttle(this, () => ! this.isDestroying && this.draw(features), 200, false); }); return featuresP; }), + /*--------------------------------------------------------------------------*/ + + featuresReceived : undefined, + + initData : on('init', function () { + this.set('featuresReceived', {}); + }), + + /** round to 1 decimal place. + * @param featuresCount undefined or number + */ + round1(featuresCount) { + return featuresCount && Math.round(featuresCount * 10) / 10; + }, + + /** Augment axis1d.brushedBlocks with features.length and block + * .featuresCountIncludingZoom (later : featuresCountInBrush) + */ + brushedBlocks : computed( + 'axis.axis1d.brushedBlocks.[]', + 'block.brushedDomain.{0,1}', + function () { + let + blocks = this.get('axis.axis1d.brushedBlocks') || [], + brushedBlocks = blocks.map((block, i) => { + let + featureCountInBrush = this.round1(block.get('featuresCountIncludingBrush')), + featuresCount = this.round1(block.get('featuresCountIncludingZoom')); + return {block, featuresCount, featureCountInBrush}; + }); + dLog('brushedBlocks', brushedBlocks); + return brushedBlocks; + }), + + receivedLengths(featuresResults) { + /** could use an immutable structure, then template get would depend on it. */ + featuresResults.forEach((featuresLengthResult) => { + if (featuresLengthResult) { + let length = featuresLengthResult.value.length; + /** if length, get blockId from value[0], otherwise not + * straightforward to use this result. 
*/ + if (length) { + let blockId = length && featuresLengthResult.value[0].blockId; + this.featuresReceived[blockId] = length; + } + } + }); + }, + + /*--------------------------------------------------------------------------*/ isAxis(axisID) { diff --git a/frontend/app/components/draw/axis-ticks-selected.js b/frontend/app/components/draw/axis-ticks-selected.js index 82e6232da..731cef286 100644 --- a/frontend/app/components/draw/axis-ticks-selected.js +++ b/frontend/app/components/draw/axis-ticks-selected.js @@ -1,5 +1,5 @@ import { debounce, throttle, bind as run_bind } from '@ember/runloop'; -import { computed } from '@ember/object'; +import { computed, observer } from '@ember/object'; import Component from '@ember/component'; import { inject as service } from '@ember/service'; import { on } from '@ember/object/evented'; @@ -9,6 +9,8 @@ import { task, timeout, didCancel } from 'ember-concurrency'; import AxisEvents from '../../utils/draw/axis-events'; import { transitionEndPromise } from '../../utils/draw/d3-svg'; +/* global d3 */ + const trace = 0; const dLog = console.debug; @@ -61,7 +63,7 @@ export default Component.extend(AxisEvents, { /** Render elements which are dependent on axis scale - i.e. the axis ticks. */ - axisScaleEffect : computed('axis1d.domainChanged', function () { + axisScaleEffect : observer('axis1d.domainChanged', function () { let axisScaleChanged = this.get('axis1d.domainChanged'); let axisID = this.get('axisId'); // if (trace) @@ -88,26 +90,37 @@ export default Component.extend(AxisEvents, { renderTicks(axisID) { if (trace) dLog("renderTicks in ", CompName, axisID); - let featureTicks = this.get('axis1d.featureTicks'); - let block = this.axis1d.axis, - /** clickedFeatures will be undefined or an array with .length > 1 */ - clickedFeatures = this.get('selected.clickedFeaturesByAxis').get(block); - if (featureTicks || clickedFeatures) { + let + axis1d = this.get('axis1d'), + featureTicks = axis1d.get('featureTicks'); + if (featureTicks) { + let block = axis1d.axis, + clickedFeaturesMap = this.get('selected.clickedFeaturesByAxis'), + /** clickedFeatures will be undefined or an array with .length > 1 + * + * clickedFeaturesByBlock are included by featuresOfBlockLookup(); + * call that for data blocks if clickedFeaturesByAxis is not empty . + */ + clickedFeatures = clickedFeaturesMap && clickedFeaturesMap.get(block); + // if ((featuresInBlocks[blockId] for any of the axis' data blocks) || clickedFeatures) { featureTicks.showTickLocations( this.featuresOfBlockLookup.bind(this), false, /* hover text not required on axis feature triangles. */ - 'foundFeatures', false, + 'foundFeatures', true, this.clickTriangle.bind(this) ); - } + featureTicks.showSpanningLine(this.selectedFeaturesOfBlockLookup.bind(this, 'shiftClickedFeatures')); // currently called via didRender(), so ticks and labels are both updated. this.renderLabels(axisID); + } }, renderLabels(axisID) { - let featureTicks = this.get('axis1d.featureTicks'); let - block = this.axis1d.axis; + axis1d = this.get('axis1d'), + featureTicks = axis1d.get('featureTicks'); + let + block = axis1d.axis; /** if this block had labelledFeatures, and in this update they (1) are * toggled off, then labelledFeatures is undefined, but we still want to * call showLabels() to .remove() the existing -s. @@ -183,7 +196,8 @@ export default Component.extend(AxisEvents, { let blockId = block.get('id'); /** return [] for blocks which don't have features in the search result. */ let features = featuresInBlocks ? 
(featuresInBlocks[blockId] || []) : []; - let clickedFeatures = this.get('selected.clickedFeaturesByBlock').get(block); + let clickedFeaturesByBlock = this.get('selected.clickedFeaturesByBlock'), + clickedFeatures = clickedFeaturesByBlock && clickedFeaturesByBlock.get(block); if (clickedFeatures && clickedFeatures.length) { features = features.concat(clickedFeatures); } @@ -198,9 +212,11 @@ export default Component.extend(AxisEvents, { * @param block Ember object */ selectedFeaturesOfBlockLookup(listName, block) { - let features = this.get('selected.' + listName + 'ByBlock').get(block); + let + map = this.get('selected.' + listName + 'ByBlock'), + features = map && map.get(block); if (trace) - dLog('selectedFeaturesOfBlockLookup', listName, featuresInBlocks, block, block.id, features); + dLog('selectedFeaturesOfBlockLookup', listName, this.featuresInBlocks, block, block.id, features); return features; }, @@ -235,7 +251,7 @@ export default Component.extend(AxisEvents, { ctrlHandler(event) { // as in : query-params.js : optionsToDom() d3.select('body') - .classed("ctrl-modifier", event.ctrlKey) + .classed("ctrl-modifier", event.ctrlKey); } diff --git a/frontend/app/components/draw/block-adj.js b/frontend/app/components/draw/block-adj.js index 7e22aebad..130444523 100644 --- a/frontend/app/components/draw/block-adj.js +++ b/frontend/app/components/draw/block-adj.js @@ -168,7 +168,12 @@ export default Component.extend(Evented, AxisEvents, { throw error; } }); + } else if (! this.get('blockAdj.receivedAll')[prType.typeName]) { + /** drawCurrent() will do this if more paths required. + * If 0 paths then request paths. */ + this.incrementProperty('blockAdj.pathsRequestCount'); } + // length returned by paths{,Aliases}ResultLength is currently just used in logging, not functional. return length; }, @@ -246,6 +251,7 @@ export default Component.extend(Evented, AxisEvents, { * pathsResultType e.g. pathsResultTypes.{direct,alias} */ pathsApiResultType.flowName = pathsResultTypes.alias.flowName; + pathsApiResultType.typeName ||= pathsResultTypes.alias.flowName; pathsApiResultType.fieldName = pathsResultTypes.alias.fieldName; let diff --git a/frontend/app/components/draw/block-view.js b/frontend/app/components/draw/block-view.js index 9af19110f..e231adbba 100644 --- a/frontend/app/components/draw/block-view.js +++ b/frontend/app/components/draw/block-view.js @@ -1,4 +1,4 @@ -import EmberObject, { computed } from '@ember/object'; +import EmberObject, { computed, observer } from '@ember/object'; import { alias } from '@ember/object/computed'; import Component from '@ember/component'; import { inject as service } from '@ember/service'; @@ -7,6 +7,7 @@ import { ensureBlockFeatures } from '../../utils/feature-lookup'; import { subInterval } from '../../utils/draw/zoomPanCalcs'; import { intervalSize, intervalOverlapCoverage } from '../../utils/interval-calcs'; import { binEvenLengthRound } from '../../utils/draw/interval-bins'; +import { featuresCountsResultsSansOverlap } from '../../utils/draw/featuresCountsResults'; /*----------------------------------------------------------------------------*/ @@ -43,8 +44,14 @@ export default Component.extend({ }, /** If the block contains chartable data, collate it into .blocksData.blockData, for axis-charts. 
+ * @return undefined */ - blockFeatures : computed('block', 'block.featuresLengthThrottled', 'axis.axis1d.domainChanged', function () { + blockFeaturesEffect : observer( + 'block', 'block.featuresLengthThrottled', + // 'axis.axis1d.domainChanged', + 'axis.axis1d.blocksDomain', + 'axis.axis1d.{zoomedDomainThrottled,zoomedDomainDebounced}.{0,1}', + function () { if (this.get('block.isChartable')) { let features = this.get('block.features'); let domain = this.get('axis.axis1d.domainChanged'); @@ -70,7 +77,10 @@ export default Component.extend({ * to be read by axis-charts : featureCountBlocks etc and drawn. */ featuresCounts : computed( - 'block', 'block.featuresCountsInZoom.[]', 'axis.axis1d.domainChanged', + 'block', 'block.featuresCountsInZoom.[]', + // 'axis.axis1d.domainChanged', + 'axis.axis1d.blocksDomain', + 'axis.axis1d.{zoomedDomainThrottled,zoomedDomainDebounced}.{0,1}', // featuresCountsNBins is used in selectFeaturesCountsResults() 'blockService.featuresCountsNBins', function () { @@ -100,6 +110,7 @@ export default Component.extend({ /** id.min may be 0 */ dataTypeName = (id.min !== undefined) ? 'featureCountAutoData' : 'featureCountData'; this.setBlockFeaturesData(dataTypeName, featuresCounts); + dLog('featuresCounts', featuresCounts.length, this.axis.axis1d.zoomedDomainThrottled); } return featuresCounts; @@ -170,6 +181,8 @@ export default Component.extend({ // choose the result with the smallest binSize (this is now incorporated into the above sort) // .sortBy('binSize') .slice(0,1); + } else { + selectedResults = featuresCountsResultsSansOverlap(selectedResults, lengthRounded); } return selectedResults; }, diff --git a/frontend/app/components/draw/stacks-view.js b/frontend/app/components/draw/stacks-view.js index 5e06125e3..8ec7db5f3 100644 --- a/frontend/app/components/draw/stacks-view.js +++ b/frontend/app/components/draw/stacks-view.js @@ -14,11 +14,13 @@ import { axisId2Name*/ } from '../../utils/stacks'; +import { _internalModel_data } from '../../utils/ember-devel'; + /* global d3 */ const dLog = console.debug; -function blockInfo(block) { return block && [block.id, block.store.name, block.get('_internalModel.__data'), block.get('isCopy'), block.get('_meta._origin')]; } +function blockInfo(block) { return block && [block.id, block.store.name, block.get(_internalModel_data), block.get('isCopy'), block.get('_meta._origin')]; } export default Component.extend({ block: service('data/block'), @@ -65,7 +67,7 @@ export default Component.extend({ blocks = blocks.filter((b) => b.get('isViewed')); mapByReferenceBlock[blocks[0].id] = blocks; if (true /*trace*/ ) - dLog('axesBlocks', referenceName, scope, blocks.mapBy('_internalModel.__data')); + dLog('axesBlocks', referenceName, scope, blocks.mapBy(_internalModel_data)); } } } @@ -91,8 +93,16 @@ export default Component.extend({ let original = blocks.filter((b) => !b.get('isCopy')); if (original.length) { block = original[0]; - if (original.length > 1) // not expected to happen - dLog('axesP', axisID, 'choosing [0] from', original.map(blockInfo)); + if (original.length > 1) { + let originalReferences = original.filter((b) => !b.get('isData')); + if (originalReferences.length == 1) { + block = originalReferences[0]; + } else if (originalReferences.length > 1) { // not expected to happen + dLog('axesP', axisID, 'choosing [0] from', originalReferences.map(blockInfo)); + } else { + dLog('axesP', axisID, 'no original reference, choosing [0] from', original.map(blockInfo)); + } + } } else { dLog('axesP', axisID, 'no original, choosing 
[0] from', blocks.map(blockInfo)); diff --git a/frontend/app/components/elem/panel-container.js b/frontend/app/components/elem/panel-container.js index 1e12c1f5a..51098e4b9 100644 --- a/frontend/app/components/elem/panel-container.js +++ b/frontend/app/components/elem/panel-container.js @@ -1,6 +1,18 @@ import { computed } from '@ember/object'; import Component from '@ember/component'; +const dLog = console.debug; + +/** Used to group elements in the left and right side panels. + * Owns the flag and action which enables display of the child + * components after the heading. + * The first child is expected to be panel-heading, which provides a + * toggle button for showComponent / toggleShow. + * + * @param showComponent optional, default true : if given, provides + * the initial value of showComponent. If true, the child elements + * and components are displayed. + */ export default Component.extend({ // attributes // classes @@ -8,5 +20,13 @@ export default Component.extend({ panelClass: computed('state', function() { return 'panel panel-' + this.state }), + showComponent : true, + /** later can make this @action instead of passing panelContainer as action param. */ + // @action + toggleShow(panelContainer) { + // this is currently panel-heading + dLog('toggleShow', panelContainer.showComponent); + panelContainer.toggleProperty('showComponent'); + } // actions }); diff --git a/frontend/app/components/elem/panel-heading.js b/frontend/app/components/elem/panel-heading.js index dd21ffaf9..ad2eee2de 100644 --- a/frontend/app/components/elem/panel-heading.js +++ b/frontend/app/components/elem/panel-heading.js @@ -1,5 +1,16 @@ import Component from '@ember/component'; +/** The heading line which is the first child a panel-container. + * + * Provides a solid rectangle, and a toggle button to enable / disable + * display of the child components after the heading. + * The rectangle colour is currently rgb(51, 122, 183) - + * $panel-primary-border (ember-bootstrap/bootstrap/_panels.scss), + * #337ab7 in app.scss, can abstract this, e.g. appPrimaryColour. + * + * @param panelContainer parent panel-container, used in template for + * panelContainer.toggleShow action and panelContainer.showComponent flag + */ export default Component.extend({ tagName: 'div', // attributes diff --git a/frontend/app/components/new-datasource-modal.js b/frontend/app/components/new-datasource-modal.js index e365dcf3b..1bb69819f 100644 --- a/frontend/app/components/new-datasource-modal.js +++ b/frontend/app/components/new-datasource-modal.js @@ -36,6 +36,15 @@ export default Component.extend({ dLog('onConfirm', 'empty input', host, user, password.length); } else { + if (host.match(/\/mapview\/.*/)) { + host = host.replace(/\/mapview\/.*/, ''); + $('input[name=host]', this.element).val(host); + } + if (! host.match(/^https?:\/\//)) { + host = "https://" + host; + $('input[name=host]', this.element).val(host); + } + this.set('errorText', null); let promise = this.get('apiServers').ServerLogin(host, user, password); promise diff --git a/frontend/app/components/panel/left-panel.js b/frontend/app/components/panel/left-panel.js index 2f561f80c..04e4af66a 100644 --- a/frontend/app/components/panel/left-panel.js +++ b/frontend/app/components/panel/left-panel.js @@ -18,8 +18,15 @@ export default Component.extend({ loadBlock(block) { this.sendAction('loadBlock', block); }, - changeTab(tab) { - $('.nav-tabs a[href="#left-panel-' + tab + '"]').tab('show'); + /** Change to the named tab. 
+ * @param select this is @action select() defined in ember-bootstrap/addon/components/base/bs-tab.js + * @param tab name of tab to go to; without the prefix 'left-panel-' + * @desc Usage : + * left-panel.hbs : changeTab=(action 'changeTab' tab.select ) + * manage-explorer.hbs : onClick=(action "changeTab" "upload") + */ + changeTab(select, tab) { + select('left-panel-' + tab); }, selectBlock(block) { this.sendAction('selectBlock', block); diff --git a/frontend/app/components/panel/manage-dataset.js b/frontend/app/components/panel/manage-dataset.js index 0befebe5d..10fed01e4 100644 --- a/frontend/app/components/panel/manage-dataset.js +++ b/frontend/app/components/panel/manage-dataset.js @@ -20,10 +20,12 @@ export default ManageBase.extend({ apiHost : alias("dataset.store.name"), datasetMeta: Ember.computed("dataset._meta", function() { - return this.get("dataset._meta") || {} + return this.get("dataset._meta") || {}; }), copyToCurrentMeta : observer('dataset', function () { - dLog('copyToCurrentMeta', this.get('currentMeta'), this.get('dataset._meta')); + if (trace > 2) { + dLog('copyToCurrentMeta', this.get('currentMeta'), this.get('dataset._meta')); + } this.set('currentMeta', this.get('dataset._meta')); this.updateViewer(); }), diff --git a/frontend/app/components/panel/manage-features.js b/frontend/app/components/panel/manage-features.js index fd9d45592..8387b441b 100644 --- a/frontend/app/components/panel/manage-features.js +++ b/frontend/app/components/panel/manage-features.js @@ -64,8 +64,29 @@ export default ManageBase.extend({ return include }) } + filtered = this.showRefAlt(filtered); return filtered }), + /** if .Feature is just "chr"* and .feature.blockId.isSNP and it has + * .values{ref,alt} then show ref/alt in place of .Feature. + */ + showRefAlt(filtered) { + filtered = filtered.map((f) => { + let {Feature, ...rest} = f; + if (Feature.startsWith('chr')) { + let feature = rest.feature; + // copied from axis-tracks.js : tracksTree, maybe factor depending on format changes + let values = feature.get('blockId.isSNP') && feature.get('values'); + if (values && (values.ref || values.alt)) { + let refAlt = (values.ref || '') + '/' + (values.alt || ''); + Feature = refAlt; + } + } + rest.Feature = Feature; + return rest; + }); + return filtered; + }, actions: { changeFilter: function(f) { this.set('filter', f) diff --git a/frontend/app/components/panel/sequence-search.js b/frontend/app/components/panel/sequence-search.js new file mode 100644 index 000000000..ece67a9bd --- /dev/null +++ b/frontend/app/components/panel/sequence-search.js @@ -0,0 +1,234 @@ +import Component from '@ember/component'; +import { bind, once, later, throttle, debounce } from '@ember/runloop'; +import { inject as service } from '@ember/service'; +import { observer, computed } from '@ember/object'; +import { alias } from '@ember/object/computed'; + + +const dLog = console.debug; + +export default Component.extend({ + auth: service(), + + /** limit rows in result */ + resultRows : 50, + /** true means add / upload result to db as a Dataset */ + addDataset : false, + + classNames: ['col-xs-12'], + + /*--------------------------------------------------------------------------*/ + /** copied from data-base.js; may factor or change the approach. 
*/ + isProcessing: false, + successMessage: null, + errorMessage: null, + warningMessage: null, + progressMsg: '', + setError(msg) { + this.setProperties({ + isProcessing: false, + errorMessage: msg, + }); + }, + + clearMsgs() { + this.setProperties({ + successMessage: null, + errorMessage: null, + warningMessage: null, + nameWarning : null, + }); + }, + + /*--------------------------------------------------------------------------*/ + /** copied from data-csv.js; could factor as a mixin. */ + newDatasetName: '', + nameWarning: null, + selectedParent: '', + /** Checks if entered dataset name is already taken in dataset list + * Debounced call through observer */ + isDupName: function() { + { + let datasetName = this.get('newDatasetName'); + let datasets = this.get('datasets'); + let matched = datasets.findBy('name', datasetName); + if(matched){ + this.set('nameWarning', `Dataset name '${datasetName}' is already in use`); + return true; + } + } + this.set('nameWarning', null); + return false; + }, + onNameChange: observer('newDatasetName', function() { + debounce(this, this.isDupName, 500); + }), + onSelectChange: observer('selectedParent', function() { + this.checkInputs(); + }), + + /*--------------------------------------------------------------------------*/ + + loading : alias('taskGet.isRunning'), + + refreshClassNames : computed('loading', function () { + let classNames = "btn btn-info pull-right"; + return this.get('loading') ? classNames + ' disabled' : classNames; + }), + + /*--------------------------------------------------------------------------*/ + + /** Filter for those datasets which have tag : BlastDb + */ + datasetsToSearch : computed('datasets.[]', function () { + // could also check d.get('_meta.type') === 'Genome' + let datasetsWithBlastDb = this.get('datasets').filter((d) => d.hasTag('BlastDb')); + return datasetsWithBlastDb; + }), + + /*--------------------------------------------------------------------------*/ + + // actions + actions: { + // copied from feature-list, may not be required + inputIsActive() { + dLog('inputIsActive'); + }, + paste: function(event) { + let text = event && (event.target.value || event.originalEvent.target.value); + console.log('paste', event, text.length); + /** this action function is called before jQuery val() is updated. */ + later(() => { + this.set('text', text); + // this.dnaSequenceInput(/*text*/); + }, 500); + }, + + dnaSequenceInput(text, event) { + dLog("dnaSequenceInput", this, text.length, event.keyCode); + this.set('text', text); + // throttle(this.get('dnaSequenceInputBound'), 2000); + }, + + search() { + if (this.checkInputs()) { + let text = this.get('text'); + this.dnaSequenceInput(text); + } + } + + }, + + /*--------------------------------------------------------------------------*/ + + checkInputs() { + let ok; + this.clearMsgs(); + + let datasetName = this.get('newDatasetName'); + let parentName = this.get('selectedParent'); + if (! parentName || ! parentName.length || (parentName === 'None')) { + this.set('nameWarning', 'Please select a reference genome to search'); + ok = false; + } else if (this.get('addDataset') && ! 
(datasetName && datasetName.length)) { + this.set('nameWarning', 'Please enter name for the dataset to add containing the search results.o'); + ok = false; + } else if (this.get('addDataset') && this.isDupName()) { + ok = false; + } else { + ok = true; + } + return ok; + }, + inputsOK : computed('selectedParent', 'addDataset', 'newDatasetName', 'datasets.[]', function() { + return this.checkInputs(); + }), + searchButtonDisabled : computed('inputsOK', 'isProcessing', function() { + return ! this.get('inputsOK') || this.get('isProcessing'); + }), + + /** throttle depends on constant function */ + dnaSequenceInputBound : computed(function() { + return bind(this, this.dnaSequenceInput); + }), + + dnaSequenceInput(rawText) { + // dLog("dnaSequenceInput", rawText && rawText.length); + /** if the user has use paste or newline then .text is defined, + * otherwise use jQuery to get it from the textarea. + */ + if (! rawText) { + let text$ = $('textarea', this.element); + /** before textarea is created, .val() will be undefined. */ + rawText = text$.val(); + } + if (rawText) + { + let + seq = rawText; + /* + .replaceAll(/[ \n\t]+/g, "") + .toLowerCase(); + */ + dLog("dnaSequenceInput", seq); + let + /** based on serverTabSelected or primary */ + apiServer = this.get('controls.apiServerSelectedOrPrimary'), + auth = this.get('auth'), + parent = "Triticum_aestivum_IWGSC_RefSeq_v1.0", + searchType = 'blast', + promise = auth.dnaSequenceSearch( + apiServer, + seq, parent, searchType, + this.get('resultRows'), + this.get('addDataset'), + this.get('newDatasetName'), + /*options*/{/*dataEvent : receivedData, closePromise : taskInstance*/}); + + promise.then( + (data) => { + dLog('dnaSequenceInput', data.features.length); + this.set('data', data.features); + if (this.get('addDataset') && this.get('replaceDataset')) { + this.unviewDataset(this.get('newDatasetName')); + } + }, + // copied from data-base.js - could be factored. + (err, status) => { + let errobj = err.responseJSON.error; + console.log(errobj); + let errmsg = null; + if (errobj.message) { + errmsg = errobj.message; + } else if (errobj.errmsg) { + errmsg = errobj.errmsg; + } else if (errobj.name) { + errmsg = errobj.name; + } + this.setError(errmsg); + this.scrollToTop(); + } + + ); + } + }, + + /*--------------------------------------------------------------------------*/ + /* copied from file-drop-zone.js, can factor if this is retained. */ + + /** Unview the blocks of the dataset which has been replaced by successful upload. 
+ */ + unviewDataset(datasetName) { + let + store = this.get('apiServers').get('primaryServer').get('store'), + replacedDataset = store.peekRecord('dataset', datasetName), + viewedBlocks = replacedDataset.get('blocks').toArray().filterBy('isViewed'), + blockService = this.get('blockService'), + blockIds = viewedBlocks.map((b) => b.id); + dLog('unviewDataset', datasetName, blockIds); + blockService.setViewed(blockIds, false); + } + + /*--------------------------------------------------------------------------*/ + +}); diff --git a/frontend/app/components/panel/upload-data.js b/frontend/app/components/panel/upload-data.js index 79d84f92b..3e5667b62 100644 --- a/frontend/app/components/panel/upload-data.js +++ b/frontend/app/components/panel/upload-data.js @@ -10,10 +10,11 @@ export default ManageBase.extend({ filterOptions: { + 'fileDrop': {'formal': 'fileDrop', 'icon': 'cloud-upload'}, // or upload 'cell': {'formal': 'CSV', 'icon': 'th-large'}, 'json': {'formal': 'JSON', 'icon': 'list-alt'}, }, - filter: 'cell', + filter: 'fileDrop', actions: { changeFilter: function(f) { diff --git a/frontend/app/components/panel/upload/blast-results.js b/frontend/app/components/panel/upload/blast-results.js new file mode 100644 index 000000000..c1193fe9f --- /dev/null +++ b/frontend/app/components/panel/upload/blast-results.js @@ -0,0 +1,143 @@ +import Component from '@ember/component'; +import { observer, computed } from '@ember/object'; +import { inject as service } from '@ember/service'; +import { later as run_later } from '@ember/runloop'; + + +import config from '../../../config/environment'; + + +const dLog = console.debug; + +/* global Handsontable */ +/* global $ */ + +/** Display a table of results from sequence-search API request + * /Feature/dnaSequenceSearch + */ +export default Component.extend({ + + /** copied from data-base.js, not used yet */ + isProcessing: false, + successMessage: null, + errorMessage: null, + warningMessage: null, + progressMsg: '', + + dataMatrix : computed('data.[]', function () { + let cells = this.get('data').map((r) => r.split('\t')); + return cells; + }), + dataMatrixEffect : computed('dataMatrix.[]', function () { + let table = this.get('table'); + if (table) { + table.loadData(this.get('dataMatrix')); + } + }), + + didRender() { + // this.showTable(); + }, + + /*--------------------------------------------------------------------------*/ + /** comments for activeEffect() and shownBsTab() in @see data-csv.js + * @desc + * active is passed in from parent component sequence-search to indicate if + * the tab containing this sub component is active. + */ + activeEffect : computed('active', function () { + let active = this.get('active'); + if (active) { + this.shownBsTab(); + } + }), + shownBsTab() { + run_later(() => this.showTable(), 500); + }, + /*--------------------------------------------------------------------------*/ + + + showTable() { + // Ensure table is created when tab is shown + let table = this.get('table'); + if (! table) { + this.createTable(); + } else { + // trigger rerender when tab is shown + table.updateSettings({}); + // or .updateSettings({ data : ... }) + table.loadData(this.get('dataMatrix')); + } + }, + + createTable() { + const cName = 'upload/blast-results'; + const fnName = 'createTable'; + dLog('createTable'); + var that = this; + $(function() { + let eltId = 'blast-results-hotable'; + let hotable = $('#' + eltId)[0]; + if (! 
hotable) { + console.warn(cName, fnName, ' : #', eltId, ' not found', that); + return; // fail + } + /** +blast output columns are +query ID, subject ID, % identity, length of HSP (hit), # mismatches, # gaps, query start, query end, subject start, subject end, e-value, score, query length, subject length + */ + var table = new Handsontable(hotable, { + data: [['', '', '', '', '', '', '', '', '', '', '', '', '', '']], + // minRows: 20, + rowHeaders: true, + /* + columns: [ + { + data: 'name', + type: 'text' + }, + { + data: 'block', + type: 'text' + }, + { + data: 'val', + type: 'numeric', + numericFormat: { + pattern: '0,0.*' + } + } + ], + */ + colHeaders: [ + 'query ID', 'subject ID', '% identity', 'length of HSP (hit)', '# mismatches', '# gaps', 'query start', 'query end', 'subject start', 'subject end', 'e-value', 'score', 'query length', 'subject length' + ], + height: 500, + // colWidths: [100, 100, 100], + manualRowResize: true, + manualColumnResize: true, + manualRowMove: true, + manualColumnMove: true, + contextMenu: true, + /* + afterChange: function() { + }, + afterRemoveRow: function() { + }, + */ + /* see comment re. handsOnTableLicenseKey in frontend/config/environment.js */ + licenseKey: config.handsOnTableLicenseKey + }); + that.set('table', table); + + }); + }, + + + clearTable() { + var table = this.get('table'); + table.updateSettings({data:[]}); + }, + + +}); diff --git a/frontend/app/components/panel/upload/data-base.js b/frontend/app/components/panel/upload/data-base.js index af87907a3..8d4272b39 100644 --- a/frontend/app/components/panel/upload/data-base.js +++ b/frontend/app/components/panel/upload/data-base.js @@ -82,5 +82,41 @@ export default Component.extend({ this.clearMsgs(); } }, + }, + + /** + * @param data {fileName, data} + * @return promise of completion of ajax API operation. + * This is a jQuery promise, jqXHR. refn https://api.jquery.com/jquery.ajax/. + * + * This promise does not include the refreshDatasets API request + * which this function performs after this promise completes. 
+ */ + uploadData(data) { + let promise = + this.get('auth').uploadData(data, this.updateProgress.bind(this)); + promise + .then((res) => { + this.setSuccess(); + this.scrollToTop(); + // On complete, trigger dataset list reload + // through controller-level function + this.get('refreshDatasets')(); + }, (err, status) => { + let errobj = err.responseJSON.error; + console.log(errobj); + let errmsg = null; + if (errobj.message) { + errmsg = errobj.message; + } else if (errobj.errmsg) { + errmsg = errobj.errmsg; + } else if (errobj.name) { + errmsg = errobj.name; + } + this.setError(errmsg); + this.scrollToTop(); + }); + return promise; } + }); diff --git a/frontend/app/components/panel/upload/data-json.js b/frontend/app/components/panel/upload/data-json.js index 352dda882..bee660b06 100644 --- a/frontend/app/components/panel/upload/data-json.js +++ b/frontend/app/components/panel/upload/data-json.js @@ -9,27 +9,7 @@ export default UploadBase.extend({ var that = this; reader.onload = function(e) { let data = {data: reader.result, fileName: f.name}; - that.get('auth').uploadData(data, that.updateProgress.bind(that)) - .then(function(res){ - that.setSuccess(); - that.scrollToTop(); - // On complete, trigger dataset list reload - // through controller-level function - that.get('refreshDatasets')(); - }, function(err, status) { - let errobj = err.responseJSON.error; - console.log(errobj); - let errmsg = null; - if (errobj.message) { - errmsg = errobj.message; - } else if (errobj.errmsg) { - errmsg = errobj.errmsg; - } else if (errobj.name) { - errmsg = errobj.name; - } - that.setError(errmsg); - that.scrollToTop(); - }); + that.uploadData(data); }; reader.readAsBinaryString(f); this.setProcessing(); diff --git a/frontend/app/components/panel/upload/file-drop-zone.js b/frontend/app/components/panel/upload/file-drop-zone.js new file mode 100644 index 000000000..3d2f8baa8 --- /dev/null +++ b/frontend/app/components/panel/upload/file-drop-zone.js @@ -0,0 +1,110 @@ +import { inject as service } from '@ember/service'; + +const dLog = console.debug; + +import UploadBase from './data-base'; + +/*----------------------------------------------------------------------------*/ + +/** Convert ArrayBuffer to String, using single-char operations. + */ +function arrayBufferToString1Char(buffer) { + /* Based on https://stackoverflow.com/a/60782610 Anthony O. + * Similar using reduce() : https://stackoverflow.com/a/60301316 + */ + let binary = ''; + let bytes = new Uint8Array(buffer); + let len = bytes.byteLength; + for (let i = 0; i < len; i++) { + binary += String.fromCharCode(bytes[i]); + } + return binary; +} + + +/** Convert ArrayBuffer to String, using 2^16 Byte chunks. + * from : https://stackoverflow.com/a/20604561 Ryan Weinstein + * "... about 20 times faster than using blob. It also works for large strings of over 100mb." 
+ */ +function arrayBufferToString(buffer){ + var bufView = new Uint8Array(buffer); + var length = bufView.length; + var result = ''; + var addition = Math.pow(2,16)-1; + + for (var i = 0; i < length; i += addition) { + if (i + addition > length) { + addition = length - i; + } + result += String.fromCharCode.apply(null, bufView.subarray(i, i + addition)); + } + + return result; +} + + +/*----------------------------------------------------------------------------*/ + + +export default UploadBase.extend({ + apiServers : service(), + blockService : service('data/block'), + + replaceDataset : true, + + uploadSpreadsheet(file) { + const + blob = file.blob, + queue = file.queue; + dLog( + 'uploadSpreadsheet', file.name, + blob.size, + blob.type, + blob.lastModifiedDate, + blob.lastModified, + queue + ); + + this.setProcessing(); + this.scrollToTop(); + + var bufferPromise = blob.arrayBuffer(); + blob.arrayBuffer().then((buffer) => { + /* process the ArrayBuffer */ + dLog('arrayBuffer', buffer, buffer.byteLength); + const + fileName = file.name, + data = arrayBufferToString(buffer), + replaceDataset = this.replaceDataset, + /** corresponds to the param msg in backend/common/models/dataset.js : upload(). */ + message = {fileName, data, replaceDataset}; + /** a jQuery promise (jqXHR) */ + let promise = + this.uploadData(message); + promise.always(() => file.queue.remove(file)); + /** data-base:uploadData() calls setSuccess() (i.e. 'Uploaded successfully') + * Prepend the datasetName to that message. + */ + promise.then((res) => { + let datasetName = res.status; + this.unviewDataset(datasetName); + this.setSuccess("Dataset '" + datasetName + "' " + this.successMessage); + }); + }); + + }, + + /** Unview the blocks of the dataset which has been replaced by successful upload. 
+ */ + unviewDataset(datasetName) { + let + store = this.get('apiServers').get('primaryServer').get('store'), + replacedDataset = store.peekRecord('dataset', datasetName), + viewedBlocks = replacedDataset.get('blocks').toArray().filterBy('isViewed'), + blockService = this.get('blockService'), + blockIds = viewedBlocks.map((b) => b.id); + dLog('unviewDataset', datasetName, blockIds); + blockService.setViewed(blockIds, false); + } + +}); diff --git a/frontend/app/components/panel/view-controls.js b/frontend/app/components/panel/view-controls.js index 82376fde5..646454315 100644 --- a/frontend/app/components/panel/view-controls.js +++ b/frontend/app/components/panel/view-controls.js @@ -155,6 +155,10 @@ export default Component.extend({ featuresCountsThresholdLinear : expRangeInitial(500, expRangeBase(100, 10000)), + /** Threshold between showing featuresCounts charts and features tracks : + * - if (count <= featuresCountsThreshold) show features (axis-tracks) + * - if (count > featuresCountsThreshold) show featuresCounts (axis-charts) + */ featuresCountsThreshold : computed('featuresCountsThresholdLinear', function () { let thresholdLinear = +this.get('featuresCountsThresholdLinear'), diff --git a/frontend/app/components/record/entry-base.js b/frontend/app/components/record/entry-base.js index 0b4567b8a..c5240b55b 100644 --- a/frontend/app/components/record/entry-base.js +++ b/frontend/app/components/record/entry-base.js @@ -2,6 +2,7 @@ import { on } from '@ember/object/evented'; import { computed } from '@ember/object'; import Component from '@ember/component'; import { inject as service } from '@ember/service'; +import { run } from '@ember/runloop'; export default Component.extend({ onInit: on('init', function() { @@ -49,7 +50,9 @@ export default Component.extend({ /** equiv : record._internalModel.modelName */ let modelName = record.get('constructor.modelName'); // destroyRecord() is equivalent to deleteRecord() and immediate save() - record.destroyRecord().then(function() { + record.destroyRecord() + .then(() => run(() => record.unloadRecord())) + .then(function() { // Don't trigger downstream effects until complete that.sendAction('onDelete', modelName, id); }); diff --git a/frontend/app/components/table-brushed.js b/frontend/app/components/table-brushed.js index 887e104e7..674964eb5 100644 --- a/frontend/app/components/table-brushed.js +++ b/frontend/app/components/table-brushed.js @@ -1,6 +1,9 @@ import $ from 'jquery'; + import Component from '@ember/component'; import { observer } from '@ember/object'; +import { computed } from '@ember/object'; + import { eltClassName } from '../utils/domElements'; @@ -12,6 +15,25 @@ import config from '../config/environment'; const trace = 0; const dLog = console.debug; +/** Provide default types for feature .values fields + */ +const featureValuesTypes = { + location : 'number' +}; +/** Provide additional column attributes for feature .values fields + */ +const featureValuesColumnsAttributes = { + ref : { className: "htCenter"}, + alt : { className: "htCenter"}, +}; +/** Provide default widths for feature .values fields + */ +const featureValuesWidths = { + ref : 40, + alt : 40, +}; + + export default Component.extend({ actions : { @@ -62,17 +84,78 @@ export default Component.extend({ this.get('createTable').apply(this); }, + /** @return true if any of the features in data have an end position : .value[1] + */ + positionEnd : computed('data.[]', function () { + let + data = this.get('data'), + positionEnd = data.any((datum) => datum.feature.value 
&& (datum.feature.value.length > 1)); + return positionEnd; + }), + extraColumnsNames : computed('data.[]', function () { + let + data = this.get('data'), + nameSet = data.reduce( + (result, datum) => { + let feature = datum.feature; + if (feature.values) { + Object.keys(feature.values).forEach((n) => result.add(n)); + } + return result; + }, + new Set()), + names = Array.from(nameSet.values()); + dLog('extraColumnsNames', names, data); + return names; + }), + extraColumns : computed('extraColumnsNames.[]', function () { + return this.get('extraColumnsNames').map( + (name) => { + let c = { + data: name, + type: featureValuesTypes[name] || 'text' + }; + let a = featureValuesColumnsAttributes[name]; + if (a) { + Object.keys(a).forEach((k) => c[k] = a[k]); + } + return c; + }); + }), + + extraColumnsHeaders : computed('extraColumnsNames.[]', function () { + return this.get('extraColumnsNames').map((name) => name.capitalize()); + }), + extraColumnsWidths : computed('extraColumnsNames.[]', function () { + /** ref, alt are configured in featureValuesWidths; default value + * for other columns, which may be user-defined. */ + return this.get('extraColumnsNames').map((columnName) => featureValuesWidths[columnName] || 120); + }), + + dataForHoTable : computed('data', function () { + let data = this.get('data').map((f) => { + /** remove .feature from structure because it causes Handsontable to give errors. */ + let {feature, ...rest} = f, + values = feature.values; + if (values) { + Object.keys(values).forEach((valueName) => rest[valueName] = values[valueName]); + } + if (feature.value.length > 1) { + // .Position is .value[0] + rest.PositionEnd = feature.value[1]; + } + return rest; + }); + return data; + }), createTable: function() { var that = this; dLog("createTable", this); let tableDiv = $("#table-brushed")[0]; dLog("tableDiv", tableDiv); - var table = new Handsontable(tableDiv, { - data: this.get('data') || [['', '', '']], - minRows: 1, - rowHeaders: true, - columns: [ + let + columns = [ { data: 'Chromosome', type: 'text' @@ -88,14 +171,41 @@ export default Component.extend({ pattern: '0,0.*' } } - ], - colHeaders: [ + ], + colHeaders = [ 'Block', 'Feature', 'Position' - ], + ], + colWidths = [100, 135, 60]; + function addColumns(cols, headers, widths) { + columns = columns.concat(cols); + colHeaders = colHeaders.concat(headers); + colWidths = colWidths.concat(widths); + } + if (this.get('positionEnd')) { + addColumns( + [{ + data: 'PositionEnd', + type: 'numeric', + numericFormat: { + pattern: '0,0.*' + } + }], + ['End'], + [60] + ); + } + addColumns(this.get('extraColumns'), this.get('extraColumnsHeaders'), this.get('extraColumnsWidths')); + + var table = new Handsontable(tableDiv, { + data: this.get('dataForHoTable') || [['', '', '']], + minRows: 1, + rowHeaders: true, + columns, + colHeaders, headerTooltips: true, - colWidths: [100, 135, 60], + colWidths, height: 600, manualRowResize: true, manualColumnResize: true, @@ -133,8 +243,8 @@ export default Component.extend({ }, - onSelectionChange: observer('data', function () { - let data = this.get('data'), + onSelectionChange: observer('dataForHoTable', function () { + let data = this.get('dataForHoTable'), me = this, table = this.get('table'); if (table) diff --git a/frontend/app/controllers/mapview.js b/frontend/app/controllers/mapview.js index 10846bbd2..bc4a5978d 100644 --- a/frontend/app/controllers/mapview.js +++ b/frontend/app/controllers/mapview.js @@ -151,9 +151,11 @@ export default Controller.extend(Evented, { loadBlock : function 
loadBlock(block) { dLog('loadBlock', block); // previously done in useTask() : (mixins/viewed-blocks)setViewed() : (data/block.js)setViewedTask() - block.set('isViewed', true); + if (! block.get('isViewed')) { + later(() => block.set('isViewed', true)); + } let referenceBlock = block.get('referenceBlock'); - if (referenceBlock) + if (referenceBlock && (referenceBlock !== block)) loadBlock.apply(this, [referenceBlock]); /* Before progressive loading this would load the data (features) of the block. diff --git a/frontend/app/mixins/axis-position.js b/frontend/app/mixins/axis-position.js index 52f3c4c01..8a85d5add 100644 --- a/frontend/app/mixins/axis-position.js +++ b/frontend/app/mixins/axis-position.js @@ -79,10 +79,20 @@ export default Mixin.create({ }, setDomainDebounced(domain) { this.set('currentPosition.yDomainDebounced', domain); + dLog('setDomainDebounced', domain, !!this.resolveDebounce, 'featuresFor'); + if (this.resolveDebounce) { + this.resolveDebounce(); + } + this.nextEndOfDomainDebounced = new Promise((resolve, reject) => { + this.resolveDebounce = resolve; + }); }, - setDomainThrottled(domain) { - this.set('currentPosition.yDomainThrottled', domain); - }, + /** a promise which resolves at the end of a domain debounce phase. */ + nextEndOfDomainDebounced : undefined, + /** @return a function wrapped with lodash_throttle(). + * @desc this updates (generates a new function) when .throttleTime + * changes + */ setDomainThrottled : computed('controls.view.throttleTime', function () { let throttled = lodash_throttle( diff --git a/frontend/app/models/axis-brush.js b/frontend/app/models/axis-brush.js index 7771864f1..b6376e690 100644 --- a/frontend/app/models/axis-brush.js +++ b/frontend/app/models/axis-brush.js @@ -24,8 +24,9 @@ export default Model.extend({ features : computed('blockId', 'zoomCounter', 'brushedDomain.[0]', 'brushedDomain.[1]', function () { let blockId = this.get('blockId'), id = this.get('id'); - if (blockId === undefined) + if (blockId === undefined) { blockId = this.id; + } let features = this.get('pathsP').getBlockFeaturesInterval(blockId); let me = this; diff --git a/frontend/app/models/block-adj.js b/frontend/app/models/block-adj.js index bce6618bd..bbded22fc 100644 --- a/frontend/app/models/block-adj.js +++ b/frontend/app/models/block-adj.js @@ -145,9 +145,14 @@ export default Model.extend(Evented, { * Similar to following @see axesDomains() * @desc but that function determines the referenceBlock's domain if the block is not zoomed. */ - zoomedDomains : mapBy('axes1d', 'zoomedDomainThrottled'), + zoomedDomains : computed('axes1d', 'axes1d.@each.zoomedDomainThrottled', 'axes', function () { + let axes1d = this.get('axes1d'), + zoomedDomains = axes1d + .map((axis1d) => axis1d && axis1d.zoomedDomainThrottled); + return zoomedDomains; + }), /** domain incorporates zoomedDomain and also flipped and blocksDomain */ - domains : mapBy('axes1d', 'domain'), + domains : computed.alias('axesDomains'), // .map('axes1d', (axis1d) => axis1d && axis1d.domain), /** Return the domains (i.e. zoom scope) of the 2 axes of this block-adj. * These are equivalent : @@ -506,7 +511,7 @@ export default Model.extend(Evented, { receivedAllCheck(resultLength, flow) { if (resultLength === 0) { - let anyZoomed = this.get('axes1d').any((a) => a.zoomed); + let anyZoomed = this.get('axes1d').any((a) => a && a.zoomed); if (! anyZoomed) { /** getting empty result after receiving paths - may be the end of streamed paths, so distinguish this case. 
*/ let resultLengthName = 'paths' + (flow.name === 'alias' ? 'Aliases' : '') + 'Result'; diff --git a/frontend/app/models/block.js b/frontend/app/models/block.js index 863ce5fd1..8e33e1cfd 100644 --- a/frontend/app/models/block.js +++ b/frontend/app/models/block.js @@ -17,9 +17,14 @@ import { intervalOverlap, intervalOverlapCoverage } from '../utils/interval-calcs'; -import { inDomain } from '../utils/draw/interval-overlap'; import { binEvenLengthRound } from '../utils/draw/interval-bins'; import { subInterval, overlapInterval } from '../utils/draw/zoomPanCalcs'; +import { + featuresCountsResultsCheckOverlap, + featuresCountsResultsMerge, + featuresCountsResultsFilter, + featuresCountsResultsTidy, + } from '../utils/draw/featuresCountsResults'; import { featureCountDataProperties } from '../utils/data-types'; @@ -122,8 +127,12 @@ export default Model.extend({ /*--------------------------------------------------------------------------*/ /** @return true if this block's dataset defined _meta.paths and it is true. + * and ! .isSNP */ - showPaths : computed('datasetId._meta.paths', 'id', function () { + showPaths : computed( + 'datasetId._meta.paths', 'id', + 'featuresCountIncludingZoom', 'featuresCount', + function () { let dataset = this.get('datasetId'), paths = dataset.get('_meta.paths'); @@ -144,6 +153,19 @@ export default Model.extend({ paths |= odd; dLog(id, odd); } + /* don't request paths for HighDensity SNPs until zoomed in to small scale. + * The comparison < 5e4 will be false until .featureCount or + * .featuresCountsResults are received, i.e. while + * featuresCountIncludingZoom is undefined. + * + * + * Currently the high density data does not have symbolic names + * (just chr:location) so paths via direct and aliases are not + * applicable. It is tagged HighDensity, but we should add a + * separate tag to indicate the lack of a feature name. + * So disable paths if tagged HighDensity. + */ + paths &&= ! this.get('isHighDensity') && (this.get('featuresCountIncludingZoom') < 5e4); // dLog('showPaths', dataset, paths); return paths; }), @@ -151,7 +173,6 @@ export default Model.extend({ /*--------------------------------------------------------------------------*/ hasFeatures : computed('featureCount', function () { - return this.get('featureCount') > 0; /** featureValueCount > 0 implies featureCount > 0. * Could also use .featuresCountsResults - if any non-zero counts then block has features. */ let count = this.get('featureCount') || this.get('featureValueCount'); @@ -243,6 +264,26 @@ export default Model.extend({ */ featuresDomain : alias('featureLimits'), + /** @return true if the parent dataset of this block has the given tag. + * @desc This can be extended to provided inheritance : first lookup + * this.get('tags'), and if tag is not found, then lookup + * .datasetId.tags + */ + hasTag : function (tag) { + let tags = this.get('datasetId.tags'), + has = tags && tags.length && (tags.indexOf(tag) >= 0); + return has; + }, + isSNP : computed('datasetId.tags', function () { + let isSNP = this.hasTag('SNP'); + return isSNP; + }), + isHighDensity : computed('datasetId.tags', function () { + let isHighDensity = this.hasTag('HighDensity'); + return isHighDensity; + }), + /** hasTag() can now be used in isChartable() and isSubElements() also. 
+ */ isChartable : computed('datasetId.tags', function () { let tags = this.get('datasetId.tags'), isChartable = tags && tags.length && (tags.indexOf('chartable') >= 0); @@ -272,8 +313,9 @@ export default Model.extend({ ensureFeatureLimits() { let limits = this.get('featureLimits'); /** Reference blocks don't have .featureLimits so don't request it. - * block.get('isData') depends on featureCount, which won't be present for + * block.get('isDataCount') depends on featureCount, which won't be present for * newly uploaded blocks. Only references have .range (atm). + * Could use block.get('isData') here; this (!range) seems equivalent. */ let range = this.get('range'), isData = ! range || ! range.length; @@ -319,6 +361,7 @@ export default Model.extend({ /*--------------------------------------------------------------------------*/ + /** generate a text name for the block, to be displayed - it should be * user-readable and uniquely identify the block. */ @@ -327,7 +370,7 @@ export default Model.extend({ * selectedFeatures.Chromosome * In paths-table.js @see blockDatasetNameAndScope() */ - let name = (this.get('datasetId._meta.shortName') || this.get('datasetId.id')) + ':' + this.get('scope'); + let name = this.get('datasetId.shortNameOrName') + ':' + this.get('scope'); return name; }), @@ -484,7 +527,7 @@ export default Model.extend({ /** Alternative method of getting the array of blocks. performance seems the same. */ store = this.get('apiServers').id2Store(this.get('id')), - blocks = ! store ? [] : store.peekAll('block') + blocks = ! store ? [] : store.peekAll('block'); } else { let /** all blocks from the same server as `this`. */ @@ -549,9 +592,11 @@ export default Model.extend({ } } else { blocks.forEach((block, i) => { - if ((block === undefined) && (i === 0)) + if ((block === undefined) && (i === 0)) { dLog('viewedReferenceBlock', 'reference not viewed', datasetName, scope); - if (scope !== block.get('scope')) { + } else if ((block === undefined)) { + dLog('viewedReferenceBlock', 'block undefined', datasetName, scope); + } else if (scope !== block.get('scope')) { dLog('viewedReferenceBlock', 'not grouped by scope', block.get('id'), scope, block._internalModel.__data, datasetName); } /* viewedBlocksByReferenceAndScope() does not filter out @@ -589,7 +634,7 @@ export default Model.extend({ if (referenceBlock.get('isCopy') && ! block.get('isCopy')) referenceBlock = block; else { - dLog('viewedReferenceBlock', 'duplicate match', block.get('id'), block._internalModel.__data, parentName, scope); + console.warn('viewedReferenceBlock', 'duplicate match', block.get('id'), block._internalModel.__data, parentName, scope); } } else referenceBlock = block; @@ -695,12 +740,36 @@ export default Model.extend({ /*--------------------------------------------------------------------------*/ + brushedDomain : computed( + 'axis1d.axisBrushComp.block.brushedDomain.{0,1}', + 'axis1d.brushedDomain.{0,1}', + function() { + let brushedDomain = this.get('axis1d.axisBrushComp.block.brushedDomain') || + this.get('axis1d.brushedDomain'); + return brushedDomain; + }), + + featuresCountIncludingBrush : computed( + 'featuresCountsResults.[]', + 'featureCountInBrush', 'brushedDomain.{0,1}' /* -Debounced */, 'limits', + function () { + let + count = this.get('axis1d.brushed') ? + (this.featuresCountsResults.length ? 
this.get('featureCountInBrush') : undefined ) : + this.featureCount; + if (trace_block > 1) + dLog('featuresCountIncludingBrush', count); + return count; + }), + /** @return the features count within zoomedDomain, or if there is no zoom, * i.e. zoomedDomain is undefined, then simply return .featureCount */ featuresCountIncludingZoom : computed( 'featuresCountsResults.[]', - 'featureCountInZoom', 'zoomedDomainDebounced.{0,1}', 'limits', + 'featureCountInZoom', + '{zoomedDomainDebounced,zoomedDomainThrottled}.{0,1}', + 'limits', function () { let count = this.get('zoomedDomain') ? @@ -711,6 +780,24 @@ export default Model.extend({ return count; }), + /** Same as featuresCountsInZoom(), but for the brushedDomain instead of the zoomedDomain + */ + featuresCountsInBrush : computed( + 'featuresCountsResults.[]', 'brushedDomain.{0,1}' /* -Debounced */, 'limits', + function () { + let + domain = this.get('brushedDomain'), + overlaps; + if (! domain) { + overlaps = this.get('featuresCountsResults'); + } + else { + overlaps = this.featuresCountsOverlappingInterval(domain); + } + if (trace_block > 1) + dLog('featuresCountsInBrush', domain, this.limits, overlaps && overlaps.length); + return overlaps; + }), /** From the featuresCounts results received, filter to return the bins * overlapping zoomedDomain. * If not zoomed (no zoomedDomain), return featuresCountsResults. @@ -719,7 +806,9 @@ export default Model.extend({ * [ {binSize, nBins, domain: Array(2), result: Array}, ... ] */ featuresCountsInZoom : computed( - 'featuresCountsResults.[]', 'zoomedDomainDebounced.{0,1}', 'limits', + 'featuresCountsResults.[]', + '{zoomedDomainDebounced,zoomedDomainThrottled}.{0,1}', + 'limits', function () { let domain = this.get('zoomedDomain'), @@ -756,6 +845,23 @@ export default Model.extend({ let overlaps = this.get('featuresCountsInZoom') || []; let domain = this.get('zoomedDomain'), + count = this.featureCountInInterval(overlaps, domain, 'Zoom'); + return count; + }), + featureCountInBrush : computed('featuresCountsInBrush.[]', function () { + let overlaps = this.get('featuresCountsInBrush') || []; + let + domain = this.get('brushedDomain'), + count = this.featureCountInInterval(overlaps, domain, 'Brush'); + return count; + }), + /** Use featuresCounts results to calculate featureCount in the given interval. + * @param overlaps featuresCounts results which overlap the domain + * @param domain [start,end] or if undefined then the whole count of all bins are summed. + * @param intervalName used only in log message + */ + featureCountInInterval(overlaps, domain, intervalName) { + let /** assume that the bins in each result are contiguous; use the * result which covers the interval best, and maybe later : (secondary measure * if >1 cover the interval equally) has the smallest binSize. @@ -777,9 +883,9 @@ export default Model.extend({ selectedOverlap = (selectedOverlapI === -1) ? undefined : overlaps[selectedOverlapI], count = selectedOverlap && this.featureCountResultInZoom(selectedOverlap, domain); if (trace_block > 1) - dLog('featureCountInZoom', overlaps, domain, coverage, smallestOver1I, largestUnder1I, selectedOverlapI, selectedOverlap, count); + dLog('featureCountInZoom', intervalName, overlaps, domain, coverage, smallestOver1I, largestUnder1I, selectedOverlapI, selectedOverlap, count); return count; - }), + }, /** Determine how well this result covers the given domain. 
* via overlap size / domain size * @return 0 if there is no overlap @@ -789,6 +895,7 @@ export default Model.extend({ return coverage; }, /** Sum the counts of bins which overlap the domain + * Used for both zoomedDomain and brushedDomain. * @param domain [start,end] or if undefined then the whole count of all bins are summed. */ featureCountResultInZoom(fcs, domain) { @@ -900,7 +1007,9 @@ export default Model.extend({ let axes1d = this.get('blockService.axes1d.axis1dArray'); axis1d = axes1d.find((a1) => !a1.isDestroying && a1.viewedBlocks.find((b) => b === this)); - dLog('axis1d', axis1d, axes1d, this.id, this.get('axis.axis1d')); + if (trace_block > 1) { + dLog('axis1d', axis1d, axes1d, this.id, this.get('axis.axis1d')); + } } return axis1d; }, @@ -947,6 +1056,27 @@ export default Model.extend({ return out; }), + /** Same as axis-1d .isZoomedRightOut, except this evaluates just this block. + * Refer to the comment in axis-1d : @see isZoomedRightOut() + */ + isZoomedRightOut() { + let out = ! this.axis1d.zoomed && + ! (this.featureCount <= this.get('featuresCountsThreshold')); + dLog('isZoomedRightOut', out, this.featureCount, this.get('featuresCountsThreshold')); + return out; + }, + + /** @return true if features should be requested in response to axis brush, + * and displayed in features table as axis red circles. + */ + isBrushableFeatures : computed( + 'isZoomedOut', 'featuresCountIncludingBrush', 'featuresCountsThreshold', + function () { + let brushable = ! this.get('isZoomedOut') || + (! this.get('isHighDensity') && (this.get('featuresCountIncludingBrush') <= this.get('featuresCountsThreshold'))); + return brushable; + }), + /*--------------------------------------------------------------------------*/ /** @return current .zoomedDomain, or .limits @@ -1093,6 +1223,51 @@ export default Model.extend({ } ); return result; - } + }, + /** Add the received featuresCountsResult to .featuresCountsResults, + * either merging it with an existing result which overlaps the + * domain and has the same binSize, or otherwise append. + * @param fcResult + */ + featuresCountsResultsMergeOrAppend(fcResult) { + featuresCountsResultsTidy(fcResult); + // based on featuresCountsResultsSearch() + let + featuresCountsResults = this.get('featuresCountsResults'), + combined = featuresCountsResults + .find( + (fcr) => { + let found = + // if the domains are equal, that is considered a match. + (fcResult !== fcr) && (fcResult.binSize === fcr.binSize) && overlapInterval(fcResult.domain, fcr.domain); + /* If the received result bridges the gap between two + * existing results, then merge all three (later). + */ + if (found) { + /*if (trace_block > 1)*/ { + dLog('featuresCountsResultsSearch', fcResult.domain.toArray(), fcResult.nBins, fcResult.binSize, fcr.domain.toArray()); + } + /* Since these are counts within the same block, the + * domain direction of the results will be the same. */ + if (featuresCountsResultsCheckOverlap(fcr, fcResult)) { + /** if one of fcr or fcResult is a sub-interval then the + * result is the other value, otherwise the result is in fcr. + */ + let fcrM = featuresCountsResultsMerge(fcr, fcResult); + if (fcrM === fcResult) { // probably ignore this condition, to get update for CP dependency. + /** replace fcr with fcrM */ + featuresCountsResults.removeObject(fcr); + featuresCountsResults.pushObject(fcrM); + // to bridge a gap, use instead : featuresCountsResultsMergeOrAppend(fcrM) + } + } + } + return found; + } + ); + if (! 
combined) { + featuresCountsResults.pushObject(fcResult); + } + }, }); diff --git a/frontend/app/models/dataset.js b/frontend/app/models/dataset.js index 1977179e0..6b6f2ebf7 100644 --- a/frontend/app/models/dataset.js +++ b/frontend/app/models/dataset.js @@ -19,6 +19,8 @@ export default Record.extend({ 'apiServers.datasetsBlocksRefresh', '_meta.referenceHost', function () { + if (this.isDestroyed || this.isDestroying || this.isDeleted) + return undefined; let parentName = this.get('parentName'), parent; if (parentName) { @@ -88,6 +90,15 @@ export default Record.extend({ /*--------------------------------------------------------------------------*/ + /** @return shortName if defined, otherwise name + */ + shortNameOrName : computed('datasetId._meta.shortName', function () { + return this.get('_meta.shortName') || this.get('id'); + }), + + /*--------------------------------------------------------------------------*/ + + /** is this dataset copied from a (secondary) server, cached on the server it was loaded from (normally the primary). */ isCopy : computed('_meta._origin', function () { return !! this.get('_meta._origin'); @@ -99,7 +110,17 @@ export default Record.extend({ let blocks = this.get('blocks') .filter((b) => ! b.get('isCopy')); return blocks; - }) + }), + + /*--------------------------------------------------------------------------*/ + + /** @return true if this dataset has the given tag. + */ + hasTag : function (tag) { + let tags = this.get('tags'), + has = tags && tags.length && (tags.indexOf(tag) >= 0); + return has; + }, /*--------------------------------------------------------------------------*/ diff --git a/frontend/app/models/feature.js b/frontend/app/models/feature.js index b32f070aa..931116613 100644 --- a/frontend/app/models/feature.js +++ b/frontend/app/models/feature.js @@ -4,23 +4,40 @@ import Model, { attr, belongsTo, hasMany } from '@ember-data/model'; export default Model.extend({ blockId: belongsTo('block'), - name: attr('string'), + _name: attr('string'), /* currently have a mix of .range and .value in pretzel-data [develop]; * handle both for now; chrData() also handles either. */ value: attr(), range: attr(), + values: attr(), parentId: belongsTo('feature', {inverse: 'features'}), features: hasMany('feature', {inverse: 'parentId'}), /*--------------------------------------------------------------------------*/ - /** feature can have a direction, i.e. (value[0] > value[1]) + name : computed('_name', 'isAnon', function () { + let name = this.get('_name') || + (this.get('isAnon') && (this.get('blockId.name') + ':' + this.get('value.0'))); + return name; + }), + + isAnon : computed('blockId.datasetId.tags', function () { + let block = this.get('blockId.content') || this.get('blockId'), + anon = block.hasTag('AnonFeatures'); + return anon; + }), + + /*--------------------------------------------------------------------------*/ + + /** @return a positive interval equal in range to .value[] + * @desc + * feature can have a direction, i.e. (value[0] > value[1]) * For domain calculation, the ordered value is required. 
*/ valueOrdered : computed('value', function () { let value = this.get('value'); if (value[0] > value[1]) { - let value = [value[1], value[0]]; + value = [value[1], value[0]]; } return value; }) diff --git a/frontend/app/serializers/feature.js b/frontend/app/serializers/feature.js new file mode 100644 index 000000000..77fe8fa2d --- /dev/null +++ b/frontend/app/serializers/feature.js @@ -0,0 +1,8 @@ +import ApplicationSerializer from './application'; + +export default ApplicationSerializer.extend({ + + attrs: { + _name : 'name' + } +}); diff --git a/frontend/app/services/api-servers.js b/frontend/app/services/api-servers.js index 468019bec..6d480f75b 100644 --- a/frontend/app/services/api-servers.js +++ b/frontend/app/services/api-servers.js @@ -253,7 +253,9 @@ export default Service.extend(Evented, { */ blockId2Stores : function (blockId) { let stores = this.id2Stores('block', blockId); - dLog('blockId2stores', blockId, stores); + if (trace > 1) { + dLog('blockId2stores', blockId, stores); + } return stores; }, dataset2stores : function (datasetName) { diff --git a/frontend/app/services/auth.js b/frontend/app/services/auth.js index 1fb08552e..13e21128b 100644 --- a/frontend/app/services/auth.js +++ b/frontend/app/services/auth.js @@ -245,10 +245,10 @@ export default Service.extend({ return this._ajax('Blocks/pathsByReference', 'GET', {blockA : blockA, blockB : blockB, reference, max_distance, options : options}, true); }, - getBlockFeaturesCounts(block, interval, nBins, options) { + getBlockFeaturesCounts(block, interval, nBins, isZoomed, useBucketAuto, options) { if (trace_paths) - dLog('services/auth getBlockFeaturesCounts', block, interval, nBins, options); - return this._ajax('Blocks/blockFeaturesCounts', 'GET', {block, interval, nBins, options}, true); + dLog('services/auth getBlockFeaturesCounts', block, interval, nBins, isZoomed, useBucketAuto, options); + return this._ajax('Blocks/blockFeaturesCounts', 'GET', {block, interval, nBins, isZoomed, useBucketAuto, options}, true); }, getBlockFeaturesCount(blocks, options) { @@ -277,6 +277,27 @@ export default Service.extend({ return this._ajax('Features/search', 'GET', {server : apiServer, filter : featureNames, options}, true); }, + /** Request DNA sequence search (Blast). + * @param dnaSequence string "actg..." + * @param parent datasetId of parent / reference of the blast db which is to be searched + * @param searchType 'blast' + * @param resultRows limit rows in result + * @param addDataset true means add / upload result to db as a Dataset + * @param datasetName if addDataset, this value is used to name the added dataset. + */ + dnaSequenceSearch(apiServer, dnaSequence, parent, searchType, resultRows, addDataset, datasetName, options) { + dLog('services/auth featureSearch', dnaSequence.length, parent, searchType, resultRows, addDataset, datasetName, options); + /** Attach .server to JSON string, instead of using + * requestServerAttr (.session.requestServer) + * (this can be unwound after adding apiServer as param to ._ajax(), + * dropping the new String() ). 
+ */ + let data = {dnaSequence, parent, searchType, resultRows, addDataset, datasetName, options}, + dataS = JSON.stringify(data); // new String(); + // dataS.server = apiServer; + return this._ajax('Features/dnaSequenceSearch', 'POST', dataS, true); + }, + createDataset(name) { return this._ajax('Datasets', 'POST', JSON.stringify({name: name}), true) }, @@ -375,6 +396,12 @@ export default Service.extend({ * @param data params to the API; these guide the server determination; * e.g. if the param is block: , use the server from which blockId was loaded. * + * For POST, data is a JSON string, so data.server is not defined (except by dnaSequenceSearch); + * this is handled by the 'if (! requestServer) {' case. + * This will be simplified by adding apiServer as an (optional) + * param to _ajax(), _server(), _endpointURLToken(). + * + * @desc * The compound result includes a copy of these params, modified to suit the * server which is chosen : paths request params may be remote references, and * are converted to local if they are being sent to the server they refer to. diff --git a/frontend/app/services/controls.js b/frontend/app/services/controls.js index bded09ee8..d6722fe2e 100644 --- a/frontend/app/services/controls.js +++ b/frontend/app/services/controls.js @@ -2,6 +2,7 @@ import { alias } from '@ember/object/computed'; import { computed } from '@ember/object'; import Evented from '@ember/object/evented'; import Service from '@ember/service'; +import { inject as service } from '@ember/service'; import { stacks } from '../utils/stacks'; @@ -10,6 +11,7 @@ const dLog = console.debug; /** Registry for user controls which are global in their effect. */ export default Service.extend(Evented, { + apiServers : service(), /** this can change to a registry, e.g. 'view' for the view controls */ @@ -22,7 +24,22 @@ export default Service.extend(Evented, { dLog('controls', controls); return controls; }), - view : alias('controls.view') + view : alias('controls.view'), + /** @return the api server indicated by the tab currently selected + * by the user (serverTabSelected), or primaryServer if tab not + * changed. + * @desc + * Used for featureSearch and dnaSequenceSearch which don't have a + * block param to use to select apiServer. + */ + apiServerSelectedOrPrimary : computed('serverTabSelected', function () { + // factored from components/goto-feature-list.js:blocksUnique() . (taskGet getBlocksOfFeatures) + let + serverTabSelectedName = this.get('serverTabSelected'), + serverTabSelected = serverTabSelectedName && this.get('apiServers').lookupServerName(serverTabSelectedName), + apiServer = serverTabSelected || this.get('apiServers.primaryServer'); + return apiServer; + }) }); diff --git a/frontend/app/services/data/block.js b/frontend/app/services/data/block.js index b84889079..157492441 100644 --- a/frontend/app/services/data/block.js +++ b/frontend/app/services/data/block.js @@ -188,8 +188,9 @@ export default Service.extend(Evented, { let block = this.peekBlock(bfc._id); if (! block) { let stores = apiServers.blockId2Stores(bfc._id); - if (! stores.length) + if (! stores.length) { dLog('taskGetLimits', bfc._id); + } } else { // console.log('taskGetLimits', bfc, block); @@ -389,6 +390,12 @@ export default Service.extend(Evented, { getSummary: function (blockIds) { // console.log("block getSummary", id); let + /** true enables $bucketAuto as an alternative to $bucket using + * boundaries calculated from nBins and interval. This option + * will enable comparison of performance in operation. 
+ */ + useBucketAuto = this.get('parsedOptions.useBucketAuto'), + /** check if feature count of block is already received. */ blocksWithoutCount = blockIds.filter((blockId) => { let block = this.peekBlock(blockId); @@ -428,8 +435,14 @@ export default Service.extend(Evented, { } let taskId = blockId + '_' + nBins + (zoomedDomainText || ''); let summaryTask = this.get('summaryTask'); - let p = summaryTask[taskId]; - if (! p) { + let p; + if ((p = summaryTask[blockId]) && (p.state() === "pending")) { + // state() : pending ~ readyState : 1 + dLog('getSummary current', blockId, p, p.readyState); + } else if ((p = summaryTask[taskId])) { + // .state() : resolved ~ .readyState : 4 ~ .statusText : OK + dLog('getSummary re-use', taskId, p, p.state(), p.readyState, p.statusText); + } else { if (zoomedDomain) { dLog('getSummary', zoomedDomainText, zoomedDomain); } @@ -443,8 +456,17 @@ export default Service.extend(Evented, { if (! zoomedDomain) { interval = intervalFromLimits(blockId); } - let getCountsForInterval = (interval) => - this.get('auth').getBlockFeaturesCounts(blockId, interval, nBins, /*options*/{}) + let getCountsForInterval = (interval) => { + let countsP; + if (interval[0] === interval[1]) { + dLog('getCountsForInterval', interval); + countsP = Promise.resolve([]); + } else { + countsP = + this.get('auth').getBlockFeaturesCounts(blockId, interval, nBins, !!zoomedDomain, useBucketAuto, /*options*/{}); + } + return countsP; + }; if (interval) { p = summaryTask[taskId] = getCountsForInterval(interval); @@ -463,6 +485,7 @@ export default Service.extend(Evented, { resolve(interval); }, 4000); }) .then(getCountsForInterval); } + summaryTask[blockId] = p; /* this could be structured as a task within models/block.js * A task would have .drop() to avoid concurrent request, but * actually want to bar any subsequent request for the same taskId, @@ -484,7 +507,7 @@ export default Service.extend(Evented, { featuresCounts[0].idWidth[0] : intervalSize(interval) / nBins, result = {binSize, nBins, domain : interval, result : featuresCounts}; - block.get('featuresCountsResults').pushObject(result); + block.featuresCountsResultsMergeOrAppend(result); block.set('featuresCounts', featuresCounts); } }); @@ -877,6 +900,20 @@ export default Service.extend(Evented, { return records; // .toArray() }), + /** @return blocks which are viewed and are configured for display + * of paths, i.e. are data blocks not reference, have + * datasetId._meta.paths === true, and datasetId.tags[] does not + * contain 'SNP' + */ + viewedForPaths: computed( + 'viewed.@each.{isData,showPaths}', + function() { + let blocks = this.get('viewed'), + filtered = blocks.filter((block) => block.get('isData') && block.get('showPaths')); + dLog('viewedForPaths', blocks.length, filtered); + return filtered; + }), + /*----------------------------------------------------------------------------*/ /** collate the blocks by the parent they refer to. diff --git a/frontend/app/services/data/flows-collate.js b/frontend/app/services/data/flows-collate.js index e21dbbbd1..36e788d1f 100644 --- a/frontend/app/services/data/flows-collate.js +++ b/frontend/app/services/data/flows-collate.js @@ -246,10 +246,11 @@ export default Service.extend({ * dragging which changes left-to-right order and stacking. * The values b0, b1 are block IDs. 
*/ - blockAdjIds : computed('block.viewedIds.[]', 'adjAxesArr.[]', function () { - let viewedIds = this.get('block.viewedIds'); + blockAdjIds : computed('block.viewedForPaths.[]', 'adjAxesArr.[]', function () { + /** this could be used as the basis for adjAxes */ + let blockForPaths = this.get('blockForPaths'); let axesP = this.get('oa.axesP'); - dLog('blockAdjIds', viewedIds, axesP); + dLog('blockAdjIds', blockForPaths, axesP); let blockAdjIds = run(this, convert); /** Convert the hash adjAxes, e.g. adjAxes[b0] === b1, to an array of ordered pairs [b0, b1] */ diff --git a/frontend/app/services/data/paths-progressive.js b/frontend/app/services/data/paths-progressive.js index 8f988ff7b..df7386f2a 100644 --- a/frontend/app/services/data/paths-progressive.js +++ b/frontend/app/services/data/paths-progressive.js @@ -50,7 +50,7 @@ function verifyFeatureRecord(fr, f) { same = (fr.id === f._id) && direction && sameDirection && - ((frd ? frd.name : fr.get('name')) === f.name); + ((frd ? frd._name : fr.get('name')) === f.name); return same; } @@ -310,8 +310,12 @@ export default Service.extend({ blockId = blockId.get('id'); } if (trace_pathsP > 3) - dLog('pushFeature', f.blockId, c.get('blockId.features.length'), c.get('blockId.featuresLength'), f, 'featuresLength'); - storeFeature(stacks.oa, flowsService, f.name, c, blockId); + dLog('pushFeature', f.blockId, c.get('blockId.features.length'), c.get('blockId.featuresLength'), f, 'featuresLength'); + /** if feature has no ._name, i.e. datasetId.tags[] contains "AnonFeatures", + * then use e.g. "1H:" + value[0] + */ + let fName = f._name || (c.get('blockId.name') + ':' + f.value[0]); + storeFeature(stacks.oa, flowsService, fName, c, blockId); if (trace_pathsP > 2) dLog(c.get('id'), c._internalModel.__data); } @@ -660,7 +664,8 @@ export default Service.extend({ } else if (brushedDomain) paramAxis.domain = brushedDomain; - let dataBlockIds = axis.dataBlocks(true) + let dataBlockIds = axis.dataBlocks(true, false) + .filter((blockS) => blockS.block.get('isBrushableFeatures')) // equiv : blockS.block.get('id') .map(function (blockS) { return blockS.axisName; }); /** The result of passing multiple blockIds to getBlockFeaturesInterval() diff --git a/frontend/app/styles/app.scss b/frontend/app/styles/app.scss index eedc6a7c7..f7d8df068 100644 --- a/frontend/app/styles/app.scss +++ b/frontend/app/styles/app.scss @@ -1431,7 +1431,7 @@ div#left-panel > div > div.tab-content div#left-panel > div > div.tab-content > div.tab-pane { height: inherit; - overflow-y: overlay; + overflow-y: auto; /* possibly scrollbar-gutter in future. */ } #left-panel > div > div.tab-content > div.tab-pane > div { @@ -1439,6 +1439,14 @@ div#left-panel > div > div.tab-content > div.tab-pane margin-bottom: 70px; } +/*------------------------------------*/ + +/** e.g. sequence-search */ +ul.config-list { + padding-left: 0px; + margin: 1em; + margin-right: 0px; +} /*------------------------------------*/ /* in featureSearch results, wrap block names */ @@ -1462,6 +1470,14 @@ ul.view-controls > li > div > .sub-label > span { } /*------------------------------------*/ +/* used for #left-panel-upload > div > .panel + * Space is tight and the upload table has no side margin. 
+ */ +.panel.no-side-border { + border-left: none; + border-right: none; + padding-bottom: 10px; +} div#left-panel-upload select { diff --git a/frontend/app/templates/components/axis-2d.hbs b/frontend/app/templates/components/axis-2d.hbs index ec8c6bcdd..bac27e539 100644 --- a/frontend/app/templates/components/axis-2d.hbs +++ b/frontend/app/templates/components/axis-2d.hbs @@ -34,7 +34,7 @@ {{/ember-wormhole}} {{positionRightEdgeEffect}} -
axis-2d :{{this}}, {{axisID}}, {{targetEltId}}, {{subComponents.length}} :
+  axis-2d : {{axisID}}, {{targetEltId}}, {{subComponents.length}} :
  subComponents :
  {{#draw/axis-blocks axis1d=axis1d dataBlocks=dataBlocks childWidths=childWidths trackWidth=trackWidth
diff --git a/frontend/app/templates/components/draw/axis-brush.hbs b/frontend/app/templates/components/draw/axis-brush.hbs
index 809f9f3e8..f09f797ff 100644
--- a/frontend/app/templates/components/draw/axis-brush.hbs
+++ b/frontend/app/templates/components/draw/axis-brush.hbs
@@ -11,3 +11,28 @@
+
+<table>
+  <tr>
+    <th></th>
+    <th colspan="2">Estimated Features in</th>
+    <th></th>
+    <th></th>
+  </tr>
+  <tr>
+    <th>Loaded in Brush</th>
+    <th>Brush</th>
+    <th>Zoom</th>
+    <th>BlockId</th>
+    <th>Dataset</th>
+  </tr>
+  {{#each brushedBlocks as |brushedBlock| }}
+  <tr>
+    <td>{{get featuresReceived brushedBlock.block.id}}</td>
+    <td>{{brushedBlock.featureCountInBrush}}</td>
+    <td>{{brushedBlock.featuresCount}}</td>
+    <td>{{brushedBlock.block.id}}</td>
+    <td>{{brushedBlock.block.datasetId.shortNameOrName}}</td>
+  </tr>
+  {{/each}}
+</table>
\ No newline at end of file diff --git a/frontend/app/templates/components/draw/axis-ticks-selected.hbs b/frontend/app/templates/components/draw/axis-ticks-selected.hbs index 22bf585a5..b60f25133 100644 --- a/frontend/app/templates/components/draw/axis-ticks-selected.hbs +++ b/frontend/app/templates/components/draw/axis-ticks-selected.hbs @@ -1,2 +1,2 @@ -{{axisId}} featureTicks {{ featureTicks.axis1d }} {{ axisScaleEffect }} +{{axisId}} featureTicks {{ featureTicks.axis1d }} diff --git a/frontend/app/templates/components/draw/block-view.hbs b/frontend/app/templates/components/draw/block-view.hbs index c6afb5a7a..57f6e334c 100644 --- a/frontend/app/templates/components/draw/block-view.hbs +++ b/frontend/app/templates/components/draw/block-view.hbs @@ -1,3 +1 @@ -{{log 'blockFeatures' blockFeatures.length}} -{{blockFeatures.length}} featuresCounts {{featuresCounts.length}} diff --git a/frontend/app/templates/components/elem/panel-container.hbs b/frontend/app/templates/components/elem/panel-container.hbs new file mode 100644 index 000000000..f4a0b5921 --- /dev/null +++ b/frontend/app/templates/components/elem/panel-container.hbs @@ -0,0 +1 @@ +{{yield this}} diff --git a/frontend/app/templates/components/elem/panel-heading.hbs b/frontend/app/templates/components/elem/panel-heading.hbs index d6d515f1b..32e61657d 100644 --- a/frontend/app/templates/components/elem/panel-heading.hbs +++ b/frontend/app/templates/components/elem/panel-heading.hbs @@ -1,4 +1,13 @@ {{#if icon}} {{elem/icon-base name=icon}}  {{/if}} -{{yield}} \ No newline at end of file + + + + {{x-toggle + theme='light' + value=panelContainer.showComponent + onToggle=(action panelContainer.toggleShow panelContainer) }} + + +{{yield}} diff --git a/frontend/app/templates/components/elem/panel-message.hbs b/frontend/app/templates/components/elem/panel-message.hbs index b899cf93a..c72752fcb 100644 --- a/frontend/app/templates/components/elem/panel-message.hbs +++ b/frontend/app/templates/components/elem/panel-message.hbs @@ -1,24 +1,24 @@ -{{#if errorMessage}} +{{#if @errorMessage}}
{{#elem/panel-form name="danger" heading="Error"}} - {{errorMessage}} + {{@errorMessage}} {{/elem/panel-form}} {{/if}} -{{#if successMessage}} +{{#if @successMessage}}
{{#elem/panel-form name="success" heading="Success"}} - {{successMessage}} + {{@successMessage}} {{/elem/panel-form}} {{/if}} -{{#if warningMessage}} +{{#if @warningMessage}}
{{#elem/panel-form name="warning" heading="Warning"}} - {{warningMessage}} + {{@warningMessage}} {{/elem/panel-form}} {{/if}} \ No newline at end of file diff --git a/frontend/app/templates/components/form/api-servers.hbs b/frontend/app/templates/components/form/api-servers.hbs index e194caac1..1a907770c 100644 --- a/frontend/app/templates/components/form/api-servers.hbs +++ b/frontend/app/templates/components/form/api-servers.hbs @@ -1,21 +1,14 @@ {{!-- enable display of , when apiServers.serversLength > 1 --}} {{multipleServersEffect}} -{{#elem/panel-container state="primary"}} +{{#elem/panel-container state="primary" showComponent=showList as |panelContainer|}} - {{#elem/panel-heading icon="globe"}} + {{#elem/panel-heading icon="globe" panelContainer=panelContainer}} Datasources - - {{x-toggle - theme='light' - value=showList - onToggle=(action (mut showList)) }} - - {{/elem/panel-heading}} - {{#if showList}} + {{#if panelContainer.showComponent}}
    {{#each-in servers as |apiServerName apiServer| }}
  • @@ -37,7 +30,8 @@
- {{/if}} + {{/if}} {{!-- showComponent --}} + {{/elem/panel-container}} diff --git a/frontend/app/templates/components/form/user-signup.hbs b/frontend/app/templates/components/form/user-signup.hbs index 70fedc688..d7b1a0066 100644 --- a/frontend/app/templates/components/form/user-signup.hbs +++ b/frontend/app/templates/components/form/user-signup.hbs @@ -18,7 +18,7 @@ autocomplete="new-password" placeholder='Enter a New Password'}} {{suggest-link - suggest-text="Already a member?" + suggest-text="Already have an account?" link-name="login" link-pretty="Log In"}} {{elem/button-submit loading=isProcessing}} diff --git a/frontend/app/templates/components/panel/left-panel.hbs b/frontend/app/templates/components/panel/left-panel.hbs index f85c0429b..3adfc48c1 100644 --- a/frontend/app/templates/components/panel/left-panel.hbs +++ b/frontend/app/templates/components/panel/left-panel.hbs @@ -5,6 +5,7 @@ {{elem/icon-base name="folder-open"}} Explorer {{elem/icon-base name="picture"}} View {{elem/icon-base name="search"}} Feature Search + {{elem/icon-base name="search"}} Sequence Search {{elem/icon-base name="cloud-upload"}} Upload
@@ -18,6 +19,13 @@ }} + + {{panel/sequence-search + datasets=datasets + view=view + }} + + {{panel/manage-explorer view=view @@ -30,7 +38,7 @@ selectedBlock=selectedBlock selectedDataset=selectedDataset selectDataset="selectDataset" - changeTab="changeTab" + changeTab=(action 'changeTab' tab.select ) }} @@ -39,7 +47,7 @@ displayData=displayData selectedBlock=selectedBlock view=view - changeTab="changeTab" + changeTab=(action 'changeTab' tab.select ) loadBlock="loadBlock" removeBlock="removeBlock" selectBlock="selectBlock" diff --git a/frontend/app/templates/components/panel/manage-block.hbs b/frontend/app/templates/components/panel/manage-block.hbs index de4016f7e..0b51ef7ca 100644 --- a/frontend/app/templates/components/panel/manage-block.hbs +++ b/frontend/app/templates/components/panel/manage-block.hbs @@ -7,13 +7,14 @@

{{block.features.length}}

- {{#elem/panel-container state="primary"}} - {{#elem/panel-heading icon="tags"}} + {{#elem/panel-container state="primary" as |panelContainer|}} + {{#elem/panel-heading icon="tags" panelContainer=panelContainer}} Annotations {{block.annotations.length}} {{elem/button-refresh}} {{/elem/panel-heading}} + {{#if panelContainer.showComponent}}
    {{!-- existing annotations --}} {{#if block.annotations}} @@ -47,15 +48,17 @@
+ {{/if}} {{!-- showComponent --}} {{/elem/panel-container}} - {{#elem/panel-container state="primary"}} - {{#elem/panel-heading icon="resize-vertical"}} + {{#elem/panel-container state="primary" as |panelContainer|}} + {{#elem/panel-heading icon="resize-vertical" panelContainer=panelContainer}} Intervals {{block.intervals.length}} {{elem/button-refresh}} {{/elem/panel-heading}} + {{#if panelContainer.showComponent}}
    {{#if block.intervals}} {{#each block.intervals as |entry index|}} @@ -108,6 +111,7 @@ {{/if}}
+ {{/if}} {{!-- showComponent --}} {{/elem/panel-container}} {{else}}

No Map Selected

diff --git a/frontend/app/templates/components/panel/manage-dataset.hbs b/frontend/app/templates/components/panel/manage-dataset.hbs
index af9e00f4f..65611a466 100644
--- a/frontend/app/templates/components/panel/manage-dataset.hbs
+++ b/frontend/app/templates/components/panel/manage-dataset.hbs
@@ -4,11 +4,19 @@
   <tr><td>Namespace</td><td>{{dataset.namespace}}</td></tr>
-  <tr><td>Parent</td><td>{{dataset.parent.id}}</td></tr>
+  <tr><td>Parent</td><td>{{dataset.parentName}}</td></tr>
   <tr><td>Created</td><td>{{formatDate dataset.createdAt}}</td></tr>
   <tr><td>Last Updated</td><td>{{formatDate dataset.updatedAt}}</td></tr>
   <tr><td>Public</td><td>{{dataset.public}}</td></tr>
   <tr><td>Read Only</td><td>{{dataset.readOnly}}</td></tr>
+  {{#if dataset.tags.length}}
+  <tr><td>tags</td><td>
+    {{#each dataset.tags as |tag index|}}
+      {{#if index}}{{/if}}
+      {{tag}}
+    {{/each}}
+  </td></tr>
+  {{/if}}
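
The Parent row above now reads dataset.parentName in place of dataset.parent.id. The model-side definition is not included in this diff; the following is an illustrative sketch only, assuming the .parent relation may be unresolved when the parent dataset record is not loaded, and the parentId fallback name is hypothetical.

// models/dataset.js -- illustrative sketch only; the model change is not part of this diff.
import DS from 'ember-data';
import { computed } from '@ember/object';

export default DS.Model.extend({
  parent : DS.belongsTo('dataset'),

  /** Name of the parent dataset, usable even when .parent is not loaded. */
  parentName : computed('parent', function () {
    let parent = this.get('parent');
    // fall back to the stored reference when the parent record itself is not loaded
    return parent ? parent.get('id') : this.get('parentId'); // parentId : hypothetical
  })
});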
diff --git a/frontend/app/templates/components/panel/manage-explorer.hbs b/frontend/app/templates/components/panel/manage-explorer.hbs index 9ca25b612..83c88a735 100644 --- a/frontend/app/templates/components/panel/manage-explorer.hbs +++ b/frontend/app/templates/components/panel/manage-explorer.hbs @@ -1,13 +1,15 @@ {{form/api-servers addNewDatasource=(action 'addNewDatasource')}} -{{#elem/panel-container state="primary"}} - {{#elem/panel-heading icon="folder-open"}} +{{#elem/panel-container state="primary" as |panelContainer|}} + {{#elem/panel-heading icon="folder-open" panelContainer=panelContainer}} Datasets {{elem/button-refresh onClick=(action 'refreshAvailable')}} {{/elem/panel-heading}} + {{#if panelContainer.showComponent}} +
{{!-- there is no suitable autocomplete, so may change to =off --}} @@ -198,6 +200,8 @@
{{!-- end of tabs and tree --}} + {{/if}} {{!-- showComponent --}} + {{/elem/panel-container}} {{panel/select-parent block=blockWithoutParentOnPrimary loadBlock="loadBlock"}} diff --git a/frontend/app/templates/components/panel/manage-search.hbs b/frontend/app/templates/components/panel/manage-search.hbs index 21fce754b..485d64e38 100644 --- a/frontend/app/templates/components/panel/manage-search.hbs +++ b/frontend/app/templates/components/panel/manage-search.hbs @@ -1,21 +1,30 @@ -{{#elem/panel-container state="primary"}} - {{#elem/panel-heading icon="search"}} +{{#elem/panel-container state="primary" as |panelContainer|}} + {{#elem/panel-heading icon="search" panelContainer=panelContainer}} Feature Search {{/elem/panel-heading}} + + {{#if panelContainer.showComponent}} + {{goto-feature-list class="panel panel-primary" selectedFeatures=selectedFeatures updateFeaturesInBlocks="updateFeaturesInBlocks" loadBlock=(action loadBlock) }} + + {{/if}} {{!-- showComponent --}} {{/elem/panel-container}} -{{#elem/panel-container state="primary"}} - {{#elem/panel-heading icon="plane"}} +{{#elem/panel-container state="primary" as |panelContainer|}} + {{#elem/panel-heading icon="plane" panelContainer=panelContainer}} External Lookup {{/elem/panel-heading}} + {{#if panelContainer.showComponent}} + {{goto-feature drawActions=this class="panel panel-primary" selectedFeatures=selectedFeatures selectedBlock=selectedBlock }} + + {{/if}} {{!-- showComponent --}} {{/elem/panel-container}} diff --git a/frontend/app/templates/components/panel/manage-view.hbs b/frontend/app/templates/components/panel/manage-view.hbs index c0f3ed8a0..731d8c259 100644 --- a/frontend/app/templates/components/panel/manage-view.hbs +++ b/frontend/app/templates/components/panel/manage-view.hbs @@ -1,5 +1,5 @@ -{{#elem/panel-container state="primary"}} - {{#elem/panel-heading icon="globe"}} +{{#elem/panel-container state="primary" as |panelContainer|}} + {{#elem/panel-heading icon="globe" panelContainer=panelContainer}} Displayed Data {{#if hasDisplayData}} @@ -11,6 +11,8 @@ {{/if}} {{/elem/panel-heading}} + {{#if panelContainer.showComponent}} +
    {{#each displayData as |block|}}
  • {{/each}}
+ + {{/if}} {{!-- showComponent --}} {{/elem/panel-container}} {{#if isMapview}} -{{#elem/panel-container state="primary"}} - {{#elem/panel-heading icon="wrench"}} +{{#elem/panel-container state="primary" showComponent=showChartOptions as |panelContainer|}} + {{#elem/panel-heading icon="wrench" panelContainer=panelContainer}} Chart Options - - {{x-toggle - theme='light' - value=showChartOptions - onToggle=(action (mut showChartOptions)) }} - - {{/elem/panel-heading}} - {{#if showChartOptions}} + {{#if panelContainer.showComponent}} + {{!-- .chart-options is enabled by options=chartOptions -> body.chartOptions .chart-options CSS --}}
  • @@ -87,8 +85,8 @@ {{draw/flow-controls viewOptions=(hash isShowUnique pathColourScale showScaffoldMarkers showAsymmetricAliases) modelParamOptions=model.params.options }}
- {{/if}} + {{/if}} {{!-- showComponent --}} {{/elem/panel-container}} diff --git a/frontend/app/templates/components/panel/paths-table.hbs b/frontend/app/templates/components/panel/paths-table.hbs index 9ee549cb7..09aeef355 100644 --- a/frontend/app/templates/components/panel/paths-table.hbs +++ b/frontend/app/templates/components/panel/paths-table.hbs @@ -1,9 +1,11 @@ {{#if visible}} -{{#elem/panel-container state="primary"}} - {{#elem/panel-heading icon="filter"}} +{{#elem/panel-container state="primary" as |panelContainer|}} + {{#elem/panel-heading icon="filter" panelContainer=panelContainer}} Actions {{/elem/panel-heading}} + {{#if panelContainer.showComponent}} +
@@ -17,6 +19,8 @@
+ + {{/if}} {{!-- showComponent --}} {{/elem/panel-container}}
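
The panel conversions above (api-servers, manage-block, manage-explorer, manage-search, manage-view, paths-table) all follow one pattern : elem/panel-container yields itself ({{yield this}}), elem/panel-heading binds an x-toggle to panelContainer.showComponent, and the panel body is wrapped in {{#if panelContainer.showComponent}}. The component JS is not part of this diff; a minimal sketch of the shape the templates assume :

// frontend/app/components/elem/panel-container.js -- sketch only; the actual
// implementation is not shown in this diff.
import Component from '@ember/component';

export default Component.extend({
  /** panel body visibility; templates may bind an initial value,
   * e.g. {{#elem/panel-container showComponent=showList ...}} */
  showComponent : true,

  /** invoked from elem/panel-heading via
   * (action panelContainer.toggleShow panelContainer); the yielded container
   * is passed back because the action fires in the heading's context. */
  toggleShow(panelContainer) {
    panelContainer.toggleProperty('showComponent');
  }
});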
diff --git a/frontend/app/templates/components/panel/sequence-search.hbs b/frontend/app/templates/components/panel/sequence-search.hbs new file mode 100644 index 000000000..9bc8e5223 --- /dev/null +++ b/frontend/app/templates/components/panel/sequence-search.hbs @@ -0,0 +1,151 @@ + +{{#elem/panel-container state="primary" as |panelContainer|}} + {{!-- https://fontawesome.com/icons/dna Unicode f471 --}} + {{#elem/panel-heading icon="search" panelContainer=panelContainer}} + DNA Sequence Blast Search + {{/elem/panel-heading}} + + {{#if panelContainer.showComponent}} + +
+ + + + {{elem/icon-base name="edit"}} Sequence Input + + {{elem/icon-base name="arrow-right"}} Blast Output + + +
+ + + +
DNA Sequence Input : + FASTA +
+ + {{textarea + class="form-control" + maxLength=15 + input=(action 'inputIsActive') + enter=(action 'dnaSequenceInput') + insert-newline=(action 'dnaSequenceInput') + escape-press=(action 'dnaSequenceInput') + paste=(action 'paste') + placeholder="e.g. >BobWhite_c10015_641 +AGCTGGGTGTCGTTGATCTTCAGGTCCTTCTGGATGTACAGCGACGCTCC" }} + +
+ {{#elem/button-base + click=(action "search") + classNames=refreshClassNames + disabled=searchButtonDisabled + icon='refresh' + classColour="primary" + }} +   Search + {{/elem/button-base}} +
+ + +
    + +
  • + + +
  • + + + +
  • +
    + + Rows : {{ resultRows }} +
    +
    + +
    +
  • + +
  • + {{input type="checkbox" name="addDataset" checked=addDataset }} + + + {{!-- copied from data-csv.hbs --}} + {{#if this.addDataset}} + + {{input type="checkbox" name="replaceDataset" checked=replaceDataset }} + + + +
    + {{input + id="dataset_new" + type="text" + value=newDatasetName + class="form-control" + placeholder="New dataset name..." + disabled=isProcessing + }} +
    + + {{/if}} +
  • + +
+ + + +
+ + + + {{panel/upload/blast-results data=data + active=(bs-eq tab.activeId "sequence-search-output") }} + + + +
+
+ + {{elem/panel-message + successMessage=successMessage + warningMessage=warningMessage + errorMessage=errorMessage}} + {{#if nameWarning}} + {{elem/panel-message + warningMessage=nameWarning}} + {{/if}} + + {{#if isProcessing}} + {{#elem/panel-form + name="info" + heading="Processing..." }} + {{progressMsg}} + {{/elem/panel-form}} + {{/if}} + +
+ + + {{/if}} {{!-- showComponent --}} +{{/elem/panel-container}} + + + diff --git a/frontend/app/templates/components/panel/upload-data.hbs b/frontend/app/templates/components/panel/upload-data.hbs index c4618812a..a117f3972 100644 --- a/frontend/app/templates/components/panel/upload-data.hbs +++ b/frontend/app/templates/components/panel/upload-data.hbs @@ -1,7 +1,9 @@ -{{#elem/panel-container state="primary"}} - {{#elem/panel-heading icon="edit"}} +{{#elem/panel-container state="primary" class="no-side-border" as |panelContainer|}} + {{#elem/panel-heading icon="edit" panelContainer=panelContainer}} Data Specification {{/elem/panel-heading}} + {{#if panelContainer.showComponent}} +
{{#each-in filterOptions as |key value|}} @@ -17,7 +19,7 @@ {{/each-in}}
-{{/elem/panel-container}} + {{#if serverTabSelected }}
Destination Server: {{ serverTabSelected }}
@@ -47,7 +49,11 @@ {{/elem/panel-form}} {{/if}} -{{#if (compare filter '===' 'cell')}} +{{#if (compare filter '===' 'fileDrop')}} + {{panel/upload/file-drop-zone + refreshDatasets=refreshDatasets + }} +{{else if (compare filter '===' 'cell')}} {{panel/upload/data-csv datasets=datasets refreshDatasets=refreshDatasets @@ -58,3 +64,6 @@ refreshDatasets=refreshDatasets }} {{/if}} + + {{/if}} {{!-- showComponent --}} +{{/elem/panel-container}} diff --git a/frontend/app/templates/components/panel/upload/blast-results.hbs b/frontend/app/templates/components/panel/upload/blast-results.hbs new file mode 100644 index 000000000..cd6f39a85 --- /dev/null +++ b/frontend/app/templates/components/panel/upload/blast-results.hbs @@ -0,0 +1,16 @@ +{{dataMatrixEffect}} +{{activeEffect}} + + +{{!-- --------------------------------------------------------------------- --}} + +{{#if isProcessing}} + {{#elem/panel-form + name="info" + heading="Processing..." }} + {{progressMsg}} + {{/elem/panel-form}} +{{/if}} + +
+
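
blast-results.hbs opens with {{dataMatrixEffect}} and {{activeEffect}}, the same idiom as {{multipleServersEffect}} and {{axisScaleEffect}} elsewhere in this diff : a computed property is referenced from the template solely so that it re-evaluates, and performs its side effect, when its dependencies change. A minimal sketch of the idiom (the component's actual body is not shown here; showTable() is a hypothetical helper) :

// sketch of the effect idiom only, not the blast-results implementation
import Component from '@ember/component';
import { computed } from '@ember/object';

export default Component.extend({
  active : false,

  activeEffect : computed('active', function () {
    if (this.get('active')) {
      // e.g. (re)render the result table when this tab becomes active
      this.showTable(); // hypothetical
    }
    // evaluates to nothing visible; the template reference only triggers evaluation
    return undefined;
  }),

  showTable() { /* ... */ }
});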
diff --git a/frontend/app/templates/components/panel/upload/data-json.hbs b/frontend/app/templates/components/panel/upload/data-json.hbs index 1ded9a138..66e56a389 100644 --- a/frontend/app/templates/components/panel/upload/data-json.hbs +++ b/frontend/app/templates/components/panel/upload/data-json.hbs @@ -11,11 +11,11 @@ {{/elem/panel-form}} {{/if}} -
Upload Datasets (JSON / gzip)
+  Upload Datasets (JSON / gzip, xlsx, ods)

+ +
  • + + {{input type="checkbox" name="replaceDataset" checked=replaceDataset }} + + +
  • + + + + + +{{!-- + + {{#if dropzone.active}} + {{#if dropzone.valid}} + Drop to upload + {{else}} + Invalid + {{/if}} + {{else if queue.files.length}} + Uploading {{queue.files.length}} files. ({{queue.progress}}%) + {{else}} +

    Upload Spreadsheets

    +

    + {{#if dropzone.supported}} + Drag and drop Spreadsheets and JSON files onto this area to upload them or + {{/if}} + + Add a Spreadsheet. + +

    + {{/if}} + + +
    +{{elem/panel-message + successMessage=successMessage + warningMessage=warningMessage + errorMessage=errorMessage}} + +{{#if isProcessing}} + {{#elem/panel-form + name="info" + heading="Processing..." }} + {{progressMsg}} + {{/elem/panel-form}} +{{/if}} diff --git a/frontend/app/templates/components/panel/view-controls.hbs b/frontend/app/templates/components/panel/view-controls.hbs index 5409073c6..c0ab48f1d 100644 --- a/frontend/app/templates/components/panel/view-controls.hbs +++ b/frontend/app/templates/components/panel/view-controls.hbs @@ -1,7 +1,8 @@ -{{#elem/panel-container state="primary"}} - {{#elem/panel-heading icon="option-vertical"}} +{{#elem/panel-container state="primary" as |panelContainer|}} + {{#elem/panel-heading icon="option-vertical" panelContainer=panelContainer}} Selected Axis Options {{/elem/panel-heading}} + {{#if panelContainer.showComponent}}
    • @@ -238,4 +239,5 @@
    + {{/if}} {{!-- showComponent --}} {{/elem/panel-container}} diff --git a/frontend/app/templates/components/suggest-link.hbs b/frontend/app/templates/components/suggest-link.hbs index c0d6ae897..a9d2dc0fc 100644 --- a/frontend/app/templates/components/suggest-link.hbs +++ b/frontend/app/templates/components/suggest-link.hbs @@ -1,6 +1,6 @@
    - {{suggest-text}} - {{#link-to link-name classNames='' tagName='a'}} - {{link-pretty}} + {{@suggest-text}} + {{#link-to @link-name classNames='' tagName='a'}} + {{@link-pretty}} {{/link-to}}
    diff --git a/frontend/app/templates/index.hbs b/frontend/app/templates/index.hbs index fe88220be..bd8abfcb5 100644 --- a/frontend/app/templates/index.hbs +++ b/frontend/app/templates/index.hbs @@ -12,13 +12,14 @@

    Pretzel

    An Express/Ember/D3 framework to display and interactively navigate complex datasets.

-
-Developed by
-- AgriBio, Department of Economic Development, Jobs, Transport and Resources (DEDJTR), Victoria,
-  Australia;
-- CSIRO, Canberra, Australia.
-Funded by the Grains Research Development Corporation (GRDC).
+
+Currently (2020-) funded and developed by Agriculture Victoria, Department of Jobs, Precincts and Regions (DJPR), Victoria, Australia.
+
+Previously (2016-2020) funded by the Grains Research Development Corporation (GRDC) and co-developed by Agriculture Victoria and CSIRO, Canberra, Australia.
+
    diff --git a/frontend/app/utils/draw/featuresCountsResults.js b/frontend/app/utils/draw/featuresCountsResults.js new file mode 100644 index 000000000..655c44922 --- /dev/null +++ b/frontend/app/utils/draw/featuresCountsResults.js @@ -0,0 +1,470 @@ +import { isEqual } from 'lodash/lang'; +import groupBy from 'lodash/groupBy'; + +import createIntervalTree from 'interval-tree-1d'; + +import { + intervalOverlap, + intervalOrdered, + intervalJoin, + intervalSubtract2, + intervalsAbut, +} from '../interval-calcs'; +import { inInterval } from './interval-overlap'; +import { inRange, subInterval, overlapInterval, intervalSign } from './zoomPanCalcs'; +import { featureCountDataProperties } from '../data-types'; + +const dLog = console.debug; + + +/** Check that the bins which are in the overlap of the 2 given FCRs match. + * + * This is used in featuresCountsResultsMergeOrAppend() before + * discarding the overlap section from one of the FCRs using + * featuresCountsResultsMerge(). + * @param fcr1, fcr2 featuresCountsResults + * @return true if the bins in the overlap match between the 2 FCRs + */ +function featuresCountsResultsCheckOverlap(fcr1, fcr2) { + let o = intervalOverlap([fcr1.domain, fcr2.domain]), + fcr1O = featuresCountsResultsFilter(fcr1, o), + fcr2O = featuresCountsResultsFilter(fcr2, o), + same = isEqual(fcr1O, fcr2O); + if (! same) { + dLog('featuresCountsResultsCheckOverlap', same, fcr1, fcr2, o, fcr1O, fcr2O); + } + return same; +} + +/** The two given featuresCountsResults overlap; merge them. + * If one contains the other, then discard the sub-interval, + * otherwise ap/pre -pend to fcr1 the part of fcr2 which is outside of fcr1. + * @return the larger or combined featuresCountsResult + */ +function featuresCountsResultsMerge(fcr1, fcr2) { + let fcr; + if (subInterval(fcr1.domain, fcr2.domain)) { + fcr = fcr2; + } else if (subInterval(fcr2.domain, fcr1.domain)) { + fcr = fcr1; + } else { + let + addInterval = intervalJoin('subtract', fcr2.domain, fcr1.domain), + add = featuresCountsResultsFilter(fcr2, addInterval); + fcr = fcr1; + fcr.result = featuresCountsResultsConcat(fcr.result, add.result); + // this doesn't count the empty bins in fcr2 / add + fcr.nBins += add.result.length; + fcr.domain = intervalJoin('union', fcr1.domain, fcr2.domain); + } + dLog('featuresCountsResultsMerge', fcr, fcr1, fcr2); + return fcr; +} +/** concat() two featuresCountsResult .result[] arrays, preserving ._id order. + */ +function featuresCountsResultsConcat(r1, r2) { + let r; + if (r1[r1.length-1]._id < r2[0]._id) { + r = r1.concat(r2); + } else if (r2[r2.length-1]._id < r1[0]._id) { + r = r2.concat(r1); + } else { + // ignore order - just concat. + dLog('featuresCountsResultsConcat', r1[0], r1[r1.length-1], r2[0], r2[r2.length-1], r1, r2); + r = r1.concat(r2); + } + return r; +} + + +/** Copy a featuresCountsResult, within the given domain. + * @return a copy of fcResult, with results outside of domain filtered out. + */ +function featuresCountsResultsFilter(fcResult, domain) { + let {...out} = fcResult; + resultFilter(out, domain); + out.domain = domain; + dLog('featuresCountsResultsFilter', out, fcResult, domain); + return out; +} +function resultFilter(out, domain) { + /* if needed could also support featureCountAutoDataProperties */ + let datum2Location = featureCountDataProperties.datum2Location; + out.result = out.result.filter( + (fc) => binInRange(datum2Location(fc), domain)); + out.nBins = out.result.length; +} +/** Similar to intervalOverlap(). 
+ * Regard a bin interval as [closed, open) + */ +function binInRange(binInt, domain) { + // related : intervalOverlap([]) ( open) + // overlapInterval() allows === (closed) + // inRange() (closed) + + let + i0 = intervalOrdered(binInt), + i1 = intervalOrdered(domain); + + let within = + (i1[0] <= i0[0]) && (i0[1] <= i1[1]); + + return within; +} + + + +/** Truncate excess decimal places in fcResult.result[*]._id + * If result[].idWidth < 1 then ._id often has alias error + * e.g. {_id: 49.20000000000024, count: 1, idWidth: [0.2]} + * + * This impacts on comparison isEqual() done by + * featuresCountsResultsCheckOverlap(), which is purely for + * development verification, and otherwise doesn't matter. + * + * @param fcResult fcResult.result[*]._id is mutated in situ. + */ +function featuresCountsResultsTidy(fcResult) { + let result = fcResult.result; + if (result[result.length-1] === undefined) { + result.pop(); + } + + result.forEach((r) => { + // this assumes featureCountDataProperties, not featureCountAutoDataProperties. + if (r.idWidth < 1) { r._id = Math.round(r._id / r.idWidth) * r.idWidth; } + }); +} +/*----------------------------------------------------------------------------*/ + +/** The given featuresCountsResults selectedResults have been selected + * by their coverage of a given interval (e.g. zoomedDomain), and by + * their binSize being suited for display at the current scale. + * Adjacent or overlapping results with the same binSize have been + * merged using featuresCountsResultsMerge(), so for a given binSize, + * results in selectedResults do not overlap. + * For different binSizes, they are likely to overlap and may have + * gaps in covering the domain. + * + * This function selects sections of these results; the return + * featuresCountsResult contains results whose bins join exactly with + * no overlap, and no gap if none was present in the input. + * Results with smaller binSize (higher resolution) are preferred. + * + +Rough design notes + * (from early Mar11) + +. starting from result of featuresCountsInZoom() +. group into layers by binSize +. start with the layer with smallest binSize (only those large enough to display are chosen by featuresCountsInZoom()) + . accept all of these; set .join = .domain; add them to interval tree +. for each subsequent layer : + . subtract all previous (smaller) layers from results, this defines .join at an end where subtraction limits the result + . for each result in layer : for each end : search for overlapping results in interval tree + . this may split results into multiple pieces; add a function in featuresCountsResults.js, using + added operation 'subtract2' to intervalJoin( ), for this specific internal use, not public api. + . for edges which are not cut, set .join = .domain + . at the subtraction edge : set .join to the cut point, calculate .rounded : + . on the result being added (larger binSize) : round outwards by .binSize + . on the result already accepted (smaller binSize) : round inwards by .binSize of the result being added. +. after the above : all results have .join set at both ends, and possibly .rounded + . where .rounded is not set, set it to .join +. all results have .rounded and are non-overlapping +. slice each result : removing bins at each end which are outside .rounded + + + * @param selectedResults array of featuresCountsResults, which have the form e.g. + * {binSize: 200000, nBins: 100, domain: Array(2), result: Array(90)} + * .result is an array of feature counts : e.g. 
{_id: 8500000, count: 131, idWidth: Array(1)} + * .idWidth[0] is binSize. + * + * This assumes the result type is featureCountDataProperties, not featureCountAutoDataProperties. + * It would be easy to add an _id lookup function to featureCount{,Auto}DataProperties, + * but bucketauto would not suit the current requirements, and using defined boundaries does. + * + * @param preferredBinSize the binSize the user has configured as + * preferred, calculated from axis size in pixels and zoomedDomain and + * featuresCountsNBins; see lengthRounded in @see selectFeaturesCountsResults() + */ + +function featuresCountsResultsSansOverlap (selectedResults, preferredBinSize) { + if (! selectedResults || ! selectedResults.length) + return selectedResults; + + /** group into layers by binSize */ + let binSize2fcrs = groupBy(selectedResults, 'binSize'); + + let + /** createIntervalTree() handles just the interval, so map from that to the FCR */ + domain2Fcr = new WeakMap(); + // map .domain before assigning in domain2Fcr . + selectedResults.forEach((fcr) => { + let direction = intervalSign(fcr.domain); + /** round outwards by binSize. if i===0 and direction then up is false */ + fcr.domain = fcr.domain.map((d, i) => roundToBinSize(d, fcr.binSize, /*up*/ ((i===0) ^ direction ))); + }); + selectedResults.forEach((fcr) => domain2Fcr.set(fcr.domain, fcr)); + + /** .join[] and .rounded[] are parallel to .domain[], i.e. [start, end]. + * When end `i` is cut, .join[i] is set, and .rounded[i] is + * calculated from that by rounding by the binSize of the shadowing + * fcr. + * Dropping .join because it is not needed, and it introduces + * the complication of using .join[i] || .domain[i] + * Dropping .rounded - use .domain instead + */ + // selectedResults.forEach((fcr) => { fcr.rounded = []; /*fcr.join = [];*/ }); + + + + /** start with the layer with binSize closest to preferredBinSize (only those large + * enough to display are chosen by selectFeaturesCountsResults()) + * accept all of these; set .join = .domain; add them to interval tree + */ + let + /** sorted in order of closeness to preferredBinSize (lengthRounded). + * similar calc in selectFeaturesCountsResults(). */ + closeToPreferred = function(binSize) { return Math.abs(Math.log2(binSize / preferredBinSize)); }, + binSizes = Object.keys(binSize2fcrs).sort((a,b) => closeToPreferred(a) - closeToPreferred(b)), + firstBinSize = binSizes.shift(), + firstLayer = binSize2fcrs[firstBinSize], + intervalTree = createIntervalTree(firstLayer.mapBy('domain')); + /** can't intervalTree.remove during queryInterval(), so collate for .remove after query. */ + let intervalTreeChanges = []; + + // firstLayer.forEach((fcr) => fcr.join = fcr.domain); + /** a subset of selectedResults, containing those which are not entirely shadowed and hence not used. */ + let selectedUsed = firstLayer.slice(); + /** fcr-s created by subtracting a sub-interval */ + let addedFcr = []; + + function setDomain(fcr, domain, inTree) { + if (inTree) { + intervalTree.remove(fcr.domain); + } + fcr.domain = domain; + domain2Fcr.set(fcr.domain, fcr); + if (inTree) { + intervalTree.insert(fcr.domain); + } + } + +/* +. for each subsequent layer : + . subtract all previous (smaller) layers from results, this defines .join at an end where subtraction limits the result + . for each result in layer : for each end : search for overlapping results in interval tree + . 
this may split results into multiple pieces; add a function in featuresCountsResults.js, using + added operation 'subtract2' to intervalJoin( ), for this specific internal use, not public api. + */ + binSizes.forEach((binSize) => { + let fcrs = binSize2fcrs[binSize]; + fcrs.forEach((fcr) => subtractAccepted(fcr) && selectedUsed.push(fcr)); + }); + + /** @return true if fcr is not completely shadowed by a previously-accepted result. + */ + function subtractAccepted(fcr) { + let used = true; + let addedFcrLocal = []; + let [lo, hi] = fcr.domain; + intervalTree.queryInterval(lo, hi, function(interval) { + let fcrI = domain2Fcr.get(interval); + let abut = intervalsAbut(interval, fcr.domain, false); + if (fcrI.binSize === fcr.binSize) { + // ignore - no overlap, and no rounding required. + } else + /* fcr.domain may be cut by multiple matching intervals. + */ + if (subInterval(fcr.domain, interval)) { + // fcr is already covered by interval + used = false; + } else if (subInterval(interval, fcr.domain) && + ! abut) { + let + outer = intervalSubtract2(fcr.domain, interval); + setDomain(fcr, outer[0], false); + let {...fcr2} = fcr; + fcr2.domain = outer[1]; + // copy because it will have different values to fcr. + // fcr2.rounded = fcr2.rounded.slice(); + // copy because it may be used to lookup domain2Fcr. + fcr2.domain = fcr2.domain.slice(); + domain2Fcr.set(fcr2.domain, fcr2); + addedFcrLocal.push(fcr2); + addedFcr.push(fcr2); + cutEdge(fcr, interval, 1); + cutEdge(fcr2, interval, 0); + } else + /* fcr.domain may have reduced since start of .queryInterval() so re-check if overlap. */ + if (!!intervalOverlap([fcr.domain, interval]) ) { + /** this case includes (subInterval && abut). */ + /** interval overlaps fcr.domain, or they + * abut, so subtract produces just 1 interval. */ + fcr.domain = intervalJoin('subtract', fcr.domain, interval); + domain2Fcr.set(fcr.domain, fcr); + + /** edge of fcr cut by interval is fcr.domain[ci] */ + let ci = fcr.domain.findIndex((d) => inRange(d, interval)); + cutEdge(fcr, interval, ci); + } + }); + + let fromTo; + while ((fromTo = intervalTreeChanges.shift())) { let [from, to] = fromTo; intervalTree.remove(from); intervalTree.insert(to); }; + + /* for edges which are not cut, set .join = .domain + fcr.domain.forEach((d, i) => { + if (fcr.join[i] === undefined) { fcr.join[i] = d; }}); + */ + if (used) { + intervalTree.insert(fcr.domain); + } + addedFcrLocal.forEach((fcr) => subtractAccepted(fcr)); + return used; + } + + + /** fcr (i1) is cut by i2 at i2[+!edge]. + * Round the edge. + * + * @param fcr not yet accepted (not in intervalTree) + * + * @desc + * For featuresCountsResults, direction is true (positive) because + * it is determined by the block domain, which is positive; some of + * this code handles direction variation, but there seems no point + * in making that complete. + */ + function cutEdge(fcr, i2, edge) { + /* + . at the subtraction edge : set .join to the cut point, calculate .rounded : + . on the result being added (larger binSize) : round outwards by .binSize + . on the result already accepted (smaller binSize) : round inwards by .binSize of the result being added. +*/ + let + /** i2 is from intervalTree. */ + fcr2 = domain2Fcr.get(i2), + /** in the original design the binSize2fcrs[smallestBinSize] was + * accepted first, so here fcr.binSize was always the larger. 
*/ + binSize = Math.max(fcr2.binSize, fcr.binSize); + /*if ((fcr.rounded[+!edge] !== undefined) || (fcr2.rounded[edge] !== undefined)) { + dLog('cutEdge', fcr, i2, edge); + } else*/ { + // fcr.domain[edge] has been limited at i2[+!edge] + featuresCountsResultsRound(fcr, edge, true, binSize); + featuresCountsResultsRound(fcr2, +!edge, false, binSize); + // fcr2 is already in tree, so if .domain changed, update tree. + if (i2[+!edge] !== fcr2.domain[+!edge]) { + intervalTreeChanges.push([i2, fcr2.domain]); + } + } + } + + /* + . after the above : all results have .join set at both ends, and possibly .rounded + . where .rounded is not set, set it to .join + . all results have .rounded and are non-overlapping + */ + let withAdded = selectedUsed.concat(addedFcr); + if (false) + withAdded.forEach((fcr) => { + fcr.domain.forEach((r, i) => (fcr.rounded[i] ||= fcr.domain[i])); + }); + + /* + . slice each result : removing bins at each end which are outside .rounded + */ + withAdded.forEach((fcr) => { + resultFilter(fcr, fcr.domain/*rounded*/); + }); + + /* Result is single-layer - no overlapping featuresCountsResults. */ + let single = withAdded; + + dLog('featuresCountsResultsSansOverlap', single, selectedUsed, addedFcr, selectedResults, firstBinSize, binSizes); + return single; +} + + +/** Round one edge of fcr (fcr.domain[edge]) by binSize. + */ +function featuresCountsResultsRound(fcr, edge, outwards, binSize) { + const fnName = 'featuresCountsResultsRound'; + /** + fcr |<-- binSize -->| + ... ------|---------------|-----------| + |---|---|---|---|---|---|- ... + shadowing fcr (already accepted; smaller binSize) + + edge outwards direction up + 0 true true 0 + 0 true false 1 + + 0 false true 1 + 0 false false 0 + + 1 true true 1 + 1 true false 0 + + 1 false true 0 + 1 false false 1 + + Check the above truth table with : + [0,1].forEach((edge) => [true, false].forEach( + (outwards) => [true, false].forEach( + (direction) => console.log(edge, outwards, direction, (edge === 1) ^ !direction ^ !outwards)))); + + */ + + + + { + // if edge is 1 and direction is positive and outwards then round up + let + edgeLocn = fcr.domain[edge], + direction = intervalSign(fcr.domain), + up = (edge === 1) ^ !direction ^ !outwards, + r = roundToBinSize(edgeLocn, binSize, up); + if (true) { + // doesn't affect domain2Fcr. + fcr.domain[edge] = r; + } else { + /** The fcr to be added can be shadowed by multiple accepted fcrs, + * which should reduce its size. i.e. if .rounded[edge] is already + * defined, then it should be further from .domain[+!edge] than r. + */ + if ((fcr.rounded[edge] !== undefined) && (r !== fcr.rounded[edge]) + && (intervalSign([fcr.domain[+!edge], r]) !== intervalSign([r, fcr.rounded[edge]]))) { + dLog(fnName, r, fcr, edge, outwards, binSize, edgeLocn, direction, up, fcr.rounded, fcr.domain); + } else if (Math.abs(fcr.rounded[edge] - fcr.domain[edge]) > binSize) { + dLog(fnName, r, fcr, edge, outwards, binSize, edgeLocn, direction, up, fcr.rounded, fcr.domain); + } else { + fcr.rounded[edge] = r; + } + } + } +} + +function roundToBinSize(edgeLocn, binSize, up) { + let r = Math.trunc(edgeLocn / binSize + (up ? 1 : 0)) * binSize; + return r; +} + +/*----------------------------------------------------------------------------*/ + +/** trace an array of FCR-s. formatted for pasting into web inspector console. 
+ */ +const +fcrsShow = function (fcrs) { fcrs.forEach((fcr) => console.log('featuresCountsResults show', fcr, fcr.domain, fcr.rounded, fcr.result[0], fcr.result[fcr.result.length-1])); } +; + +/*----------------------------------------------------------------------------*/ + +export { + featuresCountsResultsCheckOverlap, + featuresCountsResultsMerge, + featuresCountsResultsFilter, + featuresCountsResultsTidy, + featuresCountsResultsSansOverlap, +}; diff --git a/frontend/app/utils/draw/interval-overlap.js b/frontend/app/utils/draw/interval-overlap.js index 620f36812..c8edcce89 100644 --- a/frontend/app/utils/draw/interval-overlap.js +++ b/frontend/app/utils/draw/interval-overlap.js @@ -21,6 +21,7 @@ var trace_filter = 1; * The result is analogous to the comparator function (cmp) result. * Assume i[0] < i[1]. * @return 0 if v is in i, -1 if v < i, +1 if v > i + * The argument order is opposite to the similar function @see inRange() */ function inInterval(i, v) { let diff --git a/frontend/app/utils/draw/zoomPanCalcs.js b/frontend/app/utils/draw/zoomPanCalcs.js index 27c6f23dd..e88592f3e 100644 --- a/frontend/app/utils/draw/zoomPanCalcs.js +++ b/frontend/app/utils/draw/zoomPanCalcs.js @@ -20,6 +20,9 @@ const dLog = console.debug; /* copied from draw-map.js; this has already been split out of draw-map.js into * utils/graph-maths.js in an unpushed branch (8fccbd3). * Added : this version handles range[] being in -ve order, i.e. range[0] > range[1]. + * @param a point value + * @param range interval [start, end] + * The argument order is opposite to the similar function @see inInterval() */ function inRange(a, range) { @@ -138,6 +141,9 @@ function intervalSign(interval) { * @param inFilter true when called from zoomFilter() (d3.zoom().filter()), * false when called from zoom() (d3.zoom().on('zoom')); this indicates * variation of the event information structure. + * @return inFilter ? include : newDomain + * include is a flag for which true means don't filter out this event. + * newDomain is the new domain resulting from the zoom change. */ function wheelNewDomain(axis, axisApi, inFilter) { let yp = axis.y; @@ -146,7 +152,7 @@ function wheelNewDomain(axis, axisApi, inFilter) { * wheel, but if this can happen if there is an error in requesting block * features. */ - if (! yp) return; + if (! yp) return inFilter ? false : undefined; /** Access these fields from the DOM event : .shiftKey, .deltaY, .currentTarget. * When called from zoom(), d3.event is the d3 wrapper around the event, and * the DOM event is referenced by .sourceEvent, whereas in zoomFilter() @@ -247,7 +253,12 @@ function wheelNewDomain(axis, axisApi, inFilter) { console.log('mousePosition', mousePosition); let range = yp.range(), - rangeYCentre = mousePosition[1], + rangeYCentre = mousePosition[1]; + if (rangeYCentre === undefined) { + dLog('mousePosition has no [1]', mousePosition); + return false; + } + let /** This is the centre of zoom, i.e. the mouse position, not the centre of the axis. 
*/ centre = axisApi.axisRange2Domain(axis.axisName, rangeYCentre), @@ -315,4 +326,8 @@ function wheelNewDomain(axis, axisApi, inFilter) { /*----------------------------------------------------------------------------*/ -export { inRangeEither, subInterval, overlapInterval, wheelNewDomain }; +export { + inRange, inRangeEither, subInterval, overlapInterval, + intervalSign, + wheelNewDomain +}; diff --git a/frontend/app/utils/ember-devel.js b/frontend/app/utils/ember-devel.js index 5c058613b..8433009fb 100644 --- a/frontend/app/utils/ember-devel.js +++ b/frontend/app/utils/ember-devel.js @@ -36,4 +36,13 @@ function getAttrOrCP(object, attrName) { /*----------------------------------------------------------------------------*/ -export { parentOfType, elt0, getAttrOrCP }; +/** Display Ember Data store Object field values. for devel debug - this is not a public API. + * Before Ember V3 this was '_internalModel.__data' + */ +const _internalModel_data = '_internalModel._recordData.__data'; + + + +/*----------------------------------------------------------------------------*/ + +export { parentOfType, elt0, getAttrOrCP, _internalModel_data }; diff --git a/frontend/app/utils/hover.js b/frontend/app/utils/hover.js index 874cfd823..18f3f8d3e 100644 --- a/frontend/app/utils/hover.js +++ b/frontend/app/utils/hover.js @@ -69,6 +69,7 @@ function showHover(context, textFn, d, i, g) { delay: {show: 200, hide: 3000}, container: 'div#holder', placement : hoverNearElement ? "auto right" : "left", + // comment re. title versus content in @see draw-map.js: configureHorizTickHover() content : text }; if (! hoverNearElement) { @@ -89,25 +90,23 @@ function hideHover() { } - +/** Wrapper for configureHover(), supporting existing uses in + * utils/draw/chart1.js : ChartLine.prototype.{bars,linebars} + */ function configureHorizTickHover(d, block, hoverTextFn) { // console.log("configureHorizTickHover", d, this, this.outerHTML); - let text = hoverTextFn(d, block); - let node_ = this; - if ($(node_).popover) - $(node_) - .popover({ - trigger : "click hover", - sticky: true, - delay: {show: 200, hide: 3000}, - container: 'div#holder', - placement : "auto right", - positionFixed : true, - // comment re. title versus content in @see draw-map.js: configureHorizTickHover() - content : text, - html: false - }); + /** client data : block for hoverTextFn() */ + let context = {block}; + configureHover.apply(this, [context, (context_, d) => hoverTextFn(d, context_.block)]); } -/*------------------------------------------------------------------------*/ +/* The original of this function configureHorizTickHover (up to 3e674205) is + * very similar to draw-map : configureHorizTickHover() which was factored from. + * Using configureHover() is equivalent, minor differences : + * this version had positionFixed : true, and html: false, + * and configureHover() adds hoverNearElement ... "left". 
+ */ + +/*----------------------------------------------------------------------------*/ + export { configureHover, configureHorizTickHover }; diff --git a/frontend/app/utils/interval-calcs.js b/frontend/app/utils/interval-calcs.js index 58590ba05..589fd5334 100644 --- a/frontend/app/utils/interval-calcs.js +++ b/frontend/app/utils/interval-calcs.js @@ -1,3 +1,9 @@ +import { intervalSign } from './draw/zoomPanCalcs'; +import { inInterval } from './draw/interval-overlap'; +import { maybeFlip } from './draw/axis'; + +/*----------------------------------------------------------------------------*/ + /* related : see utils/draw/zoomPanCalcs.js * backend/common/utilities/interval-overlap.js */ @@ -6,6 +12,8 @@ /* global d3 */ +const dLog = console.debug; + /*----------------------------------------------------------------------------*/ /** Determine the absolute length of the given interval or domain. @@ -101,6 +109,95 @@ function intervalOrdered(interval) { return interval; } +/** @return i1 - i2, i.e. the part of i1 outside of i2 + * Result direction is the same as the direction of i1. + * @param operation 'intersect', 'union', 'subtract'. + * + * @param i1, i2 are intervals, i.e. [start, end] + * (i1 and i2 have the same direction) + * i1 and i2 overlap, and neither is a sub-interval of the other. + * @see subInterval(), featuresCountsResultsMerge(). + */ +function intervalJoin(operation, i1, i2) { + /** + + |----------------| i1 + |-----------------| i2 + |-------|--------|--------| + outside inside outside + |--------| intersect + |-------|--------|--------| union + |-------| subtract + + */ + const inside = 1, outside = 0; + let + cmp1 = i1.map((i) => inInterval(i2, i)), + /** i1[indexes1[outside]] is outside i2, and + * i1[indexes1[inside]] is inside i2. + */ + indexes1 = cmp1.map((i) => (+(i === 0))), + /** could calculate cmp2, indexes2, but for current use + * (featureCountsResults) can assume that direction of i1 and i2 is + * the same, so i2[indexes1[outside]] is inside i1. + */ + interval = + (operation === 'intersect') ? [i1[indexes1[inside]], i2[indexes1[outside]]] : + (operation === 'union') ? [i1[indexes1[outside]], i2[indexes1[inside]]] : + (operation === 'subtract') ? [i1[indexes1[outside]], i2[indexes1[outside]]] : + undefined; + + let flip = intervalSign(interval) !== intervalSign(i1); + interval = maybeFlip(interval, flip); + + dLog('intervalJoin', operation, interval, i1, i2, cmp1, indexes1); + return interval; +} + +/** Subtract i2 from i1, where i2 is a sub-interval of i1. + * If i2 overlaps i1 but is not a sub-interval of it, then use intervalJoin('subtract', i1, i2). + * + * This is applicable + * when i2 is a subInterval of i1, and hence the result is 2 intervals + * in an array; (used by featuresCountsResultsSansOverlap()). + */ +function intervalSubtract2(i1, i2) { + /** + + |-------------------------| i1 + |--------| i2 + |-------| |--------| subtract2 + + */ + + let + sameDir = intervalSign(i1) === intervalSign(i2), + start1 = 0, + end1 = 1 - start1, + start2 = sameDir ? start1 : end1, + end2 = 1 - start2, + interval = [[i1[start1], i2[start2]], [i2[end2], i1[end1]]]; + + interval.forEach((i3, i) => { if (! intervalSign(i3)) { console.log('intervalSubtract2', i3, i); } }); + dLog('intervalSubtract2', interval, i1, i2); + return interval; +} + +/** @return true if the 2 intervals have a common endpoint. + * Form of i1 and i2 is : [number, number]. 
+ * The implementation will handle other vector lengths; if sameDir + * then i2.length is expected to be >= i1.length + * @param sameDir if true then assume i1 and i2 have the same direction. + */ +function intervalsAbut(i1, i2, sameDir) { + let + matchFn = sameDir ? + (x1, i) => x1 === i2[i] : + (x1, i) => i2.find((x2, j) => (x1 === x2)), + match = i1.find(matchFn); + return match; +} + /*----------------------------------------------------------------------------*/ /** Keep the top byte of the mantissa and clear the rest. @@ -132,5 +229,9 @@ export { intervalSize, intervalLimit, intervalOutside, intervalMerge, intervalExtent, intervalOverlapCoverage, intervalOverlap, + intervalOrdered, + intervalJoin, + intervalSubtract2, + intervalsAbut, truncateMantissa }; diff --git a/frontend/app/utils/stacks-adj.js b/frontend/app/utils/stacks-adj.js index 38637b893..4c4fa1cdc 100644 --- a/frontend/app/utils/stacks-adj.js +++ b/frontend/app/utils/stacks-adj.js @@ -55,8 +55,8 @@ function collateAdjacentAxes() let dataBlocks = []; for (let stackIndex=0; stackIndex 2) { console.log('collateAdjacentAxes', stackIndex, fAxis_s0, stackIndex+1, fAxis_s1); diff --git a/frontend/app/utils/stacks.js b/frontend/app/utils/stacks.js index b90ae1e4e..5a83a28cd 100644 --- a/frontend/app/utils/stacks.js +++ b/frontend/app/utils/stacks.js @@ -223,7 +223,7 @@ Block.prototype.datasetHasParent = function() { }; /** @return true if this Block is a data block, not the reference block. */ -Block.prototype.isData = function() { +Block.prototype.isData = function(showPaths) { let axis = this.getAxis(), blockR = this.block, /** The most significant check here is blockR.get('featureCount'); now that we @@ -239,8 +239,11 @@ Block.prototype.isData = function() { * in populating the blocks parameter of getBlockFeaturesInterval(). * (checking if features is defined and features.length > 0) */ - isData = - (blockR.get('namespace') || blockR.get('isChartable') || blockR.get('features.length') || blockR.get('featureCount') || ! this.isReference()); + isData = blockR.get('isData'); + // (blockR.get('namespace') || blockR.get('isChartable') || blockR.get('features.length') || blockR.get('featureCount') || ! this.isReference()); + if (showPaths) { + isData &&= blockR.get('showPaths'); + } return isData; }; @@ -320,7 +323,11 @@ Stacked.axis1dRemove = function (axisName, axis1dComponent) { delete axes1d[axisName]; }; Stacked.prototype.getAxis1d = function () { - let axis1d = this.axis1d || (this.axis1d = axes1d[this.axisName]); + let axis1d = this.axis1d, + a1; + if (! axis1d && (a1 = axes1d[this.axisName])) { + Ember.set(this, 'axis1d', a1); + } if (axis1d && (axis1d.isDestroying || axis1d.isDestroying)) { dLog('getAxis1d() isDestroying', axis1d, this); axis1d = this.axis1d = undefined; @@ -783,16 +790,17 @@ Stack.prototype.childBlocks = function (names) /** @return all the blocks in this axis which are data blocks, not reference blocks. * Data blocks are recognised by having a .namespace; * @param visible if true then exclude blocks which are not visible + * @param showPaths if true then exclude blocks which are not for paths alignment */ -Stacked.prototype.dataBlocks = function (visible) +Stacked.prototype.dataBlocks = function (visible, showPaths) { let db = this.blocks .filter(function (block) { return (! 
visible || block.visible) - && block.isData(); }); + && block.isData(showPaths); }); if (trace_stack > 1) dLog( - 'Stacked', 'blocks', visible, this.blocks.map(function (block) { return block.longName(); }), + 'Stacked', 'blocks', visible, showPaths, this.blocks.map(function (block) { return block.longName(); }), this.axisName, this.mapName, 'dataBlocks', db.map(function (block) { return block.longName(); })); return db; @@ -800,13 +808,14 @@ Stacked.prototype.dataBlocks = function (visible) /** @return all the blocks in this Stack which are data blocks, not reference blocks. * Data blocks are recognised by having a .namespace; * this is a different criteria to @see Stack.prototype.dataBlocks0(). + * @param showPaths if true then exclude blocks which are not for paths alignment */ -Stack.prototype.dataBlocks = function () +Stack.prototype.dataBlocks = function (showPaths) { /** Currently only visible == true is used, but could make this a param. */ let visible = true; let axesDataBlocks = this.axes - .map(function (stacked) { return stacked.dataBlocks(visible); } ), + .map(function (stacked) { return stacked.dataBlocks(visible, showPaths); } ), db = Array.prototype.concat.apply([], axesDataBlocks) ; // Stacked.longName() handles blocks also. @@ -2223,7 +2232,7 @@ Stacked.prototype.axisDimensions = function () let currentPosition = axis1d && axis1d.get('currentPosition'); if (! currentPosition || ! isEqual(domain, currentPosition.yDomain)) - dLog('axisDimensions', domain, currentPosition.yDomain, zoomed, currentPosition); + dLog('axisDimensions', domain, currentPosition && currentPosition.yDomain, zoomed, currentPosition); return dim; }; /** Set the domain of the current position to the given domain diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 4fe1f40ab..0777e72cb 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -1,6 +1,6 @@ { "name": "pretzel-frontend", - "version": "2.3.1", + "version": "2.6.1", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -26258,6 +26258,510 @@ } } }, + "ember-file-upload": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/ember-file-upload/-/ember-file-upload-3.0.5.tgz", + "integrity": "sha512-muhAI7peP6kXckzrvtnhzbr6NXhdbJRAWYxXm91E7Sb22qcKYp7CXZxONnwHqtArrBNG8OEfE2sng11p+DvAVA==", + "dev": true, + "requires": { + "@babel/core": "^7.4.4", + "ember-cli-babel": "^7.7.3", + "ember-cli-htmlbars": "^3.0.1" + }, + "dependencies": { + "@babel/code-frame": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.13.tgz", + "integrity": "sha512-HV1Cm0Q3ZrpCR93tkWOYiuYIgLxZXZFVG2VgK+MBWjUqZTundupbfx2aXarXuw5Ko5aMcjtJgbSs4vUGBS5v6g==", + "dev": true, + "requires": { + "@babel/highlight": "^7.12.13" + } + }, + "@babel/compat-data": { + "version": "7.13.12", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.13.12.tgz", + "integrity": "sha512-3eJJ841uKxeV8dcN/2yGEUy+RfgQspPEgQat85umsE1rotuquQ2AbIub4S6j7c50a2d+4myc+zSlnXeIHrOnhQ==", + "dev": true + }, + "@babel/core": { + "version": "7.13.14", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.13.14.tgz", + "integrity": "sha512-wZso/vyF4ki0l0znlgM4inxbdrUvCb+cVz8grxDq+6C9k6qbqoIJteQOKicaKjCipU3ISV+XedCqpL2RJJVehA==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.12.13", + "@babel/generator": "^7.13.9", + "@babel/helper-compilation-targets": "^7.13.13", + "@babel/helper-module-transforms": "^7.13.14", + "@babel/helpers": "^7.13.10", + "@babel/parser": 
"^7.13.13", + "@babel/template": "^7.12.13", + "@babel/traverse": "^7.13.13", + "@babel/types": "^7.13.14", + "convert-source-map": "^1.7.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.1.2", + "semver": "^6.3.0", + "source-map": "^0.5.0" + } + }, + "@babel/generator": { + "version": "7.13.9", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.13.9.tgz", + "integrity": "sha512-mHOOmY0Axl/JCTkxTU6Lf5sWOg/v8nUa+Xkt4zMTftX0wqmb6Sh7J8gvcehBw7q0AhrhAR+FDacKjCZ2X8K+Sw==", + "dev": true, + "requires": { + "@babel/types": "^7.13.0", + "jsesc": "^2.5.1", + "source-map": "^0.5.0" + } + }, + "@babel/helper-compilation-targets": { + "version": "7.13.13", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.13.13.tgz", + "integrity": "sha512-q1kcdHNZehBwD9jYPh3WyXcsFERi39X4I59I3NadciWtNDyZ6x+GboOxncFK0kXlKIv6BJm5acncehXWUjWQMQ==", + "dev": true, + "requires": { + "@babel/compat-data": "^7.13.12", + "@babel/helper-validator-option": "^7.12.17", + "browserslist": "^4.14.5", + "semver": "^6.3.0" + } + }, + "@babel/helper-function-name": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.12.13.tgz", + "integrity": "sha512-TZvmPn0UOqmvi5G4vvw0qZTpVptGkB1GL61R6lKvrSdIxGm5Pky7Q3fpKiIkQCAtRCBUwB0PaThlx9vebCDSwA==", + "dev": true, + "requires": { + "@babel/helper-get-function-arity": "^7.12.13", + "@babel/template": "^7.12.13", + "@babel/types": "^7.12.13" + } + }, + "@babel/helper-get-function-arity": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.12.13.tgz", + "integrity": "sha512-DjEVzQNz5LICkzN0REdpD5prGoidvbdYk1BVgRUOINaWJP2t6avB27X1guXK1kXNrX0WMfsrm1A/ZBthYuIMQg==", + "dev": true, + "requires": { + "@babel/types": "^7.12.13" + } + }, + "@babel/helper-member-expression-to-functions": { + "version": "7.13.12", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.13.12.tgz", + "integrity": "sha512-48ql1CLL59aKbU94Y88Xgb2VFy7a95ykGRbJJaaVv+LX5U8wFpLfiGXJJGUozsmA1oEh/o5Bp60Voq7ACyA/Sw==", + "dev": true, + "requires": { + "@babel/types": "^7.13.12" + } + }, + "@babel/helper-module-imports": { + "version": "7.13.12", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.13.12.tgz", + "integrity": "sha512-4cVvR2/1B693IuOvSI20xqqa/+bl7lqAMR59R4iu39R9aOX8/JoYY1sFaNvUMyMBGnHdwvJgUrzNLoUZxXypxA==", + "dev": true, + "requires": { + "@babel/types": "^7.13.12" + } + }, + "@babel/helper-module-transforms": { + "version": "7.13.14", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.13.14.tgz", + "integrity": "sha512-QuU/OJ0iAOSIatyVZmfqB0lbkVP0kDRiKj34xy+QNsnVZi/PA6BoSoreeqnxxa9EHFAIL0R9XOaAR/G9WlIy5g==", + "dev": true, + "requires": { + "@babel/helper-module-imports": "^7.13.12", + "@babel/helper-replace-supers": "^7.13.12", + "@babel/helper-simple-access": "^7.13.12", + "@babel/helper-split-export-declaration": "^7.12.13", + "@babel/helper-validator-identifier": "^7.12.11", + "@babel/template": "^7.12.13", + "@babel/traverse": "^7.13.13", + "@babel/types": "^7.13.14" + } + }, + "@babel/helper-optimise-call-expression": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.12.13.tgz", + "integrity": 
"sha512-BdWQhoVJkp6nVjB7nkFWcn43dkprYauqtk++Py2eaf/GRDFm5BxRqEIZCiHlZUGAVmtwKcsVL1dC68WmzeFmiA==", + "dev": true, + "requires": { + "@babel/types": "^7.12.13" + } + }, + "@babel/helper-replace-supers": { + "version": "7.13.12", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.13.12.tgz", + "integrity": "sha512-Gz1eiX+4yDO8mT+heB94aLVNCL+rbuT2xy4YfyNqu8F+OI6vMvJK891qGBTqL9Uc8wxEvRW92Id6G7sDen3fFw==", + "dev": true, + "requires": { + "@babel/helper-member-expression-to-functions": "^7.13.12", + "@babel/helper-optimise-call-expression": "^7.12.13", + "@babel/traverse": "^7.13.0", + "@babel/types": "^7.13.12" + } + }, + "@babel/helper-simple-access": { + "version": "7.13.12", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.13.12.tgz", + "integrity": "sha512-7FEjbrx5SL9cWvXioDbnlYTppcZGuCY6ow3/D5vMggb2Ywgu4dMrpTJX0JdQAIcRRUElOIxF3yEooa9gUb9ZbA==", + "dev": true, + "requires": { + "@babel/types": "^7.13.12" + } + }, + "@babel/helper-split-export-declaration": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.12.13.tgz", + "integrity": "sha512-tCJDltF83htUtXx5NLcaDqRmknv652ZWCHyoTETf1CXYJdPC7nohZohjUgieXhv0hTJdRf2FjDueFehdNucpzg==", + "dev": true, + "requires": { + "@babel/types": "^7.12.13" + } + }, + "@babel/helper-validator-identifier": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz", + "integrity": "sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw==", + "dev": true + }, + "@babel/helper-validator-option": { + "version": "7.12.17", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.12.17.tgz", + "integrity": "sha512-TopkMDmLzq8ngChwRlyjR6raKD6gMSae4JdYDB8bByKreQgG0RBTuKe9LRxW3wFtUnjxOPRKBDwEH6Mg5KeDfw==", + "dev": true + }, + "@babel/helpers": { + "version": "7.13.10", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.13.10.tgz", + "integrity": "sha512-4VO883+MWPDUVRF3PhiLBUFHoX/bsLTGFpFK/HqvvfBZz2D57u9XzPVNFVBTc0PW/CWR9BXTOKt8NF4DInUHcQ==", + "dev": true, + "requires": { + "@babel/template": "^7.12.13", + "@babel/traverse": "^7.13.0", + "@babel/types": "^7.13.0" + } + }, + "@babel/highlight": { + "version": "7.13.10", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.13.10.tgz", + "integrity": "sha512-5aPpe5XQPzflQrFwL1/QoeHkP2MsA4JCntcXHRhEsdsfPVkvPi2w7Qix4iV7t5S/oC9OodGrggd8aco1g3SZFg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.12.11", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "@babel/parser": { + "version": "7.13.13", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.13.13.tgz", + "integrity": "sha512-OhsyMrqygfk5v8HmWwOzlYjJrtLaFhF34MrfG/Z73DgYCI6ojNUTUp2TYbtnjo8PegeJp12eamsNettCQjKjVw==", + "dev": true + }, + "@babel/template": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.12.13.tgz", + "integrity": "sha512-/7xxiGA57xMo/P2GVvdEumr8ONhFOhfgq2ihK3h1e6THqzTAkHbkXgB0xI9yeTfIUoH3+oAeHhqm/I43OTbbjA==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.12.13", + "@babel/parser": "^7.12.13", + "@babel/types": "^7.12.13" + } + }, + "@babel/traverse": { + "version": "7.13.13", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.13.13.tgz", + 
"integrity": "sha512-CblEcwmXKR6eP43oQGG++0QMTtCjAsa3frUuzHoiIJWpaIIi8dwMyEFUJoXRLxagGqCK+jALRwIO+o3R9p/uUg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.12.13", + "@babel/generator": "^7.13.9", + "@babel/helper-function-name": "^7.12.13", + "@babel/helper-split-export-declaration": "^7.12.13", + "@babel/parser": "^7.13.13", + "@babel/types": "^7.13.13", + "debug": "^4.1.0", + "globals": "^11.1.0" + } + }, + "@babel/types": { + "version": "7.13.14", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.13.14.tgz", + "integrity": "sha512-A2aa3QTkWoyqsZZFl56MLUsfmh7O0gN41IPvXAE/++8ojpbz12SszD7JEGYVdn4f9Kt4amIei07swF1h4AqmmQ==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.12.11", + "lodash": "^4.17.19", + "to-fast-properties": "^2.0.0" + } + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "broccoli-persistent-filter": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/broccoli-persistent-filter/-/broccoli-persistent-filter-2.3.1.tgz", + "integrity": "sha512-hVsmIgCDrl2NFM+3Gs4Cr2TA6UPaIZip99hN8mtkaUPgM8UeVnCbxelCvBjUBHo0oaaqP5jzqqnRVvb568Yu5g==", + "dev": true, + "requires": { + "async-disk-cache": "^1.2.1", + "async-promise-queue": "^1.0.3", + "broccoli-plugin": "^1.0.0", + "fs-tree-diff": "^2.0.0", + "hash-for-dep": "^1.5.0", + "heimdalljs": "^0.2.1", + "heimdalljs-logger": "^0.1.7", + "mkdirp": "^0.5.1", + "promise-map-series": "^0.2.1", + "rimraf": "^2.6.1", + "rsvp": "^4.7.0", + "symlink-or-copy": "^1.0.1", + "sync-disk-cache": "^1.3.3", + "walk-sync": "^1.0.0" + } + }, + "browserslist": { + "version": "4.16.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.16.3.tgz", + "integrity": "sha512-vIyhWmIkULaq04Gt93txdh+j02yX/JzlyhLYbV3YQCn/zvES3JnY7TifHHvvr1w5hTDluNKMkV05cs4vy8Q7sw==", + "dev": true, + "requires": { + "caniuse-lite": "^1.0.30001181", + "colorette": "^1.2.1", + "electron-to-chromium": "^1.3.649", + "escalade": "^3.1.1", + "node-releases": "^1.1.70" + } + }, + "caniuse-lite": { + "version": "1.0.30001205", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001205.tgz", + "integrity": "sha512-TL1GrS5V6LElbitPazidkBMD9sa448bQDDLrumDqaggmKFcuU2JW1wTOHJPukAcOMtEmLcmDJEzfRrf+GjM0Og==", + "dev": true + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "convert-source-map": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.7.0.tgz", + "integrity": "sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.1" + } + }, + "debug": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "dev": true, + "requires": { + "ms": "2.1.2" + } + }, + "electron-to-chromium": { + "version": "1.3.704", + "resolved": 
"https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.704.tgz", + "integrity": "sha512-6cz0jvawlUe4h5AbfQWxPzb+8LzVyswGAWiGc32EJEmfj39HTQyNPkLXirc7+L4x5I6RgRkzua8Ryu5QZqc8cA==", + "dev": true + }, + "ember-cli-htmlbars": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/ember-cli-htmlbars/-/ember-cli-htmlbars-3.1.0.tgz", + "integrity": "sha512-cgvRJM73IT0aePUG7oQ/afB7vSRBV3N0wu9BrWhHX2zkR7A7cUBI7KC9VPk6tbctCXoM7BRGsCC4aIjF7yrfXA==", + "dev": true, + "requires": { + "broccoli-persistent-filter": "^2.3.1", + "hash-for-dep": "^1.5.1", + "json-stable-stringify": "^1.0.1", + "strip-bom": "^3.0.0" + } + }, + "ensure-posix-path": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ensure-posix-path/-/ensure-posix-path-1.1.1.tgz", + "integrity": "sha512-VWU0/zXzVbeJNXvME/5EmLuEj2TauvoaTz6aFYK1Z92JCBlDlZ3Gu0tuGR42kpW1754ywTs+QB0g5TP0oj9Zaw==", + "dev": true + }, + "fs-tree-diff": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fs-tree-diff/-/fs-tree-diff-2.0.1.tgz", + "integrity": "sha512-x+CfAZ/lJHQqwlD64pYM5QxWjzWhSjroaVsr8PW831zOApL55qPibed0c+xebaLWVr2BnHFoHdrwOv8pzt8R5A==", + "dev": true, + "requires": { + "@types/symlink-or-copy": "^1.2.0", + "heimdalljs-logger": "^0.1.7", + "object-assign": "^4.1.0", + "path-posix": "^1.0.0", + "symlink-or-copy": "^1.1.8" + } + }, + "gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true + }, + "globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true + }, + "hash-for-dep": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/hash-for-dep/-/hash-for-dep-1.5.1.tgz", + "integrity": "sha512-/dQ/A2cl7FBPI2pO0CANkvuuVi/IFS5oTyJ0PsOb6jW6WbVW1js5qJXMJTNbWHXBIPdFTWFbabjB+mE0d+gelw==", + "dev": true, + "requires": { + "broccoli-kitchen-sink-helpers": "^0.3.1", + "heimdalljs": "^0.2.3", + "heimdalljs-logger": "^0.1.7", + "path-root": "^0.1.1", + "resolve": "^1.10.0", + "resolve-package-path": "^1.0.11" + } + }, + "is-core-module": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz", + "integrity": "sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true + }, + "json5": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", + "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + } + }, + "lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": 
"sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "matcher-collection": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/matcher-collection/-/matcher-collection-1.1.2.tgz", + "integrity": "sha512-YQ/teqaOIIfUHedRam08PB3NK7Mjct6BvzRnJmpGDm8uFXpNr1sbY4yuflI5JcEs6COpYA0FpRQhSDBf1tT95g==", + "dev": true, + "requires": { + "minimatch": "^3.0.2" + } + }, + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "dev": true + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node-releases": { + "version": "1.1.71", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.71.tgz", + "integrity": "sha512-zR6HoT6LrLCRBwukmrVbHv0EpEQjksO6GmFcZQQuCAy139BEsoVKPYnf3jongYW83fAa1torLGYwxxky/p28sg==", + "dev": true + }, + "path-parse": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", + "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", + "dev": true + }, + "resolve": { + "version": "1.20.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", + "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", + "dev": true, + "requires": { + "is-core-module": "^2.2.0", + "path-parse": "^1.0.6" + } + }, + "rsvp": { + "version": "4.8.5", + "resolved": "https://registry.npmjs.org/rsvp/-/rsvp-4.8.5.tgz", + "integrity": "sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA==", + "dev": true + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + }, + "to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", + "dev": true + }, + "walk-sync": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/walk-sync/-/walk-sync-1.1.4.tgz", + "integrity": "sha512-nowc9thB/Jg0KW4TgxoRjLLYRPvl3DB/98S89r4ZcJqq2B0alNcKDh6pzLkBSkPMzRSMsJghJHQi79qw0YWEkA==", + "dev": true, + "requires": { + "@types/minimatch": "^3.0.3", + "ensure-posix-path": "^1.1.0", + "matcher-collection": "^1.1.1" + } + } + } + }, "ember-focus-trap": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/ember-focus-trap/-/ember-focus-trap-0.3.2.tgz", @@ -33477,6 +33981,15 @@ } } }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, 
"has-ansi": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index 114378e27..244726258 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,6 +1,6 @@ { "name": "pretzel-frontend", - "version": "2.5.0", + "version": "2.6.1", "description": "Frontend code for Pretzel", "repository": "", "license": "MIT", @@ -52,6 +52,7 @@ "ember-data-model-fragments": "^4.0.0", "ember-export-application-global": "^2.0.1", "ember-fetch": "^8.0.2", + "ember-file-upload": "^3.0.5", "ember-load-initializers": "^2.1.2", "ember-maybe-import-regenerator": "^0.1.6", "ember-modal-dialog": "^3.0.1", diff --git a/package.json b/package.json index 7b397932d..c422ec759 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "pretzel", "private": true, - "version": "2.5.0", + "version": "2.6.1", "dependencies": { }, "repository" : diff --git a/resources/data_templates/datasets.ots b/resources/data_templates/datasets.ots new file mode 100644 index 000000000..e4c7ea0d6 Binary files /dev/null and b/resources/data_templates/datasets.ots differ diff --git a/resources/data_templates/datasets.xltx b/resources/data_templates/datasets.xltx new file mode 100644 index 000000000..f1be99cb6 Binary files /dev/null and b/resources/data_templates/datasets.xltx differ diff --git a/resources/emacs_config.el b/resources/emacs_config.el index 7144ea607..6e8bfc652 100644 --- a/resources/emacs_config.el +++ b/resources/emacs_config.el @@ -7,8 +7,12 @@ ;; The path of this directory. ;; Used to calculate the git work-tree root dir. (setq mmv_Dav127 + (replace-regexp-in-string "^~/" "$HOME/" (replace-regexp-in-string "/resources/$" "" (file-name-directory load-file-name) ) - ) + )) +;; same as $MMv +(setq MMv + (replace-regexp-in-string "/pretzel.*" "" mmv_Dav127)) ;;------------------------------------------------------------------------------ @@ -58,7 +62,13 @@ (setq safe-local-variable-values `((create-lockfiles . nil) + (js2-basic-offset . 2) (js2-bounce-indent-p . t) + (js2-pretty-multiline-declarations . nil) + ;; GNU style + (perl-indent-level . 2) + (perl-continued-statement-offset . 2) + (perl-continued-brace-offset . 0) ) ) @@ -70,7 +80,7 @@ ;; To make this code flexible wrt directory path, the path of the git work-tree ;; is calculated and the settings are configured to apply for that tree. (dir-locals-set-directory-class - mmv_Dav127 + MMv ;; was mmv_Dav127 'project-root-directory) diff --git a/resources/tools/dev/functions_convert.bash b/resources/tools/dev/functions_convert.bash new file mode 100644 index 000000000..4cc14424c --- /dev/null +++ b/resources/tools/dev/functions_convert.bash @@ -0,0 +1,83 @@ +#!/bin/bash + +# Usage : source pretzel/resources/tools/dev/functions_convert.bash + + +# sp=~/pretzel/resources/tools/dev/snps2Dataset.pl; +# commonName=Chickpea; +# shortName=WGS_SNP; +# platform=WGS_SNP; +# parentName=... 
+ +# genBankRename= sed script of the form : +# s/gi|442654316|gb|CM001764.1|/Ca1/ +# s/gi|442654315|gb|CM001765.1|/Ca2/ + +# setup : +# mkdir out out_json +# for i in *.xlsx; do echo $i; ssconvert -S "$i" out/"$i.%s.csv"; done + + +function snp1() { + echo "$i"; <"$i" tail -n +2 | sed -f $genBankRename | sort -t, -k 2 | \ + $sp -d "$parentName.$datasetName" -s "$shortName" -p $parentName -n"$parentName:$platform" -c "$commonName" \ + > ../out_json/"$i".json ; ls -gG ../out_json/"$i".json +} +function datasetName2shortName() { + sed 's/_Submission//ig;s/_Gydle//ig;s/SSRs/SSR/;s/SNPs/SNP/;s/^CP_//;s/FieldPea//;s/FABABEAN_//;s/FABA_//;s/^FB_//;s/_FP$//;s/^Len_//;s/Lentil_//;s/inhouse_Pretzel//;s/ (2)//' ; } + +function fileName2DatasetName() { + sed -n 's/\.csv$//;s/[ _]*Linkage[ _]*map[_ ]*//ig;s/Pretzel_submission_//ig;s/ $//;s/ map$//i;s/\([^ ls]\)[xX]\([^ ls]\)/\1 x \2/g;s/ x / x /ig;s/.*\.xlsx\.//p;'; } + +# env var $snpFile is the name of the file which contains SNPs which associate the markers in this map file with chromosome names +# See also mapChrsCN() +# usage e.g. snpFile=*mission*CP_EST_SNP-OPA* +function mapChrs() { + lm_c=$( awk -F, ' { print $2; }' "$i" | uniq) + datasetName=$( echo "$i" | fileName2DatasetName ); echo "$datasetName $i"; + mkdir chrSnps/"$datasetName" + if [ -f chrSnps/"$datasetName".chrCount ] + then + rm chrSnps/"$datasetName".chrCount + fi + for j in $lm_c; do echo $j; awk -F, "/,$j,/ {print \$1;}" "$i" >chrSnps/"$datasetName"/$j; done + for j in $(cd chrSnps/"$datasetName"; ls ); do suffix=$(echo $j | sed -n "s/.*\(\..*\)/\1/p"); fgrep -f "chrSnps/$datasetName/$j" $snpFile | sed -f $genBankRename | awk -F, '{a[$2]++;} END {for (i in a) print a[i], i;}' | sort -n -r | head -1 | tee -a chrSnps/"$datasetName".chrCount | awk ' {printf("s/,%s,/,%s%s,/\n", "'$j'", $2, "'$suffix'"); }' ; done > chrSnps/"$datasetName".chrRename.sed +} + +function map1() { + j=$(echo "$i" | fileName2DatasetName); \ + datasetName=$j; + echo "$j"; <"$i" sed -f chrSnps/"$datasetName".chrRename.sed | $sp -d "$j" -p '' -n 'SNP_OPA' -c "$commonName" -g > ../out_json/"$i".json ; ls -gG ../out_json/"$i".json +} + + +# Convert a linkage / genetic map from csv to Pretzel json. +# Similar to mapChrs() except the column order here is assumed to be +# columnsKeyString="chr name pos" +# i.e. 
chr is in $1, name is in $2 (awk) +# This also impacts the regexp /^$j +# +# snpFile=*mission*CP_EST_SNP-OPA* +# snpFile=*CP_GBS-TC* +function mapChrsCN() { + lm_c=$( awk -F, ' { print $1; }' "$i" | uniq) + datasetName=$( echo "$i" | fileName2DatasetName ); echo "$datasetName $i"; + mkdir chrSnps/"$datasetName" + for j in $lm_c; do echo $j; awk -F, "/^$j,/ {print \$2;}" "$i" >chrSnps/"$datasetName"/$j; done + for j in $(cd chrSnps/"$datasetName"; ls L*); do suffix=$(echo $j | sed -n "s/.*\(\..*\)/\1/p"); fgrep -f "chrSnps/$datasetName/$j" $snpFile | sed -f $genBankRename | awk -F, '{a[$2]++;} END {for (i in a) print a[i], i;}' | sort -n -r | head -1 | awk ' {printf("s/^%s,/%s%s,/\n", "'$j'", $2, "'$suffix'"); }' ; done > chrSnps/"$datasetName".chrRename.sed +} + +function CP_GM() { + export columnsKeyString="name chr pos"; + for i in *inkage*_LasseterxICC3996* ; do mapChrs; done + + export columnsKeyString="chr name pos"; + for i in *inkage*_SonalixGenesis* ; do mapChrsCN; done + + export columnsKeyString="chr name pos"; + for i in *inkage*_SonalixGenesis* ; do map1; done + + export columnsKeyString="name chr pos"; + for i in *inkage*_LasseterxICC3996* ; do map1; done + +} diff --git a/resources/tools/dev/functions_data.bash b/resources/tools/dev/functions_data.bash new file mode 100644 index 000000000..9b8810cb3 --- /dev/null +++ b/resources/tools/dev/functions_data.bash @@ -0,0 +1,119 @@ +#!/bin/bash + +# Usage : +# source pretzel/resources/tools/dev/functions_data.bash +# source ~/pretzel/resources/tools/functions_prod.bash +# setToken ... +# loadChr 2H SNPs.vcf.gz Barley_RGT_Planet_SNPs_10M Hordeum_vulgare_RGT_Planet_v1 + +# $URL is modified + +#------------------------------------------------------------------------------- + +snps2Dataset=~/tmp/snps2Dataset.value_0.pl + +# check to warn if size of $vcfGz is < $bytesAvailable / 10 +function checkSpace() { + vcfGz=$1 + bytesAvailable=$(df -k . | tail -n +2 | awk ' { print $4;}') + gzSize=$(ls -gG "$vcfGz" | awk ' { print $3; } ') + echo vcfGz="$vcfGz" bytesAvailable=$bytesAvailable gzSize=$gzSize +} + +#------------------------------------------------------------------------------- + +function datasetAndName2BlockId { + if [ $# -eq 2 ] ; then + datasetId=$1 + blockName=$2 + $dockerExec mongo --quiet $DB_NAME --eval "db.Block.find({ datasetId : \"$datasetId\", name : \"$blockName\" }).map( function (b) { return b._id.valueOf(); })" | tr -d '[:punct:] ' + fi +} + +dockerExec="docker exec $DIM" +DB_NAME=pretzel +# or local : +# dockerExec= +# DB_NAME=admin + + +#------------------------------------------------------------------------------- + + + +# Load 1 chromosome from the given .vcf.gz file +# +# This handles large chromosomes by splitting into chunks and using +# Datasets/createComplete for the first chunk then +# Blocks/blockFeaturesAdd for the remainder. +# +# Column 3 of the vcf is expected to be '.'; this is converted into a unique name "$1:$2" +# Split into 1e5 line chunks, to avoid JSON data too large for curl or node +# (node handles 1e6 OK, but got curl: option --data-binary: out of memory). +# +# Usage in file header comment above. +# +# @param chr not expected to contain a space or punctuation, e.g. 2H +# @param vcfGz +# @param datasetName Name of dataset to create and add the chromosome / block to +# @param parentName Name of parent / reference genome for this dataset to reference as parent. 
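loadChr() below handles one chromosome per call; this is a hedged sketch of driving it across a whole VCF, following the usage line in the file header comment above. The chromosome list and token variable are illustrative, and functions_prod.bash is assumed (per that comment) to provide setToken and uploadData:

# Sketch : load each chromosome in turn, stopping at the first failure.
source ~/pretzel/resources/tools/functions_prod.bash   # assumed to provide setToken and uploadData
source ~/pretzel/resources/tools/dev/functions_data.bash
setToken "$apiToken"             # placeholder token variable
for chr in 1H 2H 3H 4H 5H 6H 7H  # illustrative chromosome names
do
  loadChr $chr SNPs.vcf.gz Barley_RGT_Planet_SNPs_10M Hordeum_vulgare_RGT_Planet_v1 || break
done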
+function loadChr()
+{
+  # Use { ... ; return ; } rather than a subshell with exit, so that the argument check actually stops the function.
+  [ $# -eq 4 ] || { echo "Usage : loadChr chr vcfGz datasetName parentName" 1>&2 ; return 1 ; }
+  chr="$1"
+  vcfGz="$2"
+  datasetName="$3"
+  parentName="$4"
+  echo chr=$chr, vcfGz="$vcfGz", datasetName="$datasetName", parentName="$parentName"
+
+  checkSpace "$vcfGz"
+
+  mkdir ${chr}
+  gzip -d < "$vcfGz" | grep "^chr${chr}" | awk -F'\t' ' { printf("%s\t%s\t%s:%s\t%s\t%s\t\n", $1, $2, $1,$2, $4, $5); } ' | split -l 100000 - ${chr}/
+
+  # cd ${chr}
+
+  echo URL="$URL"; sleep 5
+
+  for splitChunk in $chr/[a-z][a-z]; do
+    echo $splitChunk;
+    case $splitChunk in
+      */aa)
+        export URL=localhost:8080/api/Datasets/createComplete
+        < "$splitChunk" "$snps2Dataset" -d "$datasetName" -p "$parentName" > "$splitChunk".json
+        status=$?
+        if [ $status -ne 0 ]
+        then
+          echo 1>&2 Exit due to error. "$splitChunk" not loaded.;
+          return $status
+        fi
+
+        # The normal output is small, but error output could be the whole json, so |cut|head.
+        time uploadData "$splitChunk".json 2>&1 | cut -c-200 | head -100
+        # $? would be the status of head; PIPESTATUS[0] is the status of uploadData.
+        status=${PIPESTATUS[0]}
+        if [ $status -ne 0 ]
+        then
+          echo 1>&2 Exit due to error. "$splitChunk".json not loaded.;
+          return $status
+        fi
+        # rm "$splitChunk".json
+
+        blockId=$(datasetAndName2BlockId "$datasetName" ${chr} )
+        echo blockId=$blockId
+        URL=$(echo $URL | sed 's,Datasets/createComplete,Blocks/blockFeaturesAdd,')
+        echo URL="$URL"
+        ;;
+      *)
+        # > $splitChunk.json && time
+        < $splitChunk "$snps2Dataset" -b $blockId | uploadData - 2>&1 | cut -c-200 | head -100
+        # Check the conversion and the upload, not the trailing |cut|head.
+        status=$(( ${PIPESTATUS[0]} + ${PIPESTATUS[1]} ))
+        if [ $status -ne 0 ]
+        then
+          echo 1>&2 Exit due to error. "$splitChunk" not loaded.;
+          return $status
+        fi
+        ;;
+    esac
+  done
+  # cd ..
+}
diff --git a/resources/tools/dev/snps2Dataset.pl b/resources/tools/dev/snps2Dataset.pl
index 857c8282e..10f1559c3 100755
--- a/resources/tools/dev/snps2Dataset.pl
+++ b/resources/tools/dev/snps2Dataset.pl
@@ -11,68 +11,438 @@
 #
 # initial version based on effects2Dataset.pl (a6e96c6)
-#-------------------------------------------------------------------------------
-# -*- tab-width 2; perl-indent-level : 2; perl-continued-statement-offset : 2; perl-continued-brace-offset : -2; -*-  (emacs)
-# vim: set tabstop=2 shiftwidth=2 noexpandtab:
 #-------------------------------------------------------------------------------
 use strict;
 use warnings;
 use Getopt::Std;	# for getopt()
+use Scalar::Util qw/reftype/;
+
+#-------------------------------------------------------------------------------
+
+# Forward declarations
+sub convertInput();
+sub createDataset();
+sub appendToBlock();
+sub makeTemplates();
+sub encode_json_2($$);
+sub columnConfig();
+sub chromosomeRenamePrepare();
+
+#-------------------------------------------------------------------------------
+
+# Handles dynamic / optional columns, in place of ColumnsEnum.
+my %columnsKeyLookup = ();
+my $c_arrayColumnName;
+
+#-------------------------------------------------------------------------------
+# main
+
+
+## Get options from ARGV
+my %options;
+getopts("vhd:p:b:n:c:s:C:F:P:gM:R:A:t:D:H", \%options);
+
+## Version and help options display
+use constant versionMsg => "2021 Apr.\n";
+use constant usageMsg => < Exome_SNPs_1A.json
+  Optional params : -n namespace [empty | 90k | ... ] -c "common name"
+  -C columnsKeyString e.g. "chr pos name ref_alt"
+  -F field separator, e.g. '\t', default ','
+  -P species prefix for chr number, e.g.
Ca + -M column for dataset from Metadata worksheet csv + -R Chromosome Renaming worksheet csv + -A array column name + -t tags + -D output directory + -H first line is header line +EOF + +my $datasetName = $options{d}; +my $parentName = $options{p}; +my $blockId = $options{b}; +# may be '', which is false-y +my $namespace = defined($options{n}) ? $options{n} : (defined($parentName) ? "$parentName:$datasetName" : $datasetName); +my $commonName = $options{c}; +my $shortName = $options{s}; # option, Exome. WGS +my $columnsKeyString = "chr pos name ref_alt"; +if (defined($options{C})) +{ + $columnsKeyString = $options{C}; +} + +my $fieldSeparator = $options{F} || ','; # '\t' +# Prefix the chr with e.g. 2-letter abbreviation of latin name (e.g. 'Ca') +# The chr input may be just a number, or it may have some other prefix which is trimmed off (see $chrPrefix). +my $chrOutputPrefix = $options{P} || ''; + +my $datasetMetaFile = $options{M}; +my $chromosomeRenamingFile = $options{R}; +# An array which is accumulated from preceding lines. +my $arrayColumnName = $options{A}; +# Accumulate values from column $arrayColumnName since last Feature. +my $arrayRef = []; + +#my $refAltSlash = 0; # option, default 0 +# true means add other columns to Feature.values { } +my $addValues = 1; # option : add values : { other columns, } +# option : if $namespace =~ m/90k/ etc, use $datasetHeaderGM +my $isGM = $options{g}; # default 0, 1 for physical data blocks + +# QTL worksheet may output multiple datasets. +# If undefined, output is to stdout, otherwise create a file named $dataset.json in $outputDir for each dataset. +my $outputDir = $options{D}; + +my $extraTags = $options{t}; # '"SNP"'; # . ", \"HighDensity\""; # option, default '' +if ($extraTags) +{ + # the tags are comma-separated, express them as a comma-separated list of strings wrapped with "". + $extraTags = '"' . join('", "', split(',', $extraTags)) . '"'; +} +else +{ + $extraTags = ''; +} + +# For loading Genome Reference / Parent : +# my $extraMeta = ''; # '"paths" : "false",'; # '"type" : "Genome",'; + +my $line1IsHeader = $options{H}; + +#------------------------------------------------------------------------------- + +if ($arrayColumnName) +{ + columnConfig(); + $c_arrayColumnName = defined($columnsKeyLookup{$arrayColumnName}) ? $columnsKeyLookup{$arrayColumnName} : undef; + # print join(';', keys(%columnsKeyLookup)), ',', $columnsKeyLookup{'end'}, ',', $arrayColumnName, ', ', $c_arrayColumnName || 'undef', "\n"; +} + +my $c_Trait = defined($columnsKeyLookup{'Trait'}) ? $columnsKeyLookup{'Trait'} : undef; +#------------------------------------------------------------------------------- + +# initialised by makeTemplates() +my $datasetHeader; +my $blockHeader; +my $blockFooter; +my $datasetFooter; +my $datasetHeaderGM; +# true after startDataset() +my $startedDataset = 0; #------------------------------------------------------------------------------- +sub main() +{ +if ($options{v}) { + print STDERR versionMsg; +} +elsif ($options{h}) +{ + print STDERR usageMsg; +} +elsif (defined ($datasetName) == defined ($blockId)) +{ + print STDERR usageMsg, < qw(c_chr c_pos c_scaffold_pos c_ref_alt); +# scaffold_pos -> name +# $columnsKeyString = "chr pos name ref_alt"; + +#SNP_20002403,LG7.2,40.5 +#PBA_LC_0373,LG7.3,0 +#SSR184,LG7.3,1.9 +#SNP_20004741,LG7.3,7.2 +# $columnsKeyString = "name chr pos"; +# This may be a requirement : +# my $chrPrefix = 'L.'; +# Assumption : if chr has 2 '.' after $chrPrefix then scope is : trim off the 2nd . 
and following chars. +#Lc_ILL_00694,L.5.1,480.1670411 +#Lc_ILL_00714,L.5.2,0 +#Lc_ILL_00037,L.5.2,4.321070321 + + +# equivalent to e.g : qw(c_chr c_pos c_name c_ref_alt) +# /r for non-destructive, allows chaining. +my $columnsKeyPrefixed; +# End position, optional column. +my $c_endPos; + +sub columnConfig() { + # $columnsKeyString indicates which columns contain the key values + # e.g. "chr name pos" or "name chr pos end" or "chr pos name ref_alt" + # Words are separated by single spaces (multiple spaces can be used to indicate columns which are not keys). + $columnsKeyString = $ENV{columnsKeyString} || "chr name pos"; + # print "columnsKeyString", $columnsKeyString, "\n"; + + # data flow : $columnsKeyString -> $columnsKeyPrefixed -> ColumnsEnum + # which defines the enums, c_name, c_chr, c_pos etc. + # Using an enum made sense in the initial version which had fixed columns, + # but now %columnsKeyLookup is more suitable. + # + # $columnsKeyString is space-separated, not comma. + # column header names which contain spaces are wrapped with "". + my @a1 = split(/"([^\"]*)"| */, $columnsKeyString ); + my @columnsKeyValues = grep { $_ } @a1; + # print 'columnsKeyValues : ', join(':', @columnsKeyValues), "\n"; + + for (my $ki=0; $ki <= $#columnsKeyValues; $ki++) + { + $columnsKeyLookup{$columnsKeyValues[$ki]} = $ki; + } +} + BEGIN { - eval "use constant (ColumnsEnum)[$_] => $_;" foreach 0..(ColumnsEnum)-1; + columnConfig(); + $columnsKeyPrefixed = $columnsKeyString + =~ s/,/ /rg + =~ s/^/c_/r + =~ s/ / c_/rg; + # print 'columnsKeyPrefixed : ', $columnsKeyPrefixed, "\n"; + # my @a2 = split(' ', $columnsKeyPrefixed); + # print 'a2 : ', join(':', @a2), "\n"; + + # These columns are identified using variables, (e.g. $c_endPos), + # because the corresponding enum (e.g. c_endPos) can't have a conditional value. + $c_endPos = defined($columnsKeyLookup{'end'}) ? $columnsKeyLookup{'end'} : undef; +} +use constant ColumnsEnum => split(' ', $columnsKeyPrefixed); +BEGIN +{ + eval "use constant (ColumnsEnum)[$_] => $_;" foreach 0..(ColumnsEnum)-1; + eval "use constant c_start => c_pos;"; } -sub convertInput(); #------------------------------------------------------------------------------- +my @columnHeaders; + +# @return true if the given line is a column header row +sub headerLine($$) { + my ($line, $lineNumber) = @_; + my $isHeader = ($lineNumber == 1) && + ( + $line1IsHeader || + ($line =~ m/^label chr pos/) + || ($line =~ m/^name,chr,pos/) + || (($line =~ m/Marker|Name/i) && ($line =~ m/Chromosome/i)) + || ($line =~ m/Contig,Position/i) + ); + if ($isHeader) { + @columnHeaders = map { trimOutsideQuotesAndSpaces($_); } split($fieldSeparator); + } + return $isHeader; +} + +#------------------------------------------------------------------------------- + +# Sanitize input by removing punctuation other than space, comma, _, ., /, \n +# Commonly _ and . are present in parentName. +# Space appears in commonName (handled in .bash). +# , is used for splitting csv lines, and / appears in some chr names e.g. 'LG5/LG7' +# Related : deletePunctuation() in uploadSpreadsheet.bash +sub deletePunctuation($) +{ + my ($text) = @_; + $text =~ tr/_.,\/\n 0-9A-Za-z//cd; + return $text; +} + + +# hash -> json +# Only need simple 1-level json output, so implement it here to avoid installing JSON.pm. +sub simple_encode_json($) +{ + my ($data) = @_; + my @fields = (); + for my $key (keys %$data) { + push @fields, '"' . $key . '" : "' . $data->{$key} . 
'"'; + } + return @fields; +} + +# slightly more complete - handle hash or array, or a hash with an array value +# @param $indent +# @param $data +sub encode_json_2($$) +{ + my ($indent, $data) = @_; + + my $json; + if (reftype $data eq 'ARRAY') + { + my $quote = $#$data ? '"' : ''; + $json = '[' . $quote . join('"' . ",\n" . $indent . '"' , @$data) . $quote . ']'; + + } + elsif (reftype $data eq 'HASH') + { + my @fields = (); + for my $key (keys %$data) { + my $value = $data->{$key}; + my $valueString = (reftype \$value eq 'SCALAR') ? + '"' . $value . '"' + : encode_json_2($indent . ' ', $value); + push @fields, '"' . $key . '" : ' . $valueString; + } + $json = '{' . join(",\n" . $indent, @fields) . '}'; + } + else + { + $json = '"' . $data . '"'; + } + + return $json; +} + +# Populate Dataset .meta from command-line options and +# column for dataset from Metadata worksheet. +sub setupMeta() +{ + my %meta = (); + + if (defined($shortName) && $shortName) + { + $meta{'shortName'} = $shortName; + } + if (defined($commonName) && $commonName) + { + $meta{'commonName'} = $commonName; + } + # When called from uploadSpreadsheet.bash, meta.type can now be set from the Metadata worksheet. + if ($isGM) { + $meta{'type'} = "Genetic Map"; + } + + #----------------------------------------------------------------------------- + # Read additional meta from file. + if (defined($datasetMetaFile) && $datasetMetaFile) + { + if (! open(FH, '<', $datasetMetaFile)) + { warn $!; } + else + { + while(){ + chomp; + my ($fieldName, $value) = split(/,/, $_); + if (! ($fieldName =~ m/commonName|parentName|platform|shortName/)) { + $meta{$fieldName} = $value; + } + } + close(FH); + } + } + + # use JSON; + # my $metaJson = encode_json \%meta; + my $metaJson = '{' . join(",\n ", simple_encode_json(\%meta)) . '}'; + + return $metaJson; +} + +sub makeTemplates() +{ + my $metaJson = setupMeta(); + + # Could include . "\n" in this expression, but OTOH there is some + # value in leaving blank lines when parent and namespace are not defined. + # (the template does contain the indent spaces so the line is blank but not empty). + my $parentJson = defined($parentName) ? '"parent" : "' . $parentName . '",' : ''; + my $namespaceJson = defined($namespace) ? '"namespace" : "' . $namespace . '",' : ''; + + # Used to form the JSON structure of datasets and blocks. # Text extracted from pretzel-data/myMap.json -# These are indented with 4 spaces, whereas the remainder of the file is indented with 2-column tabs. -my $datasetHeader = < "2020 Dec 07 (Don Isdale).\n"; -use constant usageMsg => < Exome_SNPs_1A.json -EOF +main(); -my $datasetName = $options{d}; -my $parentName = $options{p}; +#------------------------------------------------------------------------------- -if ($options{v}) { - print versionMsg; -} -elsif ($options{h}) +sub createDataset() { - print usageMsg; + if ($isGM) { + $datasetHeader = $datasetHeaderGM; + } + + if (! $outputDir) + { + print $datasetHeader; + } + + convertInput(); + + optionalBlockFooter(); + print $datasetFooter; } -elsif (!defined ($datasetName)) +sub startDataset() { - print usageMsg, <) + while (<>) { - chomp; - # commenting out this condition will output the column headers in the JSON, - # which is a useful check of column alignment with the ColumnsEnum. - if (! m/^label chr pos/) + chomp; + # commenting out this condition will output the column headers in the JSON, + # which is a useful check of column alignment with the ColumnsEnum. + if (@columnHeaders || ! 
headerLine($_, $.)) { snpLine($_); } } - optionalBlockFooter(); - print $datasetFooter; } sub optionalBlockFooter() { - if (defined($lastChr)) + if (defined($lastChr)) { print $blockFooter; } } +#------------------------------------------------------------------------------- + +my %chromosomeRenames; +# Read $chromosomeRenamingFile +sub chromosomeRenamePrepare() +{ + if (defined($chromosomeRenamingFile) && $chromosomeRenamingFile) + { + if (! open(FH, '<', $chromosomeRenamingFile)) + { warn $!, "'$chromosomeRenamingFile'\n"; } + else + { + while(){ + chomp; + # Skip empty lines. + ! $_ && continue; + # deletePunctuation() is applied to both $fromName and $toName. + # $fromName is used as an array index, whereas $toName is + # simply inserted into the json output, so is perhaps lower risk. + my ($fromName, $toName) = split(/,/, deletePunctuation($_)); + $chromosomeRenames{$fromName} = $toName; + } + close(FH); + } + } +} + + +#------------------------------------------------------------------------------- + +my $chromosomeRenamedFrom; # read 1 line, which defines a SNP and associated reference/alternate data sub snpLine($) { - my ($line) = @_; - # input line e.g. - #c_chr c_pos c_scaffold_pos c_ref_alt - #chr1A 22298 scaffold38755_22298 T/C + my ($line) = @_; + # input line e.g. + #c_chr c_pos c_name c_ref_alt + #chr1A 22298 scaffold38755_22298 T/C + + + my @a = split($fieldSeparator, $line); + @a = map { trimOutsideQuotesAndSpaces($_) } @a; + + if (defined($c_arrayColumnName) && $a[$c_arrayColumnName]) + { + push @$arrayRef, $a[$c_arrayColumnName]; + } + + # Skip blank lines + if (! $a[c_name] && ! $a[c_chr]) + { + # Could output a warning if the line is not blank, i.e. not /^,,,/, or $a[c_pos] + return; + } + # For QTL : Flanking Marker by itself in a row is added as a feature + # to current block / QTL + elsif ($a[c_name] && ! $a[c_chr] && ! $a[c_pos] && + defined($c_Trait) && $columnsKeyLookup{'parentname'}) + { + $a[c_pos] = 'null'; + $a[$c_endPos] = ''; + } + elsif (defined($c_Trait)) + { + # If trait is blank / empty, use current. + if ($a[$c_Trait]) + { + $currentTrait = $a[$c_Trait]; + } + else + { + $a[$c_Trait] = $currentTrait; + } + } - my @a = split( '\t', $line); - # tsv datasets often follow the naming convention 'chr1A'; Pretzel data omits 'chr' for block scope & name : '1A'. + # $a[c_chr] = trimOutsideQuotesAndSpaces($a[c_chr]); + # tsv datasets often follow the naming convention 'chr1A'; Pretzel data omits 'chr' for block scope & name : '1A'. + if (! %chromosomeRenames) + { $a[c_chr] =~ s/^chr//; - my $c = $a[c_chr]; - if (! defined($lastChr) || ($lastChr ne $c)) + $a[c_chr] = $chrOutputPrefix . $a[c_chr]; + } + else + # Apply %chromosomeRenames + { + # deletePunctuation() is applied to $fromName in chromosomeRenamePrepare(), + # so applying it equally here to $a[c_chr] enables fromName containing punctuation to match, + # e.g. genbank ids contain '|'. + # Apply to Scope column, or Chromosome. + my $c_scope = $columnsKeyLookup{'Scope'}; + my $col = defined($c_scope) ? 
$c_scope : c_chr; + my $toName = $chromosomeRenames{deletePunctuation($a[$col])}; + if (defined($toName)) { - optionalBlockFooter(); + $chromosomeRenamedFrom = $a[$col]; + $a[$col] = $toName; + } + } - # print $c; - $lastChr = $c; + $a[c_name] = markerPrefix($a[c_name]); + + # start new Dataset when change in parentName + my $c_parentName = $columnsKeyLookup{'parentname'}; + if (defined($c_parentName)) + { + $parentName = $a[$c_parentName]; + if ($parentName) + { + $datasetName = $currentTrait; + makeTemplates(); + if ($startedDataset) + { + endDataset(); + } + $lastChr = undef; + $blockSeparator = undef; + if ($outputDir) + { + my $datasetOutFile = "$outputDir/$datasetName.json"; + # re-open stdout + open(my $oldStdout, ">&STDOUT") or die "Can't dup STDOUT: $!"; + open(STDOUT, '>', $datasetOutFile) or die "Can't redirect STDOUT to '$datasetOutFile': $!"; + } + startDataset(); + } + } - if (defined($blockSeparator)) - { print $blockSeparator; } - else - { $blockSeparator = ",\n"; } - my $h = $blockHeader; - # replace '1A' in the $blockHeader template with the actual chromosome name $c. - $h =~ s/1A/$c/g; - print $h; + # If Chromosome has changed, end the block and start a new block. + # If Chromosome is empty / blank, use current ($lastChr). + my $c = $a[c_chr]; + if (! defined($lastChr) || ($c && ($lastChr ne $c))) + { + if (defined($blockId)) + { + $lastChr = $c; + } + else + { + optionalBlockFooter(); + + # print $c; + $lastChr = $c; + + if (defined($blockSeparator)) + { print $blockSeparator; } + else + { $blockSeparator = ",\n"; } + + my $h = blockHeader($chromosomeRenamedFrom); + # replace 'blockName' in the $blockHeader template with the actual chromosome name $c. + # and blockScope with : the scope which is the chr $c with .[1-9] trimmed off + # or scope might be just the chr name $c so that each GM block gets its own axis. + # Use Scope column if given. + my $c_scope = $columnsKeyLookup{'Scope'}; + my $scope = defined($c_scope) ? $a[$c_scope] : $c; # ($c =~ s/\.[1-9]$//r); + $h =~ s/blockName/$c/g; + $h =~ s/blockScope/$scope/g; + print $h; + + # create block (and nominal feature) and feature. use scope and parentName, + # Start/End are block range, or create a nominal feature for the block + # (could put extra columns values in this, or in block.meta) + + # Output nominal feature of block + # printFeature(@a); # done below + my $c_parentName = $columnsKeyLookup{'parentName'}; + if (defined($c_parentName)) + { + my @f = (); + $f[c_name] = $a[c_name]; + $f[c_pos] = 'null'; + if (defined($c_endPos)) + { $f[$c_endPos] = ''; } + printFeature(@f); + # print feature separator + print ","; + } + } } - else # print feature separator + else # print feature separator { print ","; } - printFeature(@a); + + printFeature(@a); +} + +# Strip off outside " and spaces, to handle e.g. +# "LG4 ",Ca_2289,0 +# Ps_ILL_03447,"LG 2",0 +# Used for name (label) and chr (chromosome / block) name columns. +sub trimOutsideQuotesAndSpaces($) { + my ($label) = @_; + if ($label =~ m/"/) { + $label =~ s/^"//; + $label =~ s/"$//; + } + if ($label =~ m/ /) { + $label =~ s/^ //; + $label =~ s/ $//; + } + return $label; } +# Illumina OPA SNP names are [1234]000 or SNP_[1234]000. +# Prefix with SNP_ if not present, to make all consistent. +sub markerPrefix($) { + my ($name) = @_; + if ($name =~ m/^[1234]000/) + { + $name = "SNP_" . $name; + } + return $name +} + +# @return true if the given string has a leading # or "# +# i.e. is a comment. 
+# related : filterOutComments() (backend/scripts/uploadSpreadsheet.bash)
+sub isComment($)
+{
+  my ($columnHeader) = @_;
+  return $columnHeader =~ m/^#|^"#/;
+}
+
+# Recognise decimal fraction aliasing and round the number.
+#
+# ssconvert apparently has different rounding to libreoffice, as the former
+# expresses some decimal fractions with recurring 0 or 9.
+# e.g. comparing output from libreoffice and ssconvert respectively
+# < SNP_40002085,LG1,1.3
+# > SNP_40002085,LG1,1.2999999999999998
+# < SNP_40001996,LG1,7.6
+# > SNP_40001996,LG1,7.6000000000000005
+#
+# ssconvert handles multiple work-sheets within the .xlsx, but libreoffice does not.
+#
+# If the number has a few decimal digits in the source spreadsheet, then
+# the number of 0-s or 9-s to match here may be as few as 11; match a minimum of 6.
+# The SNP / marker name may also contain 4 0-s, but that is a different column and they are unlikely to have 8.
+sub roundPosition($)
+{
+  my ($pos) = @_;
+  if ($pos =~ m/000000|999999/) {
+    $pos = (sprintf('%.8f', $pos) =~ s/0+$//r =~ s/\.$//r);
+  }
+  return $pos;
+}
+
+
 # For printing array as comma-separated list.
 # Could make this local if it clashed with any other print.
 # As an alternative to using join to construct $aCsv in printFeature(), can do :
@@ -198,22 +774,104 @@
 ($)
 sub printFeature($)
 {
-  my (@a) = @_;
+  my (@a) = @_;
+
+  # No longer removing key values from @a, so $ak can be simply $a.
+  # Copy the essential / key columns; remainder may go in .values.
+  my (@ak) = ();
+
+  my $c;
+  for $c (c_name, c_chr, c_pos, c_start, $c_endPos)
+  {
+    if (defined($c)) {
+      $ak[$c] = $a[$c];
+    }
+  }
-  my $chr = shift @a;
-  my $pos = shift @a;
-  my $label = shift @a;	# c_scaffold_pos
-  my $ref_alt = shift @a;
-  print <&2 required : define slack_postEventsAPP_URL e.g. https://hooks.slack.com/services/.../.../...
+
+logDir=$HOME/log/monitor
+
+#-------------------------------------------------------------------------------
+
+# post param to Slack app postEventsAPP (plantinformatics)
+# @param text should not contain punctuation such as \'\"\''[]<()-'
+function postText() {
+  pText="$1"
+  # Splice $pText in outside the single quotes so that it is expanded; inside them it would be sent literally.
+  curl -X POST -H 'Content-type: application/json' --data '{"text":"'"$pText"'"}' $slack_postEventsAPP_URL
+}
+# post stdin to Slack app postEventsAPP (plantinformatics)
+# Punctuation is filtered out because currently the text is passed via command-line params.
+function postInput() {
+  # enable this for dev / test
+  if false
+  then
+    (date; cat >> $logDir/test.log)
+  else
+    tr -d \'\"\''[]<()-' | curl -X POST -H 'Content-type: application/json' --data '{"text":"'"$SERVER_NAME"'
+'"$(cat)"'"}' $slack_postEventsAPP_URL
+  fi
+}
+
+# Post stdin as a 'text snippet' via file-upload
+# @param textLabel displayed as the message
+function postInputAsSnippet() {
+  textLabel=$1
+  # As in postText(), $textLabel is spliced in outside the single quotes so that it is expanded.
+  tr -d \'\"\''[]<()-' | curl -X POST -H 'Content-type: application/json' --data '{"text":"'"$textLabel"'", "channels":"GC57GHSR2", "fileType":"text", "content":"'"$(cat)"'"}' $slack_postEventsAPP_URL
+}
+
+# run initially to set up $logDir, so that accessDiffPost() may be run.
+function setupMonitor() {
+  [ -d ~/log ] || mkdir ~/log || return
+  [ -d $logDir ] || mkdir $logDir || return
+  if [ ! -f $logDir/access.log ] ; then sudo cp -ip /var/log/nginx/access.log $logDir/access.log ; fi;
+
+  cd $logDir || return
+  [ -f server.log ] || touch server.log || return
+}
+
+# run regularly, e.g. from cron
+function accessDiffPost() {
+  # To handle nginx log rolling, show only the added lines of the diff, not the removed lines.
+ # refn http://www.gnu.org/software/diffutils/manual/html_node/Line-Group-Formats.html + # and https://stackoverflow.com/a/15385080 + if sudo diff --changed-group-format='%>' --unchanged-group-format='' $logDir/access.log /var/log/nginx/access.log > $logDir/access.log.diff; + then + : # same + else + if fgrep /api/Clients $logDir/access.log.diff | fgrep -v /api/Clients/login > $logDir/access.log.diff.api_Clients; + then + postInput < $logDir/access.log.diff.api_Clients + fi + sudo cp -p /var/log/nginx/access.log $logDir/access.log + fi +} + +# ------------------------------------------------------------------------------ + +function currentLog() { echo -n ~/log/nohup_out/; ls -rt ~/log/nohup_out/ | tail -1; } + +# Similar to accessDiffPost, but monitor the node server log +# run regularly, e.g. from cron +function serverDiffPost() { + l1=$( currentLog ) + [ -z "$l1" -o \! -f "$l1" ] && return + cd $logDir || return + logPrev=server.log + # To handle server log rolling (when server is restarted), show only the added lines of the diff, not the removed lines. + if sudo diff --changed-group-format='%>' --unchanged-group-format='' $logPrev "$l1" > $logPrev.diff; + then + : # same + else + # /api/Clients| is already logged from nginx log + if egrep 'Error: Invalid token|ValidationError' $logPrev.diff | fgrep -v /api/Clients/login > $logPrev.diff.report; + then + postInput < $logPrev.diff.report + fi + sudo cp -p "$l1" $logPrev + fi +} + +# ------------------------------------------------------------------------------ + +newSignup=newSignup +function setupMonitorSignup() { + [ -d $logDir/$newSignup ] || mkdir $logDir/$newSignup || return + cd $logDir || return + monitorSignup + ls -l *erified.tsv $newSignup +} + + +function monitorSignup() { + emailVerified=true signupReport > verified.tsv + signupReport > notVerified.tsv +} + + +# Compare the current and previous versions of a file. +# Used for showing additions to a log file. +# Output diff to stdout, with diffLabel appended if not empty. +# +# @return same as diff : +# diff returns : 0 (true) if files are identical, otherwise 1 (false) +# +# @param newDir dir containing the new version. previous is in ./ +# @param fileName name of pair of files to diff +# @param diffLabel text to label the diff output with, if not empty +function diffPrevious() { + newDir="$1" + fileName="$2" + diffLabel="$3" + statusDP=0 + diff {,"$newDir"/}"$fileName" || { statusDP=$?; echo "$diffLabel" ; } + return $statusDP +} + + +# @return same as diff : +# diff returns : 0 (true) if each pair of files is identical, otherwise 1 or 2 (both values are false) +function signupDiffBoth() { + diffPrevious "$newSignup" verified.tsv 'verified +---' + status1=$? + + diffPrevious "$newSignup" notVerified.tsv 'notVerified' + status2=$? + + return $(expr $status1 + $status2) +} + +function signupDiffPost() { + cd $logDir/$newSignup || return + monitorSignup + cd .. + if signupDiffBoth > signup.diff + then + : # same + else + postInput < signup.diff + ls -l {,$newSignup/}*erified.tsv signup.diff + cp -p $newSignup/* . + fi +} + +# Diff notVerified / unapproved since last call +# @param periodName text name for directory - e.g. "daily" +# The directory caches the previous value which is the reference for diff +# +# Usage, e.g. cron : bash -c "source ~/pretzel/resources/tools/mongo_admin.bash; source ~/pretzel/resources/tools/functions_hosted.bash; DIM=... 
; slack_postEventsAPP_URL=...; signupDiffUnapprovedPost daily 2>&1" >> $HOME/log/monitor/cron.log 2>&1 +function signupDiffUnapprovedPost() { + if [ $# -ne 1 ] + then + echo "Usage : $0 periodName" 1>&2; + else + periodName="$1" + cd $logDir/ || return + [ -d "$periodName" ] || mkdir "$periodName" || return + cd "$periodName" + [ -d $newSignup ] || mkdir $newSignup || return + [ -f "notVerified.tsv" ] || { signupReport > notVerified.tsv; return; } + + signupReport > $newSignup/notVerified.tsv + + diffPrevious "$newSignup" notVerified.tsv 'notVerified' > signupUnapproved.diff + statusPeriod=$? + + if [ "$statusPeriod" -ne 0 ] + then + postInput < signupUnapproved.diff + ls -l {,$newSignup/}notVerified.tsv signupUnapproved.diff + cp -p $newSignup/notVerified.tsv . + fi + fi +} + +# ------------------------------------------------------------------------------ diff --git a/resources/tools/mongo_admin.bash b/resources/tools/mongo_admin.bash index ea58c5e61..ab43d3470 100644 --- a/resources/tools/mongo_admin.bash +++ b/resources/tools/mongo_admin.bash @@ -9,6 +9,8 @@ #------------------------------------------------------------------------------- unused=${SERVER_NAME=main} +# Using pretzel in place of admin in new instances. +unused=${DB_NAME=admin} #------------------------------------------------------------------------------- @@ -26,7 +28,7 @@ checkDIM() dbCollections() { checkDIM && - docker exec -it $DIM mongo --quiet admin --eval "db.getCollectionNames()" | tr -d '[\[\]",\t ]' | tr '\r' ' ' + docker exec -it $DIM mongo --quiet $DB_NAME --eval "db.getCollectionNames()" | tr -d '[\[\]",\t ]' | tr '\r' ' ' } @@ -35,13 +37,13 @@ function mongodump2S3() logDate=`date +%Y%b%d` echo $logDate # 2018Sep26 - export S3_MON="s3://shared-data-4pretzel/mongodb/$SERVER_NAME.admin/$logDate" + export S3_MON="s3://shared-data-4pretzel/mongodb/$SERVER_NAME.$DB_NAME/$logDate" echo $S3_MON collections=$(dbCollections ) echo $collections sleep 5 - docker exec -i $DIM mongodump --archive --gzip --db admin | aws s3 cp - $S3_MON.gz \ + docker exec -i $DIM mongodump --archive --gzip --db $DB_NAME | aws s3 cp - $S3_MON.gz \ && aws s3 ls $S3_MON.tar.gz } @@ -64,7 +66,7 @@ function signupList() unused=${emailVerified=false} checkDIM && - docker exec -i $DIM mongo --quiet admin <
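These monitoring helpers are designed to be run from cron, as the usage comment above illustrates for signupDiffUnapprovedPost. A hedged crontab sketch follows, assuming the functions live in resources/tools/functions_hosted.bash as that comment indicates; the schedules, container name and webhook URL are placeholders, and setupMonitor / setupMonitorSignup are run once by hand beforehand:

# every 10 minutes : post new nginx access-log and node server-log lines to Slack
*/10 * * * * bash -c "source $HOME/pretzel/resources/tools/functions_hosted.bash; SERVER_NAME=main; slack_postEventsAPP_URL=https://hooks.slack.com/services/...; accessDiffPost; serverDiffPost" >> $HOME/log/monitor/cron.log 2>&1
# daily : report sign-ups awaiting approval
30 8 * * * bash -c "source $HOME/pretzel/resources/tools/mongo_admin.bash; source $HOME/pretzel/resources/tools/functions_hosted.bash; DIM=mongoContainer; slack_postEventsAPP_URL=https://hooks.slack.com/services/...; signupDiffUnapprovedPost daily" >> $HOME/log/monitor/cron.log 2>&1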